mirror of https://github.com/facebook/docusaurus.git
synced 2025-04-28 09:47:48 +02:00
10031 lines · 270 KiB · JavaScript · Vendored
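// Bundler-generated CommonJS/ESM interop helpers (esbuild-style): __commonJS lazily
// evaluates a CommonJS module factory and caches its exports; __toESM and __toCommonJS
// convert a module object between the ESM and CommonJS shapes.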
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __commonJS = (cb, mod) => function __require() {
  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
var __export = (target, all2) => {
  for (var name in all2)
    __defProp(target, name, { get: all2[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// node_modules/extend/index.js
var require_extend = __commonJS({
  "node_modules/extend/index.js"(exports, module2) {
    "use strict";
    var hasOwn = Object.prototype.hasOwnProperty;
    var toStr = Object.prototype.toString;
    var defineProperty = Object.defineProperty;
    var gOPD = Object.getOwnPropertyDescriptor;
    var isArray = function isArray2(arr) {
      if (typeof Array.isArray === "function") {
        return Array.isArray(arr);
      }
      return toStr.call(arr) === "[object Array]";
    };
    var isPlainObject2 = function isPlainObject3(obj) {
      if (!obj || toStr.call(obj) !== "[object Object]") {
        return false;
      }
      var hasOwnConstructor = hasOwn.call(obj, "constructor");
      var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, "isPrototypeOf");
      if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) {
        return false;
      }
      var key;
      for (key in obj) {
      }
      return typeof key === "undefined" || hasOwn.call(obj, key);
    };
    var setProperty = function setProperty2(target, options) {
      if (defineProperty && options.name === "__proto__") {
        defineProperty(target, options.name, {
          enumerable: true,
          configurable: true,
          value: options.newValue,
          writable: true
        });
      } else {
        target[options.name] = options.newValue;
      }
    };
    var getProperty = function getProperty2(obj, name) {
      if (name === "__proto__") {
        if (!hasOwn.call(obj, name)) {
          return void 0;
        } else if (gOPD) {
          return gOPD(obj, name).value;
        }
      }
      return obj[name];
    };
    module2.exports = function extend2() {
      var options, name, src, copy, copyIsArray, clone;
      var target = arguments[0];
      var i = 1;
      var length = arguments.length;
      var deep = false;
      if (typeof target === "boolean") {
        deep = target;
        target = arguments[1] || {};
        i = 2;
      }
      if (target == null || typeof target !== "object" && typeof target !== "function") {
        target = {};
      }
      for (; i < length; ++i) {
        options = arguments[i];
        if (options != null) {
          for (name in options) {
            src = getProperty(target, name);
            copy = getProperty(options, name);
            if (target !== copy) {
              if (deep && copy && (isPlainObject2(copy) || (copyIsArray = isArray(copy)))) {
                if (copyIsArray) {
                  copyIsArray = false;
                  clone = src && isArray(src) ? src : [];
                } else {
                  clone = src && isPlainObject2(src) ? src : {};
                }
                setProperty(target, { name, newValue: extend2(deep, clone, copy) });
              } else if (typeof copy !== "undefined") {
                setProperty(target, { name, newValue: copy });
              }
            }
          }
        }
      }
      return target;
    };
  }
});

// node_modules/remark/index.js
var remark_exports = {};
__export(remark_exports, {
  remark: () => remark1501
});
module.exports = __toCommonJS(remark_exports);

// node_modules/mdast-util-to-string/lib/index.js
var emptyOptions = {};
function toString(value, options) {
  const settings = options || emptyOptions;
  const includeImageAlt = typeof settings.includeImageAlt === "boolean" ? settings.includeImageAlt : true;
  const includeHtml = typeof settings.includeHtml === "boolean" ? settings.includeHtml : true;
  return one(value, includeImageAlt, includeHtml);
}
function one(value, includeImageAlt, includeHtml) {
  if (node(value)) {
    if ("value" in value) {
      return value.type === "html" && !includeHtml ? "" : value.value;
    }
    if (includeImageAlt && "alt" in value && value.alt) {
      return value.alt;
    }
    if ("children" in value) {
      return all(value.children, includeImageAlt, includeHtml);
    }
  }
  if (Array.isArray(value)) {
    return all(value, includeImageAlt, includeHtml);
  }
  return "";
}
function all(values, includeImageAlt, includeHtml) {
  const result = [];
  let index2 = -1;
  while (++index2 < values.length) {
    result[index2] = one(values[index2], includeImageAlt, includeHtml);
  }
  return result.join("");
}
function node(value) {
  return Boolean(value && typeof value === "object");
}

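// The map below (from the character-entities package) resolves named HTML character
// references such as "AElig" or "amp" to their replacement text.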
// node_modules/character-entities/index.js
|
||
var characterEntities = {
|
||
AElig: "\xC6",
|
||
AMP: "&",
|
||
Aacute: "\xC1",
|
||
Abreve: "\u0102",
|
||
Acirc: "\xC2",
|
||
Acy: "\u0410",
|
||
Afr: "\u{1D504}",
|
||
Agrave: "\xC0",
|
||
Alpha: "\u0391",
|
||
Amacr: "\u0100",
|
||
And: "\u2A53",
|
||
Aogon: "\u0104",
|
||
Aopf: "\u{1D538}",
|
||
ApplyFunction: "\u2061",
|
||
Aring: "\xC5",
|
||
Ascr: "\u{1D49C}",
|
||
Assign: "\u2254",
|
||
Atilde: "\xC3",
|
||
Auml: "\xC4",
|
||
Backslash: "\u2216",
|
||
Barv: "\u2AE7",
|
||
Barwed: "\u2306",
|
||
Bcy: "\u0411",
|
||
Because: "\u2235",
|
||
Bernoullis: "\u212C",
|
||
Beta: "\u0392",
|
||
Bfr: "\u{1D505}",
|
||
Bopf: "\u{1D539}",
|
||
Breve: "\u02D8",
|
||
Bscr: "\u212C",
|
||
Bumpeq: "\u224E",
|
||
CHcy: "\u0427",
|
||
COPY: "\xA9",
|
||
Cacute: "\u0106",
|
||
Cap: "\u22D2",
|
||
CapitalDifferentialD: "\u2145",
|
||
Cayleys: "\u212D",
|
||
Ccaron: "\u010C",
|
||
Ccedil: "\xC7",
|
||
Ccirc: "\u0108",
|
||
Cconint: "\u2230",
|
||
Cdot: "\u010A",
|
||
Cedilla: "\xB8",
|
||
CenterDot: "\xB7",
|
||
Cfr: "\u212D",
|
||
Chi: "\u03A7",
|
||
CircleDot: "\u2299",
|
||
CircleMinus: "\u2296",
|
||
CirclePlus: "\u2295",
|
||
CircleTimes: "\u2297",
|
||
ClockwiseContourIntegral: "\u2232",
|
||
CloseCurlyDoubleQuote: "\u201D",
|
||
CloseCurlyQuote: "\u2019",
|
||
Colon: "\u2237",
|
||
Colone: "\u2A74",
|
||
Congruent: "\u2261",
|
||
Conint: "\u222F",
|
||
ContourIntegral: "\u222E",
|
||
Copf: "\u2102",
|
||
Coproduct: "\u2210",
|
||
CounterClockwiseContourIntegral: "\u2233",
|
||
Cross: "\u2A2F",
|
||
Cscr: "\u{1D49E}",
|
||
Cup: "\u22D3",
|
||
CupCap: "\u224D",
|
||
DD: "\u2145",
|
||
DDotrahd: "\u2911",
|
||
DJcy: "\u0402",
|
||
DScy: "\u0405",
|
||
DZcy: "\u040F",
|
||
Dagger: "\u2021",
|
||
Darr: "\u21A1",
|
||
Dashv: "\u2AE4",
|
||
Dcaron: "\u010E",
|
||
Dcy: "\u0414",
|
||
Del: "\u2207",
|
||
Delta: "\u0394",
|
||
Dfr: "\u{1D507}",
|
||
DiacriticalAcute: "\xB4",
|
||
DiacriticalDot: "\u02D9",
|
||
DiacriticalDoubleAcute: "\u02DD",
|
||
DiacriticalGrave: "`",
|
||
DiacriticalTilde: "\u02DC",
|
||
Diamond: "\u22C4",
|
||
DifferentialD: "\u2146",
|
||
Dopf: "\u{1D53B}",
|
||
Dot: "\xA8",
|
||
DotDot: "\u20DC",
|
||
DotEqual: "\u2250",
|
||
DoubleContourIntegral: "\u222F",
|
||
DoubleDot: "\xA8",
|
||
DoubleDownArrow: "\u21D3",
|
||
DoubleLeftArrow: "\u21D0",
|
||
DoubleLeftRightArrow: "\u21D4",
|
||
DoubleLeftTee: "\u2AE4",
|
||
DoubleLongLeftArrow: "\u27F8",
|
||
DoubleLongLeftRightArrow: "\u27FA",
|
||
DoubleLongRightArrow: "\u27F9",
|
||
DoubleRightArrow: "\u21D2",
|
||
DoubleRightTee: "\u22A8",
|
||
DoubleUpArrow: "\u21D1",
|
||
DoubleUpDownArrow: "\u21D5",
|
||
DoubleVerticalBar: "\u2225",
|
||
DownArrow: "\u2193",
|
||
DownArrowBar: "\u2913",
|
||
DownArrowUpArrow: "\u21F5",
|
||
DownBreve: "\u0311",
|
||
DownLeftRightVector: "\u2950",
|
||
DownLeftTeeVector: "\u295E",
|
||
DownLeftVector: "\u21BD",
|
||
DownLeftVectorBar: "\u2956",
|
||
DownRightTeeVector: "\u295F",
|
||
DownRightVector: "\u21C1",
|
||
DownRightVectorBar: "\u2957",
|
||
DownTee: "\u22A4",
|
||
DownTeeArrow: "\u21A7",
|
||
Downarrow: "\u21D3",
|
||
Dscr: "\u{1D49F}",
|
||
Dstrok: "\u0110",
|
||
ENG: "\u014A",
|
||
ETH: "\xD0",
|
||
Eacute: "\xC9",
|
||
Ecaron: "\u011A",
|
||
Ecirc: "\xCA",
|
||
Ecy: "\u042D",
|
||
Edot: "\u0116",
|
||
Efr: "\u{1D508}",
|
||
Egrave: "\xC8",
|
||
Element: "\u2208",
|
||
Emacr: "\u0112",
|
||
EmptySmallSquare: "\u25FB",
|
||
EmptyVerySmallSquare: "\u25AB",
|
||
Eogon: "\u0118",
|
||
Eopf: "\u{1D53C}",
|
||
Epsilon: "\u0395",
|
||
Equal: "\u2A75",
|
||
EqualTilde: "\u2242",
|
||
Equilibrium: "\u21CC",
|
||
Escr: "\u2130",
|
||
Esim: "\u2A73",
|
||
Eta: "\u0397",
|
||
Euml: "\xCB",
|
||
Exists: "\u2203",
|
||
ExponentialE: "\u2147",
|
||
Fcy: "\u0424",
|
||
Ffr: "\u{1D509}",
|
||
FilledSmallSquare: "\u25FC",
|
||
FilledVerySmallSquare: "\u25AA",
|
||
Fopf: "\u{1D53D}",
|
||
ForAll: "\u2200",
|
||
Fouriertrf: "\u2131",
|
||
Fscr: "\u2131",
|
||
GJcy: "\u0403",
|
||
GT: ">",
|
||
Gamma: "\u0393",
|
||
Gammad: "\u03DC",
|
||
Gbreve: "\u011E",
|
||
Gcedil: "\u0122",
|
||
Gcirc: "\u011C",
|
||
Gcy: "\u0413",
|
||
Gdot: "\u0120",
|
||
Gfr: "\u{1D50A}",
|
||
Gg: "\u22D9",
|
||
Gopf: "\u{1D53E}",
|
||
GreaterEqual: "\u2265",
|
||
GreaterEqualLess: "\u22DB",
|
||
GreaterFullEqual: "\u2267",
|
||
GreaterGreater: "\u2AA2",
|
||
GreaterLess: "\u2277",
|
||
GreaterSlantEqual: "\u2A7E",
|
||
GreaterTilde: "\u2273",
|
||
Gscr: "\u{1D4A2}",
|
||
Gt: "\u226B",
|
||
HARDcy: "\u042A",
|
||
Hacek: "\u02C7",
|
||
Hat: "^",
|
||
Hcirc: "\u0124",
|
||
Hfr: "\u210C",
|
||
HilbertSpace: "\u210B",
|
||
Hopf: "\u210D",
|
||
HorizontalLine: "\u2500",
|
||
Hscr: "\u210B",
|
||
Hstrok: "\u0126",
|
||
HumpDownHump: "\u224E",
|
||
HumpEqual: "\u224F",
|
||
IEcy: "\u0415",
|
||
IJlig: "\u0132",
|
||
IOcy: "\u0401",
|
||
Iacute: "\xCD",
|
||
Icirc: "\xCE",
|
||
Icy: "\u0418",
|
||
Idot: "\u0130",
|
||
Ifr: "\u2111",
|
||
Igrave: "\xCC",
|
||
Im: "\u2111",
|
||
Imacr: "\u012A",
|
||
ImaginaryI: "\u2148",
|
||
Implies: "\u21D2",
|
||
Int: "\u222C",
|
||
Integral: "\u222B",
|
||
Intersection: "\u22C2",
|
||
InvisibleComma: "\u2063",
|
||
InvisibleTimes: "\u2062",
|
||
Iogon: "\u012E",
|
||
Iopf: "\u{1D540}",
|
||
Iota: "\u0399",
|
||
Iscr: "\u2110",
|
||
Itilde: "\u0128",
|
||
Iukcy: "\u0406",
|
||
Iuml: "\xCF",
|
||
Jcirc: "\u0134",
|
||
Jcy: "\u0419",
|
||
Jfr: "\u{1D50D}",
|
||
Jopf: "\u{1D541}",
|
||
Jscr: "\u{1D4A5}",
|
||
Jsercy: "\u0408",
|
||
Jukcy: "\u0404",
|
||
KHcy: "\u0425",
|
||
KJcy: "\u040C",
|
||
Kappa: "\u039A",
|
||
Kcedil: "\u0136",
|
||
Kcy: "\u041A",
|
||
Kfr: "\u{1D50E}",
|
||
Kopf: "\u{1D542}",
|
||
Kscr: "\u{1D4A6}",
|
||
LJcy: "\u0409",
|
||
LT: "<",
|
||
Lacute: "\u0139",
|
||
Lambda: "\u039B",
|
||
Lang: "\u27EA",
|
||
Laplacetrf: "\u2112",
|
||
Larr: "\u219E",
|
||
Lcaron: "\u013D",
|
||
Lcedil: "\u013B",
|
||
Lcy: "\u041B",
|
||
LeftAngleBracket: "\u27E8",
|
||
LeftArrow: "\u2190",
|
||
LeftArrowBar: "\u21E4",
|
||
LeftArrowRightArrow: "\u21C6",
|
||
LeftCeiling: "\u2308",
|
||
LeftDoubleBracket: "\u27E6",
|
||
LeftDownTeeVector: "\u2961",
|
||
LeftDownVector: "\u21C3",
|
||
LeftDownVectorBar: "\u2959",
|
||
LeftFloor: "\u230A",
|
||
LeftRightArrow: "\u2194",
|
||
LeftRightVector: "\u294E",
|
||
LeftTee: "\u22A3",
|
||
LeftTeeArrow: "\u21A4",
|
||
LeftTeeVector: "\u295A",
|
||
LeftTriangle: "\u22B2",
|
||
LeftTriangleBar: "\u29CF",
|
||
LeftTriangleEqual: "\u22B4",
|
||
LeftUpDownVector: "\u2951",
|
||
LeftUpTeeVector: "\u2960",
|
||
LeftUpVector: "\u21BF",
|
||
LeftUpVectorBar: "\u2958",
|
||
LeftVector: "\u21BC",
|
||
LeftVectorBar: "\u2952",
|
||
Leftarrow: "\u21D0",
|
||
Leftrightarrow: "\u21D4",
|
||
LessEqualGreater: "\u22DA",
|
||
LessFullEqual: "\u2266",
|
||
LessGreater: "\u2276",
|
||
LessLess: "\u2AA1",
|
||
LessSlantEqual: "\u2A7D",
|
||
LessTilde: "\u2272",
|
||
Lfr: "\u{1D50F}",
|
||
Ll: "\u22D8",
|
||
Lleftarrow: "\u21DA",
|
||
Lmidot: "\u013F",
|
||
LongLeftArrow: "\u27F5",
|
||
LongLeftRightArrow: "\u27F7",
|
||
LongRightArrow: "\u27F6",
|
||
Longleftarrow: "\u27F8",
|
||
Longleftrightarrow: "\u27FA",
|
||
Longrightarrow: "\u27F9",
|
||
Lopf: "\u{1D543}",
|
||
LowerLeftArrow: "\u2199",
|
||
LowerRightArrow: "\u2198",
|
||
Lscr: "\u2112",
|
||
Lsh: "\u21B0",
|
||
Lstrok: "\u0141",
|
||
Lt: "\u226A",
|
||
Map: "\u2905",
|
||
Mcy: "\u041C",
|
||
MediumSpace: "\u205F",
|
||
Mellintrf: "\u2133",
|
||
Mfr: "\u{1D510}",
|
||
MinusPlus: "\u2213",
|
||
Mopf: "\u{1D544}",
|
||
Mscr: "\u2133",
|
||
Mu: "\u039C",
|
||
NJcy: "\u040A",
|
||
Nacute: "\u0143",
|
||
Ncaron: "\u0147",
|
||
Ncedil: "\u0145",
|
||
Ncy: "\u041D",
|
||
NegativeMediumSpace: "\u200B",
|
||
NegativeThickSpace: "\u200B",
|
||
NegativeThinSpace: "\u200B",
|
||
NegativeVeryThinSpace: "\u200B",
|
||
NestedGreaterGreater: "\u226B",
|
||
NestedLessLess: "\u226A",
|
||
NewLine: "\n",
|
||
Nfr: "\u{1D511}",
|
||
NoBreak: "\u2060",
|
||
NonBreakingSpace: "\xA0",
|
||
Nopf: "\u2115",
|
||
Not: "\u2AEC",
|
||
NotCongruent: "\u2262",
|
||
NotCupCap: "\u226D",
|
||
NotDoubleVerticalBar: "\u2226",
|
||
NotElement: "\u2209",
|
||
NotEqual: "\u2260",
|
||
NotEqualTilde: "\u2242\u0338",
|
||
NotExists: "\u2204",
|
||
NotGreater: "\u226F",
|
||
NotGreaterEqual: "\u2271",
|
||
NotGreaterFullEqual: "\u2267\u0338",
|
||
NotGreaterGreater: "\u226B\u0338",
|
||
NotGreaterLess: "\u2279",
|
||
NotGreaterSlantEqual: "\u2A7E\u0338",
|
||
NotGreaterTilde: "\u2275",
|
||
NotHumpDownHump: "\u224E\u0338",
|
||
NotHumpEqual: "\u224F\u0338",
|
||
NotLeftTriangle: "\u22EA",
|
||
NotLeftTriangleBar: "\u29CF\u0338",
|
||
NotLeftTriangleEqual: "\u22EC",
|
||
NotLess: "\u226E",
|
||
NotLessEqual: "\u2270",
|
||
NotLessGreater: "\u2278",
|
||
NotLessLess: "\u226A\u0338",
|
||
NotLessSlantEqual: "\u2A7D\u0338",
|
||
NotLessTilde: "\u2274",
|
||
NotNestedGreaterGreater: "\u2AA2\u0338",
|
||
NotNestedLessLess: "\u2AA1\u0338",
|
||
NotPrecedes: "\u2280",
|
||
NotPrecedesEqual: "\u2AAF\u0338",
|
||
NotPrecedesSlantEqual: "\u22E0",
|
||
NotReverseElement: "\u220C",
|
||
NotRightTriangle: "\u22EB",
|
||
NotRightTriangleBar: "\u29D0\u0338",
|
||
NotRightTriangleEqual: "\u22ED",
|
||
NotSquareSubset: "\u228F\u0338",
|
||
NotSquareSubsetEqual: "\u22E2",
|
||
NotSquareSuperset: "\u2290\u0338",
|
||
NotSquareSupersetEqual: "\u22E3",
|
||
NotSubset: "\u2282\u20D2",
|
||
NotSubsetEqual: "\u2288",
|
||
NotSucceeds: "\u2281",
|
||
NotSucceedsEqual: "\u2AB0\u0338",
|
||
NotSucceedsSlantEqual: "\u22E1",
|
||
NotSucceedsTilde: "\u227F\u0338",
|
||
NotSuperset: "\u2283\u20D2",
|
||
NotSupersetEqual: "\u2289",
|
||
NotTilde: "\u2241",
|
||
NotTildeEqual: "\u2244",
|
||
NotTildeFullEqual: "\u2247",
|
||
NotTildeTilde: "\u2249",
|
||
NotVerticalBar: "\u2224",
|
||
Nscr: "\u{1D4A9}",
|
||
Ntilde: "\xD1",
|
||
Nu: "\u039D",
|
||
OElig: "\u0152",
|
||
Oacute: "\xD3",
|
||
Ocirc: "\xD4",
|
||
Ocy: "\u041E",
|
||
Odblac: "\u0150",
|
||
Ofr: "\u{1D512}",
|
||
Ograve: "\xD2",
|
||
Omacr: "\u014C",
|
||
Omega: "\u03A9",
|
||
Omicron: "\u039F",
|
||
Oopf: "\u{1D546}",
|
||
OpenCurlyDoubleQuote: "\u201C",
|
||
OpenCurlyQuote: "\u2018",
|
||
Or: "\u2A54",
|
||
Oscr: "\u{1D4AA}",
|
||
Oslash: "\xD8",
|
||
Otilde: "\xD5",
|
||
Otimes: "\u2A37",
|
||
Ouml: "\xD6",
|
||
OverBar: "\u203E",
|
||
OverBrace: "\u23DE",
|
||
OverBracket: "\u23B4",
|
||
OverParenthesis: "\u23DC",
|
||
PartialD: "\u2202",
|
||
Pcy: "\u041F",
|
||
Pfr: "\u{1D513}",
|
||
Phi: "\u03A6",
|
||
Pi: "\u03A0",
|
||
PlusMinus: "\xB1",
|
||
Poincareplane: "\u210C",
|
||
Popf: "\u2119",
|
||
Pr: "\u2ABB",
|
||
Precedes: "\u227A",
|
||
PrecedesEqual: "\u2AAF",
|
||
PrecedesSlantEqual: "\u227C",
|
||
PrecedesTilde: "\u227E",
|
||
Prime: "\u2033",
|
||
Product: "\u220F",
|
||
Proportion: "\u2237",
|
||
Proportional: "\u221D",
|
||
Pscr: "\u{1D4AB}",
|
||
Psi: "\u03A8",
|
||
QUOT: '"',
|
||
Qfr: "\u{1D514}",
|
||
Qopf: "\u211A",
|
||
Qscr: "\u{1D4AC}",
|
||
RBarr: "\u2910",
|
||
REG: "\xAE",
|
||
Racute: "\u0154",
|
||
Rang: "\u27EB",
|
||
Rarr: "\u21A0",
|
||
Rarrtl: "\u2916",
|
||
Rcaron: "\u0158",
|
||
Rcedil: "\u0156",
|
||
Rcy: "\u0420",
|
||
Re: "\u211C",
|
||
ReverseElement: "\u220B",
|
||
ReverseEquilibrium: "\u21CB",
|
||
ReverseUpEquilibrium: "\u296F",
|
||
Rfr: "\u211C",
|
||
Rho: "\u03A1",
|
||
RightAngleBracket: "\u27E9",
|
||
RightArrow: "\u2192",
|
||
RightArrowBar: "\u21E5",
|
||
RightArrowLeftArrow: "\u21C4",
|
||
RightCeiling: "\u2309",
|
||
RightDoubleBracket: "\u27E7",
|
||
RightDownTeeVector: "\u295D",
|
||
RightDownVector: "\u21C2",
|
||
RightDownVectorBar: "\u2955",
|
||
RightFloor: "\u230B",
|
||
RightTee: "\u22A2",
|
||
RightTeeArrow: "\u21A6",
|
||
RightTeeVector: "\u295B",
|
||
RightTriangle: "\u22B3",
|
||
RightTriangleBar: "\u29D0",
|
||
RightTriangleEqual: "\u22B5",
|
||
RightUpDownVector: "\u294F",
|
||
RightUpTeeVector: "\u295C",
|
||
RightUpVector: "\u21BE",
|
||
RightUpVectorBar: "\u2954",
|
||
RightVector: "\u21C0",
|
||
RightVectorBar: "\u2953",
|
||
Rightarrow: "\u21D2",
|
||
Ropf: "\u211D",
|
||
RoundImplies: "\u2970",
|
||
Rrightarrow: "\u21DB",
|
||
Rscr: "\u211B",
|
||
Rsh: "\u21B1",
|
||
RuleDelayed: "\u29F4",
|
||
SHCHcy: "\u0429",
|
||
SHcy: "\u0428",
|
||
SOFTcy: "\u042C",
|
||
Sacute: "\u015A",
|
||
Sc: "\u2ABC",
|
||
Scaron: "\u0160",
|
||
Scedil: "\u015E",
|
||
Scirc: "\u015C",
|
||
Scy: "\u0421",
|
||
Sfr: "\u{1D516}",
|
||
ShortDownArrow: "\u2193",
|
||
ShortLeftArrow: "\u2190",
|
||
ShortRightArrow: "\u2192",
|
||
ShortUpArrow: "\u2191",
|
||
Sigma: "\u03A3",
|
||
SmallCircle: "\u2218",
|
||
Sopf: "\u{1D54A}",
|
||
Sqrt: "\u221A",
|
||
Square: "\u25A1",
|
||
SquareIntersection: "\u2293",
|
||
SquareSubset: "\u228F",
|
||
SquareSubsetEqual: "\u2291",
|
||
SquareSuperset: "\u2290",
|
||
SquareSupersetEqual: "\u2292",
|
||
SquareUnion: "\u2294",
|
||
Sscr: "\u{1D4AE}",
|
||
Star: "\u22C6",
|
||
Sub: "\u22D0",
|
||
Subset: "\u22D0",
|
||
SubsetEqual: "\u2286",
|
||
Succeeds: "\u227B",
|
||
SucceedsEqual: "\u2AB0",
|
||
SucceedsSlantEqual: "\u227D",
|
||
SucceedsTilde: "\u227F",
|
||
SuchThat: "\u220B",
|
||
Sum: "\u2211",
|
||
Sup: "\u22D1",
|
||
Superset: "\u2283",
|
||
SupersetEqual: "\u2287",
|
||
Supset: "\u22D1",
|
||
THORN: "\xDE",
|
||
TRADE: "\u2122",
|
||
TSHcy: "\u040B",
|
||
TScy: "\u0426",
|
||
Tab: " ",
|
||
Tau: "\u03A4",
|
||
Tcaron: "\u0164",
|
||
Tcedil: "\u0162",
|
||
Tcy: "\u0422",
|
||
Tfr: "\u{1D517}",
|
||
Therefore: "\u2234",
|
||
Theta: "\u0398",
|
||
ThickSpace: "\u205F\u200A",
|
||
ThinSpace: "\u2009",
|
||
Tilde: "\u223C",
|
||
TildeEqual: "\u2243",
|
||
TildeFullEqual: "\u2245",
|
||
TildeTilde: "\u2248",
|
||
Topf: "\u{1D54B}",
|
||
TripleDot: "\u20DB",
|
||
Tscr: "\u{1D4AF}",
|
||
Tstrok: "\u0166",
|
||
Uacute: "\xDA",
|
||
Uarr: "\u219F",
|
||
Uarrocir: "\u2949",
|
||
Ubrcy: "\u040E",
|
||
Ubreve: "\u016C",
|
||
Ucirc: "\xDB",
|
||
Ucy: "\u0423",
|
||
Udblac: "\u0170",
|
||
Ufr: "\u{1D518}",
|
||
Ugrave: "\xD9",
|
||
Umacr: "\u016A",
|
||
UnderBar: "_",
|
||
UnderBrace: "\u23DF",
|
||
UnderBracket: "\u23B5",
|
||
UnderParenthesis: "\u23DD",
|
||
Union: "\u22C3",
|
||
UnionPlus: "\u228E",
|
||
Uogon: "\u0172",
|
||
Uopf: "\u{1D54C}",
|
||
UpArrow: "\u2191",
|
||
UpArrowBar: "\u2912",
|
||
UpArrowDownArrow: "\u21C5",
|
||
UpDownArrow: "\u2195",
|
||
UpEquilibrium: "\u296E",
|
||
UpTee: "\u22A5",
|
||
UpTeeArrow: "\u21A5",
|
||
Uparrow: "\u21D1",
|
||
Updownarrow: "\u21D5",
|
||
UpperLeftArrow: "\u2196",
|
||
UpperRightArrow: "\u2197",
|
||
Upsi: "\u03D2",
|
||
Upsilon: "\u03A5",
|
||
Uring: "\u016E",
|
||
Uscr: "\u{1D4B0}",
|
||
Utilde: "\u0168",
|
||
Uuml: "\xDC",
|
||
VDash: "\u22AB",
|
||
Vbar: "\u2AEB",
|
||
Vcy: "\u0412",
|
||
Vdash: "\u22A9",
|
||
Vdashl: "\u2AE6",
|
||
Vee: "\u22C1",
|
||
Verbar: "\u2016",
|
||
Vert: "\u2016",
|
||
VerticalBar: "\u2223",
|
||
VerticalLine: "|",
|
||
VerticalSeparator: "\u2758",
|
||
VerticalTilde: "\u2240",
|
||
VeryThinSpace: "\u200A",
|
||
Vfr: "\u{1D519}",
|
||
Vopf: "\u{1D54D}",
|
||
Vscr: "\u{1D4B1}",
|
||
Vvdash: "\u22AA",
|
||
Wcirc: "\u0174",
|
||
Wedge: "\u22C0",
|
||
Wfr: "\u{1D51A}",
|
||
Wopf: "\u{1D54E}",
|
||
Wscr: "\u{1D4B2}",
|
||
Xfr: "\u{1D51B}",
|
||
Xi: "\u039E",
|
||
Xopf: "\u{1D54F}",
|
||
Xscr: "\u{1D4B3}",
|
||
YAcy: "\u042F",
|
||
YIcy: "\u0407",
|
||
YUcy: "\u042E",
|
||
Yacute: "\xDD",
|
||
Ycirc: "\u0176",
|
||
Ycy: "\u042B",
|
||
Yfr: "\u{1D51C}",
|
||
Yopf: "\u{1D550}",
|
||
Yscr: "\u{1D4B4}",
|
||
Yuml: "\u0178",
|
||
ZHcy: "\u0416",
|
||
Zacute: "\u0179",
|
||
Zcaron: "\u017D",
|
||
Zcy: "\u0417",
|
||
Zdot: "\u017B",
|
||
ZeroWidthSpace: "\u200B",
|
||
Zeta: "\u0396",
|
||
Zfr: "\u2128",
|
||
Zopf: "\u2124",
|
||
Zscr: "\u{1D4B5}",
|
||
aacute: "\xE1",
|
||
abreve: "\u0103",
|
||
ac: "\u223E",
|
||
acE: "\u223E\u0333",
|
||
acd: "\u223F",
|
||
acirc: "\xE2",
|
||
acute: "\xB4",
|
||
acy: "\u0430",
|
||
aelig: "\xE6",
|
||
af: "\u2061",
|
||
afr: "\u{1D51E}",
|
||
agrave: "\xE0",
|
||
alefsym: "\u2135",
|
||
aleph: "\u2135",
|
||
alpha: "\u03B1",
|
||
amacr: "\u0101",
|
||
amalg: "\u2A3F",
|
||
amp: "&",
|
||
and: "\u2227",
|
||
andand: "\u2A55",
|
||
andd: "\u2A5C",
|
||
andslope: "\u2A58",
|
||
andv: "\u2A5A",
|
||
ang: "\u2220",
|
||
ange: "\u29A4",
|
||
angle: "\u2220",
|
||
angmsd: "\u2221",
|
||
angmsdaa: "\u29A8",
|
||
angmsdab: "\u29A9",
|
||
angmsdac: "\u29AA",
|
||
angmsdad: "\u29AB",
|
||
angmsdae: "\u29AC",
|
||
angmsdaf: "\u29AD",
|
||
angmsdag: "\u29AE",
|
||
angmsdah: "\u29AF",
|
||
angrt: "\u221F",
|
||
angrtvb: "\u22BE",
|
||
angrtvbd: "\u299D",
|
||
angsph: "\u2222",
|
||
angst: "\xC5",
|
||
angzarr: "\u237C",
|
||
aogon: "\u0105",
|
||
aopf: "\u{1D552}",
|
||
ap: "\u2248",
|
||
apE: "\u2A70",
|
||
apacir: "\u2A6F",
|
||
ape: "\u224A",
|
||
apid: "\u224B",
|
||
apos: "'",
|
||
approx: "\u2248",
|
||
approxeq: "\u224A",
|
||
aring: "\xE5",
|
||
ascr: "\u{1D4B6}",
|
||
ast: "*",
|
||
asymp: "\u2248",
|
||
asympeq: "\u224D",
|
||
atilde: "\xE3",
|
||
auml: "\xE4",
|
||
awconint: "\u2233",
|
||
awint: "\u2A11",
|
||
bNot: "\u2AED",
|
||
backcong: "\u224C",
|
||
backepsilon: "\u03F6",
|
||
backprime: "\u2035",
|
||
backsim: "\u223D",
|
||
backsimeq: "\u22CD",
|
||
barvee: "\u22BD",
|
||
barwed: "\u2305",
|
||
barwedge: "\u2305",
|
||
bbrk: "\u23B5",
|
||
bbrktbrk: "\u23B6",
|
||
bcong: "\u224C",
|
||
bcy: "\u0431",
|
||
bdquo: "\u201E",
|
||
becaus: "\u2235",
|
||
because: "\u2235",
|
||
bemptyv: "\u29B0",
|
||
bepsi: "\u03F6",
|
||
bernou: "\u212C",
|
||
beta: "\u03B2",
|
||
beth: "\u2136",
|
||
between: "\u226C",
|
||
bfr: "\u{1D51F}",
|
||
bigcap: "\u22C2",
|
||
bigcirc: "\u25EF",
|
||
bigcup: "\u22C3",
|
||
bigodot: "\u2A00",
|
||
bigoplus: "\u2A01",
|
||
bigotimes: "\u2A02",
|
||
bigsqcup: "\u2A06",
|
||
bigstar: "\u2605",
|
||
bigtriangledown: "\u25BD",
|
||
bigtriangleup: "\u25B3",
|
||
biguplus: "\u2A04",
|
||
bigvee: "\u22C1",
|
||
bigwedge: "\u22C0",
|
||
bkarow: "\u290D",
|
||
blacklozenge: "\u29EB",
|
||
blacksquare: "\u25AA",
|
||
blacktriangle: "\u25B4",
|
||
blacktriangledown: "\u25BE",
|
||
blacktriangleleft: "\u25C2",
|
||
blacktriangleright: "\u25B8",
|
||
blank: "\u2423",
|
||
blk12: "\u2592",
|
||
blk14: "\u2591",
|
||
blk34: "\u2593",
|
||
block: "\u2588",
|
||
bne: "=\u20E5",
|
||
bnequiv: "\u2261\u20E5",
|
||
bnot: "\u2310",
|
||
bopf: "\u{1D553}",
|
||
bot: "\u22A5",
|
||
bottom: "\u22A5",
|
||
bowtie: "\u22C8",
|
||
boxDL: "\u2557",
|
||
boxDR: "\u2554",
|
||
boxDl: "\u2556",
|
||
boxDr: "\u2553",
|
||
boxH: "\u2550",
|
||
boxHD: "\u2566",
|
||
boxHU: "\u2569",
|
||
boxHd: "\u2564",
|
||
boxHu: "\u2567",
|
||
boxUL: "\u255D",
|
||
boxUR: "\u255A",
|
||
boxUl: "\u255C",
|
||
boxUr: "\u2559",
|
||
boxV: "\u2551",
|
||
boxVH: "\u256C",
|
||
boxVL: "\u2563",
|
||
boxVR: "\u2560",
|
||
boxVh: "\u256B",
|
||
boxVl: "\u2562",
|
||
boxVr: "\u255F",
|
||
boxbox: "\u29C9",
|
||
boxdL: "\u2555",
|
||
boxdR: "\u2552",
|
||
boxdl: "\u2510",
|
||
boxdr: "\u250C",
|
||
boxh: "\u2500",
|
||
boxhD: "\u2565",
|
||
boxhU: "\u2568",
|
||
boxhd: "\u252C",
|
||
boxhu: "\u2534",
|
||
boxminus: "\u229F",
|
||
boxplus: "\u229E",
|
||
boxtimes: "\u22A0",
|
||
boxuL: "\u255B",
|
||
boxuR: "\u2558",
|
||
boxul: "\u2518",
|
||
boxur: "\u2514",
|
||
boxv: "\u2502",
|
||
boxvH: "\u256A",
|
||
boxvL: "\u2561",
|
||
boxvR: "\u255E",
|
||
boxvh: "\u253C",
|
||
boxvl: "\u2524",
|
||
boxvr: "\u251C",
|
||
bprime: "\u2035",
|
||
breve: "\u02D8",
|
||
brvbar: "\xA6",
|
||
bscr: "\u{1D4B7}",
|
||
bsemi: "\u204F",
|
||
bsim: "\u223D",
|
||
bsime: "\u22CD",
|
||
bsol: "\\",
|
||
bsolb: "\u29C5",
|
||
bsolhsub: "\u27C8",
|
||
bull: "\u2022",
|
||
bullet: "\u2022",
|
||
bump: "\u224E",
|
||
bumpE: "\u2AAE",
|
||
bumpe: "\u224F",
|
||
bumpeq: "\u224F",
|
||
cacute: "\u0107",
|
||
cap: "\u2229",
|
||
capand: "\u2A44",
|
||
capbrcup: "\u2A49",
|
||
capcap: "\u2A4B",
|
||
capcup: "\u2A47",
|
||
capdot: "\u2A40",
|
||
caps: "\u2229\uFE00",
|
||
caret: "\u2041",
|
||
caron: "\u02C7",
|
||
ccaps: "\u2A4D",
|
||
ccaron: "\u010D",
|
||
ccedil: "\xE7",
|
||
ccirc: "\u0109",
|
||
ccups: "\u2A4C",
|
||
ccupssm: "\u2A50",
|
||
cdot: "\u010B",
|
||
cedil: "\xB8",
|
||
cemptyv: "\u29B2",
|
||
cent: "\xA2",
|
||
centerdot: "\xB7",
|
||
cfr: "\u{1D520}",
|
||
chcy: "\u0447",
|
||
check: "\u2713",
|
||
checkmark: "\u2713",
|
||
chi: "\u03C7",
|
||
cir: "\u25CB",
|
||
cirE: "\u29C3",
|
||
circ: "\u02C6",
|
||
circeq: "\u2257",
|
||
circlearrowleft: "\u21BA",
|
||
circlearrowright: "\u21BB",
|
||
circledR: "\xAE",
|
||
circledS: "\u24C8",
|
||
circledast: "\u229B",
|
||
circledcirc: "\u229A",
|
||
circleddash: "\u229D",
|
||
cire: "\u2257",
|
||
cirfnint: "\u2A10",
|
||
cirmid: "\u2AEF",
|
||
cirscir: "\u29C2",
|
||
clubs: "\u2663",
|
||
clubsuit: "\u2663",
|
||
colon: ":",
|
||
colone: "\u2254",
|
||
coloneq: "\u2254",
|
||
comma: ",",
|
||
commat: "@",
|
||
comp: "\u2201",
|
||
compfn: "\u2218",
|
||
complement: "\u2201",
|
||
complexes: "\u2102",
|
||
cong: "\u2245",
|
||
congdot: "\u2A6D",
|
||
conint: "\u222E",
|
||
copf: "\u{1D554}",
|
||
coprod: "\u2210",
|
||
copy: "\xA9",
|
||
copysr: "\u2117",
|
||
crarr: "\u21B5",
|
||
cross: "\u2717",
|
||
cscr: "\u{1D4B8}",
|
||
csub: "\u2ACF",
|
||
csube: "\u2AD1",
|
||
csup: "\u2AD0",
|
||
csupe: "\u2AD2",
|
||
ctdot: "\u22EF",
|
||
cudarrl: "\u2938",
|
||
cudarrr: "\u2935",
|
||
cuepr: "\u22DE",
|
||
cuesc: "\u22DF",
|
||
cularr: "\u21B6",
|
||
cularrp: "\u293D",
|
||
cup: "\u222A",
|
||
cupbrcap: "\u2A48",
|
||
cupcap: "\u2A46",
|
||
cupcup: "\u2A4A",
|
||
cupdot: "\u228D",
|
||
cupor: "\u2A45",
|
||
cups: "\u222A\uFE00",
|
||
curarr: "\u21B7",
|
||
curarrm: "\u293C",
|
||
curlyeqprec: "\u22DE",
|
||
curlyeqsucc: "\u22DF",
|
||
curlyvee: "\u22CE",
|
||
curlywedge: "\u22CF",
|
||
curren: "\xA4",
|
||
curvearrowleft: "\u21B6",
|
||
curvearrowright: "\u21B7",
|
||
cuvee: "\u22CE",
|
||
cuwed: "\u22CF",
|
||
cwconint: "\u2232",
|
||
cwint: "\u2231",
|
||
cylcty: "\u232D",
|
||
dArr: "\u21D3",
|
||
dHar: "\u2965",
|
||
dagger: "\u2020",
|
||
daleth: "\u2138",
|
||
darr: "\u2193",
|
||
dash: "\u2010",
|
||
dashv: "\u22A3",
|
||
dbkarow: "\u290F",
|
||
dblac: "\u02DD",
|
||
dcaron: "\u010F",
|
||
dcy: "\u0434",
|
||
dd: "\u2146",
|
||
ddagger: "\u2021",
|
||
ddarr: "\u21CA",
|
||
ddotseq: "\u2A77",
|
||
deg: "\xB0",
|
||
delta: "\u03B4",
|
||
demptyv: "\u29B1",
|
||
dfisht: "\u297F",
|
||
dfr: "\u{1D521}",
|
||
dharl: "\u21C3",
|
||
dharr: "\u21C2",
|
||
diam: "\u22C4",
|
||
diamond: "\u22C4",
|
||
diamondsuit: "\u2666",
|
||
diams: "\u2666",
|
||
die: "\xA8",
|
||
digamma: "\u03DD",
|
||
disin: "\u22F2",
|
||
div: "\xF7",
|
||
divide: "\xF7",
|
||
divideontimes: "\u22C7",
|
||
divonx: "\u22C7",
|
||
djcy: "\u0452",
|
||
dlcorn: "\u231E",
|
||
dlcrop: "\u230D",
|
||
dollar: "$",
|
||
dopf: "\u{1D555}",
|
||
dot: "\u02D9",
|
||
doteq: "\u2250",
|
||
doteqdot: "\u2251",
|
||
dotminus: "\u2238",
|
||
dotplus: "\u2214",
|
||
dotsquare: "\u22A1",
|
||
doublebarwedge: "\u2306",
|
||
downarrow: "\u2193",
|
||
downdownarrows: "\u21CA",
|
||
downharpoonleft: "\u21C3",
|
||
downharpoonright: "\u21C2",
|
||
drbkarow: "\u2910",
|
||
drcorn: "\u231F",
|
||
drcrop: "\u230C",
|
||
dscr: "\u{1D4B9}",
|
||
dscy: "\u0455",
|
||
dsol: "\u29F6",
|
||
dstrok: "\u0111",
|
||
dtdot: "\u22F1",
|
||
dtri: "\u25BF",
|
||
dtrif: "\u25BE",
|
||
duarr: "\u21F5",
|
||
duhar: "\u296F",
|
||
dwangle: "\u29A6",
|
||
dzcy: "\u045F",
|
||
dzigrarr: "\u27FF",
|
||
eDDot: "\u2A77",
|
||
eDot: "\u2251",
|
||
eacute: "\xE9",
|
||
easter: "\u2A6E",
|
||
ecaron: "\u011B",
|
||
ecir: "\u2256",
|
||
ecirc: "\xEA",
|
||
ecolon: "\u2255",
|
||
ecy: "\u044D",
|
||
edot: "\u0117",
|
||
ee: "\u2147",
|
||
efDot: "\u2252",
|
||
efr: "\u{1D522}",
|
||
eg: "\u2A9A",
|
||
egrave: "\xE8",
|
||
egs: "\u2A96",
|
||
egsdot: "\u2A98",
|
||
el: "\u2A99",
|
||
elinters: "\u23E7",
|
||
ell: "\u2113",
|
||
els: "\u2A95",
|
||
elsdot: "\u2A97",
|
||
emacr: "\u0113",
|
||
empty: "\u2205",
|
||
emptyset: "\u2205",
|
||
emptyv: "\u2205",
|
||
emsp13: "\u2004",
|
||
emsp14: "\u2005",
|
||
emsp: "\u2003",
|
||
eng: "\u014B",
|
||
ensp: "\u2002",
|
||
eogon: "\u0119",
|
||
eopf: "\u{1D556}",
|
||
epar: "\u22D5",
|
||
eparsl: "\u29E3",
|
||
eplus: "\u2A71",
|
||
epsi: "\u03B5",
|
||
epsilon: "\u03B5",
|
||
epsiv: "\u03F5",
|
||
eqcirc: "\u2256",
|
||
eqcolon: "\u2255",
|
||
eqsim: "\u2242",
|
||
eqslantgtr: "\u2A96",
|
||
eqslantless: "\u2A95",
|
||
equals: "=",
|
||
equest: "\u225F",
|
||
equiv: "\u2261",
|
||
equivDD: "\u2A78",
|
||
eqvparsl: "\u29E5",
|
||
erDot: "\u2253",
|
||
erarr: "\u2971",
|
||
escr: "\u212F",
|
||
esdot: "\u2250",
|
||
esim: "\u2242",
|
||
eta: "\u03B7",
|
||
eth: "\xF0",
|
||
euml: "\xEB",
|
||
euro: "\u20AC",
|
||
excl: "!",
|
||
exist: "\u2203",
|
||
expectation: "\u2130",
|
||
exponentiale: "\u2147",
|
||
fallingdotseq: "\u2252",
|
||
fcy: "\u0444",
|
||
female: "\u2640",
|
||
ffilig: "\uFB03",
|
||
fflig: "\uFB00",
|
||
ffllig: "\uFB04",
|
||
ffr: "\u{1D523}",
|
||
filig: "\uFB01",
|
||
fjlig: "fj",
|
||
flat: "\u266D",
|
||
fllig: "\uFB02",
|
||
fltns: "\u25B1",
|
||
fnof: "\u0192",
|
||
fopf: "\u{1D557}",
|
||
forall: "\u2200",
|
||
fork: "\u22D4",
|
||
forkv: "\u2AD9",
|
||
fpartint: "\u2A0D",
|
||
frac12: "\xBD",
|
||
frac13: "\u2153",
|
||
frac14: "\xBC",
|
||
frac15: "\u2155",
|
||
frac16: "\u2159",
|
||
frac18: "\u215B",
|
||
frac23: "\u2154",
|
||
frac25: "\u2156",
|
||
frac34: "\xBE",
|
||
frac35: "\u2157",
|
||
frac38: "\u215C",
|
||
frac45: "\u2158",
|
||
frac56: "\u215A",
|
||
frac58: "\u215D",
|
||
frac78: "\u215E",
|
||
frasl: "\u2044",
|
||
frown: "\u2322",
|
||
fscr: "\u{1D4BB}",
|
||
gE: "\u2267",
|
||
gEl: "\u2A8C",
|
||
gacute: "\u01F5",
|
||
gamma: "\u03B3",
|
||
gammad: "\u03DD",
|
||
gap: "\u2A86",
|
||
gbreve: "\u011F",
|
||
gcirc: "\u011D",
|
||
gcy: "\u0433",
|
||
gdot: "\u0121",
|
||
ge: "\u2265",
|
||
gel: "\u22DB",
|
||
geq: "\u2265",
|
||
geqq: "\u2267",
|
||
geqslant: "\u2A7E",
|
||
ges: "\u2A7E",
|
||
gescc: "\u2AA9",
|
||
gesdot: "\u2A80",
|
||
gesdoto: "\u2A82",
|
||
gesdotol: "\u2A84",
|
||
gesl: "\u22DB\uFE00",
|
||
gesles: "\u2A94",
|
||
gfr: "\u{1D524}",
|
||
gg: "\u226B",
|
||
ggg: "\u22D9",
|
||
gimel: "\u2137",
|
||
gjcy: "\u0453",
|
||
gl: "\u2277",
|
||
glE: "\u2A92",
|
||
gla: "\u2AA5",
|
||
glj: "\u2AA4",
|
||
gnE: "\u2269",
|
||
gnap: "\u2A8A",
|
||
gnapprox: "\u2A8A",
|
||
gne: "\u2A88",
|
||
gneq: "\u2A88",
|
||
gneqq: "\u2269",
|
||
gnsim: "\u22E7",
|
||
gopf: "\u{1D558}",
|
||
grave: "`",
|
||
gscr: "\u210A",
|
||
gsim: "\u2273",
|
||
gsime: "\u2A8E",
|
||
gsiml: "\u2A90",
|
||
gt: ">",
|
||
gtcc: "\u2AA7",
|
||
gtcir: "\u2A7A",
|
||
gtdot: "\u22D7",
|
||
gtlPar: "\u2995",
|
||
gtquest: "\u2A7C",
|
||
gtrapprox: "\u2A86",
|
||
gtrarr: "\u2978",
|
||
gtrdot: "\u22D7",
|
||
gtreqless: "\u22DB",
|
||
gtreqqless: "\u2A8C",
|
||
gtrless: "\u2277",
|
||
gtrsim: "\u2273",
|
||
gvertneqq: "\u2269\uFE00",
|
||
gvnE: "\u2269\uFE00",
|
||
hArr: "\u21D4",
|
||
hairsp: "\u200A",
|
||
half: "\xBD",
|
||
hamilt: "\u210B",
|
||
hardcy: "\u044A",
|
||
harr: "\u2194",
|
||
harrcir: "\u2948",
|
||
harrw: "\u21AD",
|
||
hbar: "\u210F",
|
||
hcirc: "\u0125",
|
||
hearts: "\u2665",
|
||
heartsuit: "\u2665",
|
||
hellip: "\u2026",
|
||
hercon: "\u22B9",
|
||
hfr: "\u{1D525}",
|
||
hksearow: "\u2925",
|
||
hkswarow: "\u2926",
|
||
hoarr: "\u21FF",
|
||
homtht: "\u223B",
|
||
hookleftarrow: "\u21A9",
|
||
hookrightarrow: "\u21AA",
|
||
hopf: "\u{1D559}",
|
||
horbar: "\u2015",
|
||
hscr: "\u{1D4BD}",
|
||
hslash: "\u210F",
|
||
hstrok: "\u0127",
|
||
hybull: "\u2043",
|
||
hyphen: "\u2010",
|
||
iacute: "\xED",
|
||
ic: "\u2063",
|
||
icirc: "\xEE",
|
||
icy: "\u0438",
|
||
iecy: "\u0435",
|
||
iexcl: "\xA1",
|
||
iff: "\u21D4",
|
||
ifr: "\u{1D526}",
|
||
igrave: "\xEC",
|
||
ii: "\u2148",
|
||
iiiint: "\u2A0C",
|
||
iiint: "\u222D",
|
||
iinfin: "\u29DC",
|
||
iiota: "\u2129",
|
||
ijlig: "\u0133",
|
||
imacr: "\u012B",
|
||
image: "\u2111",
|
||
imagline: "\u2110",
|
||
imagpart: "\u2111",
|
||
imath: "\u0131",
|
||
imof: "\u22B7",
|
||
imped: "\u01B5",
|
||
in: "\u2208",
|
||
incare: "\u2105",
|
||
infin: "\u221E",
|
||
infintie: "\u29DD",
|
||
inodot: "\u0131",
|
||
int: "\u222B",
|
||
intcal: "\u22BA",
|
||
integers: "\u2124",
|
||
intercal: "\u22BA",
|
||
intlarhk: "\u2A17",
|
||
intprod: "\u2A3C",
|
||
iocy: "\u0451",
|
||
iogon: "\u012F",
|
||
iopf: "\u{1D55A}",
|
||
iota: "\u03B9",
|
||
iprod: "\u2A3C",
|
||
iquest: "\xBF",
|
||
iscr: "\u{1D4BE}",
|
||
isin: "\u2208",
|
||
isinE: "\u22F9",
|
||
isindot: "\u22F5",
|
||
isins: "\u22F4",
|
||
isinsv: "\u22F3",
|
||
isinv: "\u2208",
|
||
it: "\u2062",
|
||
itilde: "\u0129",
|
||
iukcy: "\u0456",
|
||
iuml: "\xEF",
|
||
jcirc: "\u0135",
|
||
jcy: "\u0439",
|
||
jfr: "\u{1D527}",
|
||
jmath: "\u0237",
|
||
jopf: "\u{1D55B}",
|
||
jscr: "\u{1D4BF}",
|
||
jsercy: "\u0458",
|
||
jukcy: "\u0454",
|
||
kappa: "\u03BA",
|
||
kappav: "\u03F0",
|
||
kcedil: "\u0137",
|
||
kcy: "\u043A",
|
||
kfr: "\u{1D528}",
|
||
kgreen: "\u0138",
|
||
khcy: "\u0445",
|
||
kjcy: "\u045C",
|
||
kopf: "\u{1D55C}",
|
||
kscr: "\u{1D4C0}",
|
||
lAarr: "\u21DA",
|
||
lArr: "\u21D0",
|
||
lAtail: "\u291B",
|
||
lBarr: "\u290E",
|
||
lE: "\u2266",
|
||
lEg: "\u2A8B",
|
||
lHar: "\u2962",
|
||
lacute: "\u013A",
|
||
laemptyv: "\u29B4",
|
||
lagran: "\u2112",
|
||
lambda: "\u03BB",
|
||
lang: "\u27E8",
|
||
langd: "\u2991",
|
||
langle: "\u27E8",
|
||
lap: "\u2A85",
|
||
laquo: "\xAB",
|
||
larr: "\u2190",
|
||
larrb: "\u21E4",
|
||
larrbfs: "\u291F",
|
||
larrfs: "\u291D",
|
||
larrhk: "\u21A9",
|
||
larrlp: "\u21AB",
|
||
larrpl: "\u2939",
|
||
larrsim: "\u2973",
|
||
larrtl: "\u21A2",
|
||
lat: "\u2AAB",
|
||
latail: "\u2919",
|
||
late: "\u2AAD",
|
||
lates: "\u2AAD\uFE00",
|
||
lbarr: "\u290C",
|
||
lbbrk: "\u2772",
|
||
lbrace: "{",
|
||
lbrack: "[",
|
||
lbrke: "\u298B",
|
||
lbrksld: "\u298F",
|
||
lbrkslu: "\u298D",
|
||
lcaron: "\u013E",
|
||
lcedil: "\u013C",
|
||
lceil: "\u2308",
|
||
lcub: "{",
|
||
lcy: "\u043B",
|
||
ldca: "\u2936",
|
||
ldquo: "\u201C",
|
||
ldquor: "\u201E",
|
||
ldrdhar: "\u2967",
|
||
ldrushar: "\u294B",
|
||
ldsh: "\u21B2",
|
||
le: "\u2264",
|
||
leftarrow: "\u2190",
|
||
leftarrowtail: "\u21A2",
|
||
leftharpoondown: "\u21BD",
|
||
leftharpoonup: "\u21BC",
|
||
leftleftarrows: "\u21C7",
|
||
leftrightarrow: "\u2194",
|
||
leftrightarrows: "\u21C6",
|
||
leftrightharpoons: "\u21CB",
|
||
leftrightsquigarrow: "\u21AD",
|
||
leftthreetimes: "\u22CB",
|
||
leg: "\u22DA",
|
||
leq: "\u2264",
|
||
leqq: "\u2266",
|
||
leqslant: "\u2A7D",
|
||
les: "\u2A7D",
|
||
lescc: "\u2AA8",
|
||
lesdot: "\u2A7F",
|
||
lesdoto: "\u2A81",
|
||
lesdotor: "\u2A83",
|
||
lesg: "\u22DA\uFE00",
|
||
lesges: "\u2A93",
|
||
lessapprox: "\u2A85",
|
||
lessdot: "\u22D6",
|
||
lesseqgtr: "\u22DA",
|
||
lesseqqgtr: "\u2A8B",
|
||
lessgtr: "\u2276",
|
||
lesssim: "\u2272",
|
||
lfisht: "\u297C",
|
||
lfloor: "\u230A",
|
||
lfr: "\u{1D529}",
|
||
lg: "\u2276",
|
||
lgE: "\u2A91",
|
||
lhard: "\u21BD",
|
||
lharu: "\u21BC",
|
||
lharul: "\u296A",
|
||
lhblk: "\u2584",
|
||
ljcy: "\u0459",
|
||
ll: "\u226A",
|
||
llarr: "\u21C7",
|
||
llcorner: "\u231E",
|
||
llhard: "\u296B",
|
||
lltri: "\u25FA",
|
||
lmidot: "\u0140",
|
||
lmoust: "\u23B0",
|
||
lmoustache: "\u23B0",
|
||
lnE: "\u2268",
|
||
lnap: "\u2A89",
|
||
lnapprox: "\u2A89",
|
||
lne: "\u2A87",
|
||
lneq: "\u2A87",
|
||
lneqq: "\u2268",
|
||
lnsim: "\u22E6",
|
||
loang: "\u27EC",
|
||
loarr: "\u21FD",
|
||
lobrk: "\u27E6",
|
||
longleftarrow: "\u27F5",
|
||
longleftrightarrow: "\u27F7",
|
||
longmapsto: "\u27FC",
|
||
longrightarrow: "\u27F6",
|
||
looparrowleft: "\u21AB",
|
||
looparrowright: "\u21AC",
|
||
lopar: "\u2985",
|
||
lopf: "\u{1D55D}",
|
||
loplus: "\u2A2D",
|
||
lotimes: "\u2A34",
|
||
lowast: "\u2217",
|
||
lowbar: "_",
|
||
loz: "\u25CA",
|
||
lozenge: "\u25CA",
|
||
lozf: "\u29EB",
|
||
lpar: "(",
|
||
lparlt: "\u2993",
|
||
lrarr: "\u21C6",
|
||
lrcorner: "\u231F",
|
||
lrhar: "\u21CB",
|
||
lrhard: "\u296D",
|
||
lrm: "\u200E",
|
||
lrtri: "\u22BF",
|
||
lsaquo: "\u2039",
|
||
lscr: "\u{1D4C1}",
|
||
lsh: "\u21B0",
|
||
lsim: "\u2272",
|
||
lsime: "\u2A8D",
|
||
lsimg: "\u2A8F",
|
||
lsqb: "[",
|
||
lsquo: "\u2018",
|
||
lsquor: "\u201A",
|
||
lstrok: "\u0142",
|
||
lt: "<",
|
||
ltcc: "\u2AA6",
|
||
ltcir: "\u2A79",
|
||
ltdot: "\u22D6",
|
||
lthree: "\u22CB",
|
||
ltimes: "\u22C9",
|
||
ltlarr: "\u2976",
|
||
ltquest: "\u2A7B",
|
||
ltrPar: "\u2996",
|
||
ltri: "\u25C3",
|
||
ltrie: "\u22B4",
|
||
ltrif: "\u25C2",
|
||
lurdshar: "\u294A",
|
||
luruhar: "\u2966",
|
||
lvertneqq: "\u2268\uFE00",
|
||
lvnE: "\u2268\uFE00",
|
||
mDDot: "\u223A",
|
||
macr: "\xAF",
|
||
male: "\u2642",
|
||
malt: "\u2720",
|
||
maltese: "\u2720",
|
||
map: "\u21A6",
|
||
mapsto: "\u21A6",
|
||
mapstodown: "\u21A7",
|
||
mapstoleft: "\u21A4",
|
||
mapstoup: "\u21A5",
|
||
marker: "\u25AE",
|
||
mcomma: "\u2A29",
|
||
mcy: "\u043C",
|
||
mdash: "\u2014",
|
||
measuredangle: "\u2221",
|
||
mfr: "\u{1D52A}",
|
||
mho: "\u2127",
|
||
micro: "\xB5",
|
||
mid: "\u2223",
|
||
midast: "*",
|
||
midcir: "\u2AF0",
|
||
middot: "\xB7",
|
||
minus: "\u2212",
|
||
minusb: "\u229F",
|
||
minusd: "\u2238",
|
||
minusdu: "\u2A2A",
|
||
mlcp: "\u2ADB",
|
||
mldr: "\u2026",
|
||
mnplus: "\u2213",
|
||
models: "\u22A7",
|
||
mopf: "\u{1D55E}",
|
||
mp: "\u2213",
|
||
mscr: "\u{1D4C2}",
|
||
mstpos: "\u223E",
|
||
mu: "\u03BC",
|
||
multimap: "\u22B8",
|
||
mumap: "\u22B8",
|
||
nGg: "\u22D9\u0338",
|
||
nGt: "\u226B\u20D2",
|
||
nGtv: "\u226B\u0338",
|
||
nLeftarrow: "\u21CD",
|
||
nLeftrightarrow: "\u21CE",
|
||
nLl: "\u22D8\u0338",
|
||
nLt: "\u226A\u20D2",
|
||
nLtv: "\u226A\u0338",
|
||
nRightarrow: "\u21CF",
|
||
nVDash: "\u22AF",
|
||
nVdash: "\u22AE",
|
||
nabla: "\u2207",
|
||
nacute: "\u0144",
|
||
nang: "\u2220\u20D2",
|
||
nap: "\u2249",
|
||
napE: "\u2A70\u0338",
|
||
napid: "\u224B\u0338",
|
||
napos: "\u0149",
|
||
napprox: "\u2249",
|
||
natur: "\u266E",
|
||
natural: "\u266E",
|
||
naturals: "\u2115",
|
||
nbsp: "\xA0",
|
||
nbump: "\u224E\u0338",
|
||
nbumpe: "\u224F\u0338",
|
||
ncap: "\u2A43",
|
||
ncaron: "\u0148",
|
||
ncedil: "\u0146",
|
||
ncong: "\u2247",
|
||
ncongdot: "\u2A6D\u0338",
|
||
ncup: "\u2A42",
|
||
ncy: "\u043D",
|
||
ndash: "\u2013",
|
||
ne: "\u2260",
|
||
neArr: "\u21D7",
|
||
nearhk: "\u2924",
|
||
nearr: "\u2197",
|
||
nearrow: "\u2197",
|
||
nedot: "\u2250\u0338",
|
||
nequiv: "\u2262",
|
||
nesear: "\u2928",
|
||
nesim: "\u2242\u0338",
|
||
nexist: "\u2204",
|
||
nexists: "\u2204",
|
||
nfr: "\u{1D52B}",
|
||
ngE: "\u2267\u0338",
|
||
nge: "\u2271",
|
||
ngeq: "\u2271",
|
||
ngeqq: "\u2267\u0338",
|
||
ngeqslant: "\u2A7E\u0338",
|
||
nges: "\u2A7E\u0338",
|
||
ngsim: "\u2275",
|
||
ngt: "\u226F",
|
||
ngtr: "\u226F",
|
||
nhArr: "\u21CE",
|
||
nharr: "\u21AE",
|
||
nhpar: "\u2AF2",
|
||
ni: "\u220B",
|
||
nis: "\u22FC",
|
||
nisd: "\u22FA",
|
||
niv: "\u220B",
|
||
njcy: "\u045A",
|
||
nlArr: "\u21CD",
|
||
nlE: "\u2266\u0338",
|
||
nlarr: "\u219A",
|
||
nldr: "\u2025",
|
||
nle: "\u2270",
|
||
nleftarrow: "\u219A",
|
||
nleftrightarrow: "\u21AE",
|
||
nleq: "\u2270",
|
||
nleqq: "\u2266\u0338",
|
||
nleqslant: "\u2A7D\u0338",
|
||
nles: "\u2A7D\u0338",
|
||
nless: "\u226E",
|
||
nlsim: "\u2274",
|
||
nlt: "\u226E",
|
||
nltri: "\u22EA",
|
||
nltrie: "\u22EC",
|
||
nmid: "\u2224",
|
||
nopf: "\u{1D55F}",
|
||
not: "\xAC",
|
||
notin: "\u2209",
|
||
notinE: "\u22F9\u0338",
|
||
notindot: "\u22F5\u0338",
|
||
notinva: "\u2209",
|
||
notinvb: "\u22F7",
|
||
notinvc: "\u22F6",
|
||
notni: "\u220C",
|
||
notniva: "\u220C",
|
||
notnivb: "\u22FE",
|
||
notnivc: "\u22FD",
|
||
npar: "\u2226",
|
||
nparallel: "\u2226",
|
||
nparsl: "\u2AFD\u20E5",
|
||
npart: "\u2202\u0338",
|
||
npolint: "\u2A14",
|
||
npr: "\u2280",
|
||
nprcue: "\u22E0",
|
||
npre: "\u2AAF\u0338",
|
||
nprec: "\u2280",
|
||
npreceq: "\u2AAF\u0338",
|
||
nrArr: "\u21CF",
|
||
nrarr: "\u219B",
|
||
nrarrc: "\u2933\u0338",
|
||
nrarrw: "\u219D\u0338",
|
||
nrightarrow: "\u219B",
|
||
nrtri: "\u22EB",
|
||
nrtrie: "\u22ED",
|
||
nsc: "\u2281",
|
||
nsccue: "\u22E1",
|
||
nsce: "\u2AB0\u0338",
|
||
nscr: "\u{1D4C3}",
|
||
nshortmid: "\u2224",
|
||
nshortparallel: "\u2226",
|
||
nsim: "\u2241",
|
||
nsime: "\u2244",
|
||
nsimeq: "\u2244",
|
||
nsmid: "\u2224",
|
||
nspar: "\u2226",
|
||
nsqsube: "\u22E2",
|
||
nsqsupe: "\u22E3",
|
||
nsub: "\u2284",
|
||
nsubE: "\u2AC5\u0338",
|
||
nsube: "\u2288",
|
||
nsubset: "\u2282\u20D2",
|
||
nsubseteq: "\u2288",
|
||
nsubseteqq: "\u2AC5\u0338",
|
||
nsucc: "\u2281",
|
||
nsucceq: "\u2AB0\u0338",
|
||
nsup: "\u2285",
|
||
nsupE: "\u2AC6\u0338",
|
||
nsupe: "\u2289",
|
||
nsupset: "\u2283\u20D2",
|
||
nsupseteq: "\u2289",
|
||
nsupseteqq: "\u2AC6\u0338",
|
||
ntgl: "\u2279",
|
||
ntilde: "\xF1",
|
||
ntlg: "\u2278",
|
||
ntriangleleft: "\u22EA",
|
||
ntrianglelefteq: "\u22EC",
|
||
ntriangleright: "\u22EB",
|
||
ntrianglerighteq: "\u22ED",
|
||
nu: "\u03BD",
|
||
num: "#",
|
||
numero: "\u2116",
|
||
numsp: "\u2007",
|
||
nvDash: "\u22AD",
|
||
nvHarr: "\u2904",
|
||
nvap: "\u224D\u20D2",
|
||
nvdash: "\u22AC",
|
||
nvge: "\u2265\u20D2",
|
||
nvgt: ">\u20D2",
|
||
nvinfin: "\u29DE",
|
||
nvlArr: "\u2902",
|
||
nvle: "\u2264\u20D2",
|
||
nvlt: "<\u20D2",
|
||
nvltrie: "\u22B4\u20D2",
|
||
nvrArr: "\u2903",
|
||
nvrtrie: "\u22B5\u20D2",
|
||
nvsim: "\u223C\u20D2",
|
||
nwArr: "\u21D6",
|
||
nwarhk: "\u2923",
|
||
nwarr: "\u2196",
|
||
nwarrow: "\u2196",
|
||
nwnear: "\u2927",
|
||
oS: "\u24C8",
|
||
oacute: "\xF3",
|
||
oast: "\u229B",
|
||
ocir: "\u229A",
|
||
ocirc: "\xF4",
|
||
ocy: "\u043E",
|
||
odash: "\u229D",
|
||
odblac: "\u0151",
|
||
odiv: "\u2A38",
|
||
odot: "\u2299",
|
||
odsold: "\u29BC",
|
||
oelig: "\u0153",
|
||
ofcir: "\u29BF",
|
||
ofr: "\u{1D52C}",
|
||
ogon: "\u02DB",
|
||
ograve: "\xF2",
|
||
ogt: "\u29C1",
|
||
ohbar: "\u29B5",
|
||
ohm: "\u03A9",
|
||
oint: "\u222E",
|
||
olarr: "\u21BA",
|
||
olcir: "\u29BE",
|
||
olcross: "\u29BB",
|
||
oline: "\u203E",
|
||
olt: "\u29C0",
|
||
omacr: "\u014D",
|
||
omega: "\u03C9",
|
||
omicron: "\u03BF",
|
||
omid: "\u29B6",
|
||
ominus: "\u2296",
|
||
oopf: "\u{1D560}",
|
||
opar: "\u29B7",
|
||
operp: "\u29B9",
|
||
oplus: "\u2295",
|
||
or: "\u2228",
|
||
orarr: "\u21BB",
|
||
ord: "\u2A5D",
|
||
order: "\u2134",
|
||
orderof: "\u2134",
|
||
ordf: "\xAA",
|
||
ordm: "\xBA",
|
||
origof: "\u22B6",
|
||
oror: "\u2A56",
|
||
orslope: "\u2A57",
|
||
orv: "\u2A5B",
|
||
oscr: "\u2134",
|
||
oslash: "\xF8",
|
||
osol: "\u2298",
|
||
otilde: "\xF5",
|
||
otimes: "\u2297",
|
||
otimesas: "\u2A36",
|
||
ouml: "\xF6",
|
||
ovbar: "\u233D",
|
||
par: "\u2225",
|
||
para: "\xB6",
|
||
parallel: "\u2225",
|
||
parsim: "\u2AF3",
|
||
parsl: "\u2AFD",
|
||
part: "\u2202",
|
||
pcy: "\u043F",
|
||
percnt: "%",
|
||
period: ".",
|
||
permil: "\u2030",
|
||
perp: "\u22A5",
|
||
pertenk: "\u2031",
|
||
pfr: "\u{1D52D}",
|
||
phi: "\u03C6",
|
||
phiv: "\u03D5",
|
||
phmmat: "\u2133",
|
||
phone: "\u260E",
|
||
pi: "\u03C0",
|
||
pitchfork: "\u22D4",
|
||
piv: "\u03D6",
|
||
planck: "\u210F",
|
||
planckh: "\u210E",
|
||
plankv: "\u210F",
|
||
plus: "+",
|
||
plusacir: "\u2A23",
|
||
plusb: "\u229E",
|
||
pluscir: "\u2A22",
|
||
plusdo: "\u2214",
|
||
plusdu: "\u2A25",
|
||
pluse: "\u2A72",
|
||
plusmn: "\xB1",
|
||
plussim: "\u2A26",
|
||
plustwo: "\u2A27",
|
||
pm: "\xB1",
|
||
pointint: "\u2A15",
|
||
popf: "\u{1D561}",
|
||
pound: "\xA3",
|
||
pr: "\u227A",
|
||
prE: "\u2AB3",
|
||
prap: "\u2AB7",
|
||
prcue: "\u227C",
|
||
pre: "\u2AAF",
|
||
prec: "\u227A",
|
||
precapprox: "\u2AB7",
|
||
preccurlyeq: "\u227C",
|
||
preceq: "\u2AAF",
|
||
precnapprox: "\u2AB9",
|
||
precneqq: "\u2AB5",
|
||
precnsim: "\u22E8",
|
||
precsim: "\u227E",
|
||
prime: "\u2032",
|
||
primes: "\u2119",
|
||
prnE: "\u2AB5",
|
||
prnap: "\u2AB9",
|
||
prnsim: "\u22E8",
|
||
prod: "\u220F",
|
||
profalar: "\u232E",
|
||
profline: "\u2312",
|
||
profsurf: "\u2313",
|
||
prop: "\u221D",
|
||
propto: "\u221D",
|
||
prsim: "\u227E",
|
||
prurel: "\u22B0",
|
||
pscr: "\u{1D4C5}",
|
||
psi: "\u03C8",
|
||
puncsp: "\u2008",
|
||
qfr: "\u{1D52E}",
|
||
qint: "\u2A0C",
|
||
qopf: "\u{1D562}",
|
||
qprime: "\u2057",
|
||
qscr: "\u{1D4C6}",
|
||
quaternions: "\u210D",
|
||
quatint: "\u2A16",
|
||
quest: "?",
|
||
questeq: "\u225F",
|
||
quot: '"',
|
||
rAarr: "\u21DB",
|
||
rArr: "\u21D2",
|
||
rAtail: "\u291C",
|
||
rBarr: "\u290F",
|
||
rHar: "\u2964",
|
||
race: "\u223D\u0331",
|
||
racute: "\u0155",
|
||
radic: "\u221A",
|
||
raemptyv: "\u29B3",
|
||
rang: "\u27E9",
|
||
rangd: "\u2992",
|
||
range: "\u29A5",
|
||
rangle: "\u27E9",
|
||
raquo: "\xBB",
|
||
rarr: "\u2192",
|
||
rarrap: "\u2975",
|
||
rarrb: "\u21E5",
|
||
rarrbfs: "\u2920",
|
||
rarrc: "\u2933",
|
||
rarrfs: "\u291E",
|
||
rarrhk: "\u21AA",
|
||
rarrlp: "\u21AC",
|
||
rarrpl: "\u2945",
|
||
rarrsim: "\u2974",
|
||
rarrtl: "\u21A3",
|
||
rarrw: "\u219D",
|
||
ratail: "\u291A",
|
||
ratio: "\u2236",
|
||
rationals: "\u211A",
|
||
rbarr: "\u290D",
|
||
rbbrk: "\u2773",
|
||
rbrace: "}",
|
||
rbrack: "]",
|
||
rbrke: "\u298C",
|
||
rbrksld: "\u298E",
|
||
rbrkslu: "\u2990",
|
||
rcaron: "\u0159",
|
||
rcedil: "\u0157",
|
||
rceil: "\u2309",
|
||
rcub: "}",
|
||
rcy: "\u0440",
|
||
rdca: "\u2937",
|
||
rdldhar: "\u2969",
|
||
rdquo: "\u201D",
|
||
rdquor: "\u201D",
|
||
rdsh: "\u21B3",
|
||
real: "\u211C",
|
||
realine: "\u211B",
|
||
realpart: "\u211C",
|
||
reals: "\u211D",
|
||
rect: "\u25AD",
|
||
reg: "\xAE",
|
||
rfisht: "\u297D",
|
||
rfloor: "\u230B",
|
||
rfr: "\u{1D52F}",
|
||
rhard: "\u21C1",
|
||
rharu: "\u21C0",
|
||
rharul: "\u296C",
|
||
rho: "\u03C1",
|
||
rhov: "\u03F1",
|
||
rightarrow: "\u2192",
|
||
rightarrowtail: "\u21A3",
|
||
rightharpoondown: "\u21C1",
|
||
rightharpoonup: "\u21C0",
|
||
rightleftarrows: "\u21C4",
|
||
rightleftharpoons: "\u21CC",
|
||
rightrightarrows: "\u21C9",
|
||
rightsquigarrow: "\u219D",
|
||
rightthreetimes: "\u22CC",
|
||
ring: "\u02DA",
|
||
risingdotseq: "\u2253",
|
||
rlarr: "\u21C4",
|
||
rlhar: "\u21CC",
|
||
rlm: "\u200F",
|
||
rmoust: "\u23B1",
|
||
rmoustache: "\u23B1",
|
||
rnmid: "\u2AEE",
|
||
roang: "\u27ED",
|
||
roarr: "\u21FE",
|
||
robrk: "\u27E7",
|
||
ropar: "\u2986",
|
||
ropf: "\u{1D563}",
|
||
roplus: "\u2A2E",
|
||
rotimes: "\u2A35",
|
||
rpar: ")",
|
||
rpargt: "\u2994",
|
||
rppolint: "\u2A12",
|
||
rrarr: "\u21C9",
|
||
rsaquo: "\u203A",
|
||
rscr: "\u{1D4C7}",
|
||
rsh: "\u21B1",
|
||
rsqb: "]",
|
||
rsquo: "\u2019",
|
||
rsquor: "\u2019",
|
||
rthree: "\u22CC",
|
||
rtimes: "\u22CA",
|
||
rtri: "\u25B9",
|
||
rtrie: "\u22B5",
|
||
rtrif: "\u25B8",
|
||
rtriltri: "\u29CE",
|
||
ruluhar: "\u2968",
|
||
rx: "\u211E",
|
||
sacute: "\u015B",
|
||
sbquo: "\u201A",
|
||
sc: "\u227B",
|
||
scE: "\u2AB4",
|
||
scap: "\u2AB8",
|
||
scaron: "\u0161",
|
||
sccue: "\u227D",
|
||
sce: "\u2AB0",
|
||
scedil: "\u015F",
|
||
scirc: "\u015D",
|
||
scnE: "\u2AB6",
|
||
scnap: "\u2ABA",
|
||
scnsim: "\u22E9",
|
||
scpolint: "\u2A13",
|
||
scsim: "\u227F",
|
||
scy: "\u0441",
|
||
sdot: "\u22C5",
|
||
sdotb: "\u22A1",
|
||
sdote: "\u2A66",
|
||
seArr: "\u21D8",
|
||
searhk: "\u2925",
|
||
searr: "\u2198",
|
||
searrow: "\u2198",
|
||
sect: "\xA7",
|
||
semi: ";",
|
||
seswar: "\u2929",
|
||
setminus: "\u2216",
|
||
setmn: "\u2216",
|
||
sext: "\u2736",
|
||
sfr: "\u{1D530}",
|
||
sfrown: "\u2322",
|
||
sharp: "\u266F",
|
||
shchcy: "\u0449",
|
||
shcy: "\u0448",
|
||
shortmid: "\u2223",
|
||
shortparallel: "\u2225",
|
||
shy: "\xAD",
|
||
sigma: "\u03C3",
|
||
sigmaf: "\u03C2",
|
||
sigmav: "\u03C2",
|
||
sim: "\u223C",
|
||
simdot: "\u2A6A",
|
||
sime: "\u2243",
|
||
simeq: "\u2243",
|
||
simg: "\u2A9E",
|
||
simgE: "\u2AA0",
|
||
siml: "\u2A9D",
|
||
simlE: "\u2A9F",
|
||
simne: "\u2246",
|
||
simplus: "\u2A24",
|
||
simrarr: "\u2972",
|
||
slarr: "\u2190",
|
||
smallsetminus: "\u2216",
|
||
smashp: "\u2A33",
|
||
smeparsl: "\u29E4",
|
||
smid: "\u2223",
|
||
smile: "\u2323",
|
||
smt: "\u2AAA",
|
||
smte: "\u2AAC",
|
||
smtes: "\u2AAC\uFE00",
|
||
softcy: "\u044C",
|
||
sol: "/",
|
||
solb: "\u29C4",
|
||
solbar: "\u233F",
|
||
sopf: "\u{1D564}",
|
||
spades: "\u2660",
|
||
spadesuit: "\u2660",
|
||
spar: "\u2225",
|
||
sqcap: "\u2293",
|
||
sqcaps: "\u2293\uFE00",
|
||
sqcup: "\u2294",
|
||
sqcups: "\u2294\uFE00",
|
||
sqsub: "\u228F",
|
||
sqsube: "\u2291",
|
||
sqsubset: "\u228F",
|
||
sqsubseteq: "\u2291",
|
||
sqsup: "\u2290",
|
||
sqsupe: "\u2292",
|
||
sqsupset: "\u2290",
|
||
sqsupseteq: "\u2292",
|
||
squ: "\u25A1",
|
||
square: "\u25A1",
|
||
squarf: "\u25AA",
|
||
squf: "\u25AA",
|
||
srarr: "\u2192",
|
||
sscr: "\u{1D4C8}",
|
||
ssetmn: "\u2216",
|
||
ssmile: "\u2323",
|
||
sstarf: "\u22C6",
|
||
star: "\u2606",
|
||
starf: "\u2605",
|
||
straightepsilon: "\u03F5",
|
||
straightphi: "\u03D5",
|
||
strns: "\xAF",
|
||
sub: "\u2282",
|
||
subE: "\u2AC5",
|
||
subdot: "\u2ABD",
|
||
sube: "\u2286",
|
||
subedot: "\u2AC3",
|
||
submult: "\u2AC1",
|
||
subnE: "\u2ACB",
|
||
subne: "\u228A",
|
||
subplus: "\u2ABF",
|
||
subrarr: "\u2979",
|
||
subset: "\u2282",
|
||
subseteq: "\u2286",
|
||
subseteqq: "\u2AC5",
|
||
subsetneq: "\u228A",
|
||
subsetneqq: "\u2ACB",
|
||
subsim: "\u2AC7",
|
||
subsub: "\u2AD5",
|
||
subsup: "\u2AD3",
|
||
succ: "\u227B",
|
||
succapprox: "\u2AB8",
|
||
succcurlyeq: "\u227D",
|
||
succeq: "\u2AB0",
|
||
succnapprox: "\u2ABA",
|
||
succneqq: "\u2AB6",
|
||
succnsim: "\u22E9",
|
||
succsim: "\u227F",
|
||
sum: "\u2211",
|
||
sung: "\u266A",
|
||
sup1: "\xB9",
|
||
sup2: "\xB2",
|
||
sup3: "\xB3",
|
||
sup: "\u2283",
|
||
supE: "\u2AC6",
|
||
supdot: "\u2ABE",
|
||
supdsub: "\u2AD8",
|
||
supe: "\u2287",
|
||
supedot: "\u2AC4",
|
||
suphsol: "\u27C9",
|
||
suphsub: "\u2AD7",
|
||
suplarr: "\u297B",
|
||
supmult: "\u2AC2",
|
||
supnE: "\u2ACC",
|
||
supne: "\u228B",
|
||
supplus: "\u2AC0",
|
||
supset: "\u2283",
|
||
supseteq: "\u2287",
|
||
supseteqq: "\u2AC6",
|
||
supsetneq: "\u228B",
|
||
supsetneqq: "\u2ACC",
|
||
supsim: "\u2AC8",
|
||
supsub: "\u2AD4",
|
||
supsup: "\u2AD6",
|
||
swArr: "\u21D9",
|
||
swarhk: "\u2926",
|
||
swarr: "\u2199",
|
||
swarrow: "\u2199",
|
||
swnwar: "\u292A",
|
||
szlig: "\xDF",
|
||
target: "\u2316",
|
||
tau: "\u03C4",
|
||
tbrk: "\u23B4",
|
||
tcaron: "\u0165",
|
||
tcedil: "\u0163",
|
||
tcy: "\u0442",
|
||
tdot: "\u20DB",
|
||
telrec: "\u2315",
|
||
tfr: "\u{1D531}",
|
||
there4: "\u2234",
|
||
therefore: "\u2234",
|
||
theta: "\u03B8",
|
||
thetasym: "\u03D1",
|
||
thetav: "\u03D1",
|
||
thickapprox: "\u2248",
|
||
thicksim: "\u223C",
|
||
thinsp: "\u2009",
|
||
thkap: "\u2248",
|
||
thksim: "\u223C",
|
||
thorn: "\xFE",
|
||
tilde: "\u02DC",
|
||
times: "\xD7",
|
||
timesb: "\u22A0",
|
||
timesbar: "\u2A31",
|
||
timesd: "\u2A30",
|
||
tint: "\u222D",
|
||
toea: "\u2928",
|
||
top: "\u22A4",
|
||
topbot: "\u2336",
|
||
topcir: "\u2AF1",
|
||
topf: "\u{1D565}",
|
||
topfork: "\u2ADA",
|
||
tosa: "\u2929",
|
||
tprime: "\u2034",
|
||
trade: "\u2122",
|
||
triangle: "\u25B5",
|
||
triangledown: "\u25BF",
|
||
triangleleft: "\u25C3",
|
||
trianglelefteq: "\u22B4",
|
||
triangleq: "\u225C",
|
||
triangleright: "\u25B9",
|
||
trianglerighteq: "\u22B5",
|
||
tridot: "\u25EC",
|
||
trie: "\u225C",
|
||
triminus: "\u2A3A",
|
||
triplus: "\u2A39",
|
||
trisb: "\u29CD",
|
||
tritime: "\u2A3B",
|
||
trpezium: "\u23E2",
|
||
tscr: "\u{1D4C9}",
|
||
tscy: "\u0446",
|
||
tshcy: "\u045B",
|
||
tstrok: "\u0167",
|
||
twixt: "\u226C",
|
||
twoheadleftarrow: "\u219E",
|
||
twoheadrightarrow: "\u21A0",
|
||
uArr: "\u21D1",
|
||
uHar: "\u2963",
|
||
uacute: "\xFA",
|
||
uarr: "\u2191",
|
||
ubrcy: "\u045E",
|
||
ubreve: "\u016D",
|
||
ucirc: "\xFB",
|
||
ucy: "\u0443",
|
||
udarr: "\u21C5",
|
||
udblac: "\u0171",
|
||
udhar: "\u296E",
|
||
ufisht: "\u297E",
|
||
ufr: "\u{1D532}",
|
||
ugrave: "\xF9",
|
||
uharl: "\u21BF",
|
||
uharr: "\u21BE",
|
||
uhblk: "\u2580",
|
||
ulcorn: "\u231C",
|
||
ulcorner: "\u231C",
|
||
ulcrop: "\u230F",
|
||
ultri: "\u25F8",
|
||
umacr: "\u016B",
|
||
uml: "\xA8",
|
||
uogon: "\u0173",
|
||
uopf: "\u{1D566}",
|
||
uparrow: "\u2191",
|
||
updownarrow: "\u2195",
|
||
upharpoonleft: "\u21BF",
|
||
upharpoonright: "\u21BE",
|
||
uplus: "\u228E",
|
||
upsi: "\u03C5",
|
||
upsih: "\u03D2",
|
||
upsilon: "\u03C5",
|
||
upuparrows: "\u21C8",
|
||
urcorn: "\u231D",
|
||
urcorner: "\u231D",
|
||
urcrop: "\u230E",
|
||
uring: "\u016F",
|
||
urtri: "\u25F9",
|
||
uscr: "\u{1D4CA}",
|
||
utdot: "\u22F0",
|
||
utilde: "\u0169",
|
||
utri: "\u25B5",
|
||
utrif: "\u25B4",
|
||
uuarr: "\u21C8",
|
||
uuml: "\xFC",
|
||
uwangle: "\u29A7",
|
||
vArr: "\u21D5",
|
||
vBar: "\u2AE8",
|
||
vBarv: "\u2AE9",
|
||
vDash: "\u22A8",
|
||
vangrt: "\u299C",
|
||
varepsilon: "\u03F5",
|
||
varkappa: "\u03F0",
|
||
varnothing: "\u2205",
|
||
varphi: "\u03D5",
|
||
varpi: "\u03D6",
|
||
varpropto: "\u221D",
|
||
varr: "\u2195",
|
||
varrho: "\u03F1",
|
||
varsigma: "\u03C2",
|
||
varsubsetneq: "\u228A\uFE00",
|
||
varsubsetneqq: "\u2ACB\uFE00",
|
||
varsupsetneq: "\u228B\uFE00",
|
||
varsupsetneqq: "\u2ACC\uFE00",
|
||
vartheta: "\u03D1",
|
||
vartriangleleft: "\u22B2",
|
||
vartriangleright: "\u22B3",
|
||
vcy: "\u0432",
|
||
vdash: "\u22A2",
|
||
vee: "\u2228",
|
||
veebar: "\u22BB",
|
||
veeeq: "\u225A",
|
||
vellip: "\u22EE",
|
||
verbar: "|",
|
||
vert: "|",
|
||
vfr: "\u{1D533}",
|
||
vltri: "\u22B2",
|
||
vnsub: "\u2282\u20D2",
|
||
vnsup: "\u2283\u20D2",
|
||
vopf: "\u{1D567}",
|
||
vprop: "\u221D",
|
||
vrtri: "\u22B3",
|
||
vscr: "\u{1D4CB}",
|
||
vsubnE: "\u2ACB\uFE00",
|
||
vsubne: "\u228A\uFE00",
|
||
vsupnE: "\u2ACC\uFE00",
|
||
vsupne: "\u228B\uFE00",
|
||
vzigzag: "\u299A",
|
||
wcirc: "\u0175",
|
||
wedbar: "\u2A5F",
|
||
wedge: "\u2227",
|
||
wedgeq: "\u2259",
|
||
weierp: "\u2118",
|
||
wfr: "\u{1D534}",
|
||
wopf: "\u{1D568}",
|
||
wp: "\u2118",
|
||
wr: "\u2240",
|
||
wreath: "\u2240",
|
||
wscr: "\u{1D4CC}",
|
||
xcap: "\u22C2",
|
||
xcirc: "\u25EF",
|
||
xcup: "\u22C3",
|
||
xdtri: "\u25BD",
|
||
xfr: "\u{1D535}",
|
||
xhArr: "\u27FA",
|
||
xharr: "\u27F7",
|
||
xi: "\u03BE",
|
||
xlArr: "\u27F8",
|
||
xlarr: "\u27F5",
|
||
xmap: "\u27FC",
|
||
xnis: "\u22FB",
|
||
xodot: "\u2A00",
|
||
xopf: "\u{1D569}",
|
||
xoplus: "\u2A01",
|
||
xotime: "\u2A02",
|
||
xrArr: "\u27F9",
|
||
xrarr: "\u27F6",
|
||
xscr: "\u{1D4CD}",
|
||
xsqcup: "\u2A06",
|
||
xuplus: "\u2A04",
|
||
xutri: "\u25B3",
|
||
xvee: "\u22C1",
|
||
xwedge: "\u22C0",
|
||
yacute: "\xFD",
|
||
yacy: "\u044F",
|
||
ycirc: "\u0177",
|
||
ycy: "\u044B",
|
||
yen: "\xA5",
|
||
yfr: "\u{1D536}",
|
||
yicy: "\u0457",
|
||
yopf: "\u{1D56A}",
|
||
yscr: "\u{1D4CE}",
|
||
yucy: "\u044E",
|
||
yuml: "\xFF",
|
||
zacute: "\u017A",
|
||
zcaron: "\u017E",
|
||
zcy: "\u0437",
|
||
zdot: "\u017C",
|
||
zeetrf: "\u2128",
|
||
zeta: "\u03B6",
|
||
zfr: "\u{1D537}",
|
||
zhcy: "\u0436",
|
||
zigrarr: "\u21DD",
|
||
zopf: "\u{1D56B}",
|
||
zscr: "\u{1D4CF}",
|
||
zwj: "\u200D",
|
||
zwnj: "\u200C"
|
||
};
|
||
|
||
// node_modules/decode-named-character-reference/index.js
var own = {}.hasOwnProperty;
function decodeNamedCharacterReference(value) {
  return own.call(characterEntities, value) ? characterEntities[value] : false;
}

// node_modules/micromark-util-chunked/index.js
function splice(list4, start, remove, items) {
  const end = list4.length;
  let chunkStart = 0;
  let parameters;
  if (start < 0) {
    start = -start > end ? 0 : end + start;
  } else {
    start = start > end ? end : start;
  }
  remove = remove > 0 ? remove : 0;
  if (items.length < 1e4) {
    parameters = Array.from(items);
    parameters.unshift(start, remove);
    list4.splice(...parameters);
  } else {
    if (remove)
      list4.splice(start, remove);
    while (chunkStart < items.length) {
      parameters = items.slice(chunkStart, chunkStart + 1e4);
      parameters.unshift(start, 0);
      list4.splice(...parameters);
      chunkStart += 1e4;
      start += 1e4;
    }
  }
}
function push(list4, items) {
  if (list4.length > 0) {
    splice(list4, list4.length, 0, items);
    return list4;
  }
  return items;
}

// node_modules/micromark-util-combine-extensions/index.js
var hasOwnProperty = {}.hasOwnProperty;
function combineExtensions(extensions) {
  const all2 = {};
  let index2 = -1;
  while (++index2 < extensions.length) {
    syntaxExtension(all2, extensions[index2]);
  }
  return all2;
}
function syntaxExtension(all2, extension2) {
  let hook;
  for (hook in extension2) {
    const maybe = hasOwnProperty.call(all2, hook) ? all2[hook] : void 0;
    const left = maybe || (all2[hook] = {});
    const right = extension2[hook];
    let code2;
    if (right) {
      for (code2 in right) {
        if (!hasOwnProperty.call(left, code2))
          left[code2] = [];
        const value = right[code2];
        constructs(
          // @ts-expect-error Looks like a list.
          left[code2],
          Array.isArray(value) ? value : value ? [value] : []
        );
      }
    }
  }
}
function constructs(existing, list4) {
  let index2 = -1;
  const before = [];
  while (++index2 < list4.length) {
    ;
    (list4[index2].add === "after" ? existing : before).push(list4[index2]);
  }
  splice(existing, 0, 0, before);
}

// node_modules/micromark-util-decode-numeric-character-reference/index.js
function decodeNumericCharacterReference(value, base) {
  const code2 = Number.parseInt(value, base);
  if (
    // C0 except for HT, LF, FF, CR, space.
    code2 < 9 || code2 === 11 || code2 > 13 && code2 < 32 || // Control character (DEL) of C0, and C1 controls.
    code2 > 126 && code2 < 160 || // Lone high surrogates and low surrogates.
    code2 > 55295 && code2 < 57344 || // Noncharacters.
    code2 > 64975 && code2 < 65008 || (code2 & 65535) === 65535 || (code2 & 65535) === 65534 || // Out of range
    code2 > 1114111
  ) {
    return "\uFFFD";
  }
  return String.fromCharCode(code2);
}

// node_modules/micromark-util-normalize-identifier/index.js
function normalizeIdentifier(value) {
  return value.replace(/[\t\n\r ]+/g, " ").replace(/^ | $/g, "").toLowerCase().toUpperCase();
}

// node_modules/micromark-util-character/index.js
var unicodePunctuationInternal = regexCheck(/\p{P}/u);
var asciiAlpha = regexCheck(/[A-Za-z]/);
var asciiAlphanumeric = regexCheck(/[\dA-Za-z]/);
var asciiAtext = regexCheck(/[#-'*+\--9=?A-Z^-~]/);
function asciiControl(code2) {
  return (
    // Special whitespace codes (which have negative values), C0 and Control
    // character DEL
    code2 !== null && (code2 < 32 || code2 === 127)
  );
}
var asciiDigit = regexCheck(/\d/);
var asciiHexDigit = regexCheck(/[\dA-Fa-f]/);
var asciiPunctuation = regexCheck(/[!-/:-@[-`{-~]/);
function markdownLineEnding(code2) {
  return code2 !== null && code2 < -2;
}
function markdownLineEndingOrSpace(code2) {
  return code2 !== null && (code2 < 0 || code2 === 32);
}
function markdownSpace(code2) {
  return code2 === -2 || code2 === -1 || code2 === 32;
}
function unicodePunctuation(code2) {
  return asciiPunctuation(code2) || unicodePunctuationInternal(code2);
}
var unicodeWhitespace = regexCheck(/\s/);
function regexCheck(regex) {
  return check;
  function check(code2) {
    return code2 !== null && code2 > -1 && regex.test(String.fromCharCode(code2));
  }
}

// node_modules/micromark-factory-space/index.js
function factorySpace(effects, ok3, type, max) {
  const limit = max ? max - 1 : Number.POSITIVE_INFINITY;
  let size = 0;
  return start;
  function start(code2) {
    if (markdownSpace(code2)) {
      effects.enter(type);
      return prefix(code2);
    }
    return ok3(code2);
  }
  function prefix(code2) {
    if (markdownSpace(code2) && size++ < limit) {
      effects.consume(code2);
      return prefix;
    }
    effects.exit(type);
    return ok3(code2);
  }
}

// node_modules/micromark/lib/initialize/content.js
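// Initial "content" state: first tries the `contentInitial` constructs (for
// example definitions); anything else becomes a paragraph built from linked
// `chunkText` tokens, one per line, that are tokenized as text later.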
|
||
var content = {
|
||
tokenize: initializeContent
|
||
};
|
||
function initializeContent(effects) {
|
||
const contentStart = effects.attempt(
|
||
this.parser.constructs.contentInitial,
|
||
afterContentStartConstruct,
|
||
paragraphInitial
|
||
);
|
||
let previous2;
|
||
return contentStart;
|
||
function afterContentStartConstruct(code2) {
|
||
if (code2 === null) {
|
||
effects.consume(code2);
|
||
return;
|
||
}
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return factorySpace(effects, contentStart, "linePrefix");
|
||
}
|
||
function paragraphInitial(code2) {
|
||
effects.enter("paragraph");
|
||
return lineStart(code2);
|
||
}
|
||
function lineStart(code2) {
|
||
const token = effects.enter("chunkText", {
|
||
contentType: "text",
|
||
previous: previous2
|
||
});
|
||
if (previous2) {
|
||
previous2.next = token;
|
||
}
|
||
previous2 = token;
|
||
return data(code2);
|
||
}
|
||
function data(code2) {
|
||
if (code2 === null) {
|
||
effects.exit("chunkText");
|
||
effects.exit("paragraph");
|
||
effects.consume(code2);
|
||
return;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
effects.consume(code2);
|
||
effects.exit("chunkText");
|
||
return lineStart;
|
||
}
|
||
effects.consume(code2);
|
||
return data;
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark/lib/initialize/document.js
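// Document state machine: keeps the stack of open containers, re-checks each
// container's continuation at every line start, opens new containers, and
// streams the rest of the line into a child flow tokenizer as `chunkFlow`
// tokens, with special handling for lazy continuation lines.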
|
||
var document = {
|
||
tokenize: initializeDocument
|
||
};
|
||
var containerConstruct = {
|
||
tokenize: tokenizeContainer
|
||
};
|
||
function initializeDocument(effects) {
|
||
const self = this;
|
||
const stack = [];
|
||
let continued = 0;
|
||
let childFlow;
|
||
let childToken;
|
||
let lineStartOffset;
|
||
return start;
|
||
function start(code2) {
|
||
if (continued < stack.length) {
|
||
const item = stack[continued];
|
||
self.containerState = item[1];
|
||
return effects.attempt(
|
||
item[0].continuation,
|
||
documentContinue,
|
||
checkNewContainers
|
||
)(code2);
|
||
}
|
||
return checkNewContainers(code2);
|
||
}
|
||
function documentContinue(code2) {
|
||
continued++;
|
||
if (self.containerState._closeFlow) {
|
||
self.containerState._closeFlow = void 0;
|
||
if (childFlow) {
|
||
closeFlow();
|
||
}
|
||
const indexBeforeExits = self.events.length;
|
||
let indexBeforeFlow = indexBeforeExits;
|
||
let point3;
|
||
while (indexBeforeFlow--) {
|
||
if (self.events[indexBeforeFlow][0] === "exit" && self.events[indexBeforeFlow][1].type === "chunkFlow") {
|
||
point3 = self.events[indexBeforeFlow][1].end;
|
||
break;
|
||
}
|
||
}
|
||
exitContainers(continued);
|
||
let index2 = indexBeforeExits;
|
||
while (index2 < self.events.length) {
|
||
self.events[index2][1].end = Object.assign({}, point3);
|
||
index2++;
|
||
}
|
||
splice(
|
||
self.events,
|
||
indexBeforeFlow + 1,
|
||
0,
|
||
self.events.slice(indexBeforeExits)
|
||
);
|
||
self.events.length = index2;
|
||
return checkNewContainers(code2);
|
||
}
|
||
return start(code2);
|
||
}
|
||
function checkNewContainers(code2) {
|
||
if (continued === stack.length) {
|
||
if (!childFlow) {
|
||
return documentContinued(code2);
|
||
}
|
||
if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) {
|
||
return flowStart(code2);
|
||
}
|
||
self.interrupt = Boolean(
|
||
childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack
|
||
);
|
||
}
|
||
self.containerState = {};
|
||
return effects.check(
|
||
containerConstruct,
|
||
thereIsANewContainer,
|
||
thereIsNoNewContainer
|
||
)(code2);
|
||
}
|
||
function thereIsANewContainer(code2) {
|
||
if (childFlow)
|
||
closeFlow();
|
||
exitContainers(continued);
|
||
return documentContinued(code2);
|
||
}
|
||
function thereIsNoNewContainer(code2) {
|
||
self.parser.lazy[self.now().line] = continued !== stack.length;
|
||
lineStartOffset = self.now().offset;
|
||
return flowStart(code2);
|
||
}
|
||
function documentContinued(code2) {
|
||
self.containerState = {};
|
||
return effects.attempt(
|
||
containerConstruct,
|
||
containerContinue,
|
||
flowStart
|
||
)(code2);
|
||
}
|
||
function containerContinue(code2) {
|
||
continued++;
|
||
stack.push([self.currentConstruct, self.containerState]);
|
||
return documentContinued(code2);
|
||
}
|
||
function flowStart(code2) {
|
||
if (code2 === null) {
|
||
if (childFlow)
|
||
closeFlow();
|
||
exitContainers(0);
|
||
effects.consume(code2);
|
||
return;
|
||
}
|
||
childFlow = childFlow || self.parser.flow(self.now());
|
||
effects.enter("chunkFlow", {
|
||
contentType: "flow",
|
||
previous: childToken,
|
||
_tokenizer: childFlow
|
||
});
|
||
return flowContinue(code2);
|
||
}
|
||
function flowContinue(code2) {
|
||
if (code2 === null) {
|
||
writeToChild(effects.exit("chunkFlow"), true);
|
||
exitContainers(0);
|
||
effects.consume(code2);
|
||
return;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
effects.consume(code2);
|
||
writeToChild(effects.exit("chunkFlow"));
|
||
continued = 0;
|
||
self.interrupt = void 0;
|
||
return start;
|
||
}
|
||
effects.consume(code2);
|
||
return flowContinue;
|
||
}
|
||
function writeToChild(token, eof) {
|
||
const stream = self.sliceStream(token);
|
||
if (eof)
|
||
stream.push(null);
|
||
token.previous = childToken;
|
||
if (childToken)
|
||
childToken.next = token;
|
||
childToken = token;
|
||
childFlow.defineSkip(token.start);
|
||
childFlow.write(stream);
|
||
if (self.parser.lazy[token.start.line]) {
|
||
let index2 = childFlow.events.length;
|
||
while (index2--) {
|
||
if (
|
||
// The token starts before the line ending…
|
||
childFlow.events[index2][1].start.offset < lineStartOffset && // …and either is not ended yet…
|
||
(!childFlow.events[index2][1].end || // …or ends after it.
|
||
childFlow.events[index2][1].end.offset > lineStartOffset)
|
||
) {
|
||
return;
|
||
}
|
||
}
|
||
const indexBeforeExits = self.events.length;
|
||
let indexBeforeFlow = indexBeforeExits;
|
||
let seen;
|
||
let point3;
|
||
while (indexBeforeFlow--) {
|
||
if (self.events[indexBeforeFlow][0] === "exit" && self.events[indexBeforeFlow][1].type === "chunkFlow") {
|
||
if (seen) {
|
||
point3 = self.events[indexBeforeFlow][1].end;
|
||
break;
|
||
}
|
||
seen = true;
|
||
}
|
||
}
|
||
exitContainers(continued);
|
||
index2 = indexBeforeExits;
|
||
while (index2 < self.events.length) {
|
||
self.events[index2][1].end = Object.assign({}, point3);
|
||
index2++;
|
||
}
|
||
splice(
|
||
self.events,
|
||
indexBeforeFlow + 1,
|
||
0,
|
||
self.events.slice(indexBeforeExits)
|
||
);
|
||
self.events.length = index2;
|
||
}
|
||
}
|
||
function exitContainers(size) {
|
||
let index2 = stack.length;
|
||
while (index2-- > size) {
|
||
const entry = stack[index2];
|
||
self.containerState = entry[1];
|
||
entry[0].exit.call(self, effects);
|
||
}
|
||
stack.length = size;
|
||
}
|
||
function closeFlow() {
|
||
childFlow.write([null]);
|
||
childToken = void 0;
|
||
childFlow = void 0;
|
||
self.containerState._closeFlow = void 0;
|
||
}
|
||
}
|
||
function tokenizeContainer(effects, ok3, nok) {
|
||
return factorySpace(
|
||
effects,
|
||
effects.attempt(this.parser.constructs.document, ok3, nok),
|
||
"linePrefix",
|
||
this.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4
|
||
);
|
||
}
|
||
|
||
// node_modules/micromark-util-classify-character/index.js
function classifyCharacter(code2) {
  if (code2 === null || markdownLineEndingOrSpace(code2) || unicodeWhitespace(code2)) {
    return 1;
  }
  if (unicodePunctuation(code2)) {
    return 2;
  }
}

// node_modules/micromark-util-resolve-all/index.js
function resolveAll(constructs2, events, context) {
  const called = [];
  let index2 = -1;
  while (++index2 < constructs2.length) {
    const resolve = constructs2[index2].resolveAll;
    if (resolve && !called.includes(resolve)) {
      events = resolve(events, context);
      called.push(resolve);
    }
  }
  return events;
}

// node_modules/micromark-core-commonmark/lib/attention.js
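// Attention (emphasis/strong): runs of `*` or `_` become `attentionSequence`
// tokens; `resolveAllAttention` pairs openers with closers (including the
// CommonMark "multiple of 3" rule) and rewrites them into emphasis/strong
// groups, demoting unmatched sequences to plain data.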
|
||
var attention = {
|
||
name: "attention",
|
||
tokenize: tokenizeAttention,
|
||
resolveAll: resolveAllAttention
|
||
};
|
||
function resolveAllAttention(events, context) {
|
||
let index2 = -1;
|
||
let open;
|
||
let group;
|
||
let text4;
|
||
let openingSequence;
|
||
let closingSequence;
|
||
let use;
|
||
let nextEvents;
|
||
let offset;
|
||
while (++index2 < events.length) {
|
||
if (events[index2][0] === "enter" && events[index2][1].type === "attentionSequence" && events[index2][1]._close) {
|
||
open = index2;
|
||
while (open--) {
|
||
if (events[open][0] === "exit" && events[open][1].type === "attentionSequence" && events[open][1]._open && // If the markers are the same:
|
||
context.sliceSerialize(events[open][1]).charCodeAt(0) === context.sliceSerialize(events[index2][1]).charCodeAt(0)) {
|
||
if ((events[open][1]._close || events[index2][1]._open) && (events[index2][1].end.offset - events[index2][1].start.offset) % 3 && !((events[open][1].end.offset - events[open][1].start.offset + events[index2][1].end.offset - events[index2][1].start.offset) % 3)) {
|
||
continue;
|
||
}
|
||
use = events[open][1].end.offset - events[open][1].start.offset > 1 && events[index2][1].end.offset - events[index2][1].start.offset > 1 ? 2 : 1;
|
||
const start = Object.assign({}, events[open][1].end);
|
||
const end = Object.assign({}, events[index2][1].start);
|
||
movePoint(start, -use);
|
||
movePoint(end, use);
|
||
openingSequence = {
|
||
type: use > 1 ? "strongSequence" : "emphasisSequence",
|
||
start,
|
||
end: Object.assign({}, events[open][1].end)
|
||
};
|
||
closingSequence = {
|
||
type: use > 1 ? "strongSequence" : "emphasisSequence",
|
||
start: Object.assign({}, events[index2][1].start),
|
||
end
|
||
};
|
||
text4 = {
|
||
type: use > 1 ? "strongText" : "emphasisText",
|
||
start: Object.assign({}, events[open][1].end),
|
||
end: Object.assign({}, events[index2][1].start)
|
||
};
|
||
group = {
|
||
type: use > 1 ? "strong" : "emphasis",
|
||
start: Object.assign({}, openingSequence.start),
|
||
end: Object.assign({}, closingSequence.end)
|
||
};
|
||
events[open][1].end = Object.assign({}, openingSequence.start);
|
||
events[index2][1].start = Object.assign({}, closingSequence.end);
|
||
nextEvents = [];
|
||
if (events[open][1].end.offset - events[open][1].start.offset) {
|
||
nextEvents = push(nextEvents, [
|
||
["enter", events[open][1], context],
|
||
["exit", events[open][1], context]
|
||
]);
|
||
}
|
||
nextEvents = push(nextEvents, [
|
||
["enter", group, context],
|
||
["enter", openingSequence, context],
|
||
["exit", openingSequence, context],
|
||
["enter", text4, context]
|
||
]);
|
||
nextEvents = push(
|
||
nextEvents,
|
||
resolveAll(
|
||
context.parser.constructs.insideSpan.null,
|
||
events.slice(open + 1, index2),
|
||
context
|
||
)
|
||
);
|
||
nextEvents = push(nextEvents, [
|
||
["exit", text4, context],
|
||
["enter", closingSequence, context],
|
||
["exit", closingSequence, context],
|
||
["exit", group, context]
|
||
]);
|
||
if (events[index2][1].end.offset - events[index2][1].start.offset) {
|
||
offset = 2;
|
||
nextEvents = push(nextEvents, [
|
||
["enter", events[index2][1], context],
|
||
["exit", events[index2][1], context]
|
||
]);
|
||
} else {
|
||
offset = 0;
|
||
}
|
||
splice(events, open - 1, index2 - open + 3, nextEvents);
|
||
index2 = open + nextEvents.length - offset - 2;
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
index2 = -1;
|
||
while (++index2 < events.length) {
|
||
if (events[index2][1].type === "attentionSequence") {
|
||
events[index2][1].type = "data";
|
||
}
|
||
}
|
||
return events;
|
||
}
|
||
function tokenizeAttention(effects, ok3) {
|
||
const attentionMarkers2 = this.parser.constructs.attentionMarkers.null;
|
||
const previous2 = this.previous;
|
||
const before = classifyCharacter(previous2);
|
||
let marker;
|
||
return start;
|
||
function start(code2) {
|
||
marker = code2;
|
||
effects.enter("attentionSequence");
|
||
return inside(code2);
|
||
}
|
||
function inside(code2) {
|
||
if (code2 === marker) {
|
||
effects.consume(code2);
|
||
return inside;
|
||
}
|
||
const token = effects.exit("attentionSequence");
|
||
const after = classifyCharacter(code2);
|
||
const open = !after || after === 2 && before || attentionMarkers2.includes(code2);
|
||
const close = !before || before === 2 && after || attentionMarkers2.includes(previous2);
|
||
token._open = Boolean(marker === 42 ? open : open && (before || !close));
|
||
token._close = Boolean(marker === 42 ? close : close && (after || !open));
|
||
return ok3(code2);
|
||
}
|
||
}
|
||
function movePoint(point3, offset) {
|
||
point3.column += offset;
|
||
point3.offset += offset;
|
||
point3._bufferIndex += offset;
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/autolink.js
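// Autolinks: `<scheme:resource>` or `<user@example.com>`. Per the checks below,
// a scheme is an ASCII letter plus up to 31 more letters, digits, `+`, `-`, or
// `.`, and each email domain label is limited to 63 characters.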
|
||
var autolink = {
|
||
name: "autolink",
|
||
tokenize: tokenizeAutolink
|
||
};
|
||
function tokenizeAutolink(effects, ok3, nok) {
|
||
let size = 0;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("autolink");
|
||
effects.enter("autolinkMarker");
|
||
effects.consume(code2);
|
||
effects.exit("autolinkMarker");
|
||
effects.enter("autolinkProtocol");
|
||
return open;
|
||
}
|
||
function open(code2) {
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return schemeOrEmailAtext;
|
||
}
|
||
return emailAtext(code2);
|
||
}
|
||
function schemeOrEmailAtext(code2) {
|
||
if (code2 === 43 || code2 === 45 || code2 === 46 || asciiAlphanumeric(code2)) {
|
||
size = 1;
|
||
return schemeInsideOrEmailAtext(code2);
|
||
}
|
||
return emailAtext(code2);
|
||
}
|
||
function schemeInsideOrEmailAtext(code2) {
|
||
if (code2 === 58) {
|
||
effects.consume(code2);
|
||
size = 0;
|
||
return urlInside;
|
||
}
|
||
if ((code2 === 43 || code2 === 45 || code2 === 46 || asciiAlphanumeric(code2)) && size++ < 32) {
|
||
effects.consume(code2);
|
||
return schemeInsideOrEmailAtext;
|
||
}
|
||
size = 0;
|
||
return emailAtext(code2);
|
||
}
|
||
function urlInside(code2) {
|
||
if (code2 === 62) {
|
||
effects.exit("autolinkProtocol");
|
||
effects.enter("autolinkMarker");
|
||
effects.consume(code2);
|
||
effects.exit("autolinkMarker");
|
||
effects.exit("autolink");
|
||
return ok3;
|
||
}
|
||
if (code2 === null || code2 === 32 || code2 === 60 || asciiControl(code2)) {
|
||
return nok(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return urlInside;
|
||
}
|
||
function emailAtext(code2) {
|
||
if (code2 === 64) {
|
||
effects.consume(code2);
|
||
return emailAtSignOrDot;
|
||
}
|
||
if (asciiAtext(code2)) {
|
||
effects.consume(code2);
|
||
return emailAtext;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function emailAtSignOrDot(code2) {
|
||
return asciiAlphanumeric(code2) ? emailLabel(code2) : nok(code2);
|
||
}
|
||
function emailLabel(code2) {
|
||
if (code2 === 46) {
|
||
effects.consume(code2);
|
||
size = 0;
|
||
return emailAtSignOrDot;
|
||
}
|
||
if (code2 === 62) {
|
||
effects.exit("autolinkProtocol").type = "autolinkEmail";
|
||
effects.enter("autolinkMarker");
|
||
effects.consume(code2);
|
||
effects.exit("autolinkMarker");
|
||
effects.exit("autolink");
|
||
return ok3;
|
||
}
|
||
return emailValue(code2);
|
||
}
|
||
function emailValue(code2) {
|
||
if ((code2 === 45 || asciiAlphanumeric(code2)) && size++ < 63) {
|
||
const next = code2 === 45 ? emailValue : emailLabel;
|
||
effects.consume(code2);
|
||
return next;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/blank-line.js
|
||
var blankLine = {
|
||
tokenize: tokenizeBlankLine,
|
||
partial: true
|
||
};
|
||
function tokenizeBlankLine(effects, ok3, nok) {
|
||
return start;
|
||
function start(code2) {
|
||
return markdownSpace(code2) ? factorySpace(effects, after, "linePrefix")(code2) : after(code2);
|
||
}
|
||
function after(code2) {
|
||
return code2 === null || markdownLineEnding(code2) ? ok3(code2) : nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/block-quote.js
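// Block quote container: a `>` marker with an optional following space or tab;
// continuation lines re-attempt the same construct after the line prefix.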
|
||
var blockQuote = {
|
||
name: "blockQuote",
|
||
tokenize: tokenizeBlockQuoteStart,
|
||
continuation: {
|
||
tokenize: tokenizeBlockQuoteContinuation
|
||
},
|
||
exit
|
||
};
|
||
function tokenizeBlockQuoteStart(effects, ok3, nok) {
|
||
const self = this;
|
||
return start;
|
||
function start(code2) {
|
||
if (code2 === 62) {
|
||
const state = self.containerState;
|
||
if (!state.open) {
|
||
effects.enter("blockQuote", {
|
||
_container: true
|
||
});
|
||
state.open = true;
|
||
}
|
||
effects.enter("blockQuotePrefix");
|
||
effects.enter("blockQuoteMarker");
|
||
effects.consume(code2);
|
||
effects.exit("blockQuoteMarker");
|
||
return after;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function after(code2) {
|
||
if (markdownSpace(code2)) {
|
||
effects.enter("blockQuotePrefixWhitespace");
|
||
effects.consume(code2);
|
||
effects.exit("blockQuotePrefixWhitespace");
|
||
effects.exit("blockQuotePrefix");
|
||
return ok3;
|
||
}
|
||
effects.exit("blockQuotePrefix");
|
||
return ok3(code2);
|
||
}
|
||
}
|
||
function tokenizeBlockQuoteContinuation(effects, ok3, nok) {
|
||
const self = this;
|
||
return contStart;
|
||
function contStart(code2) {
|
||
if (markdownSpace(code2)) {
|
||
return factorySpace(
|
||
effects,
|
||
contBefore,
|
||
"linePrefix",
|
||
self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4
|
||
)(code2);
|
||
}
|
||
return contBefore(code2);
|
||
}
|
||
function contBefore(code2) {
|
||
return effects.attempt(blockQuote, ok3, nok)(code2);
|
||
}
|
||
}
|
||
function exit(effects) {
|
||
effects.exit("blockQuote");
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/character-escape.js
|
||
var characterEscape = {
|
||
name: "characterEscape",
|
||
tokenize: tokenizeCharacterEscape
|
||
};
|
||
function tokenizeCharacterEscape(effects, ok3, nok) {
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("characterEscape");
|
||
effects.enter("escapeMarker");
|
||
effects.consume(code2);
|
||
effects.exit("escapeMarker");
|
||
return inside;
|
||
}
|
||
function inside(code2) {
|
||
if (asciiPunctuation(code2)) {
|
||
effects.enter("characterEscapeValue");
|
||
effects.consume(code2);
|
||
effects.exit("characterEscapeValue");
|
||
effects.exit("characterEscape");
|
||
return ok3;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/character-reference.js
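// Character references: named (`&amp;`), decimal (`&#35;`), or hexadecimal
// (`&#xA9;`), capped below at 31, 7, and 6 value characters respectively;
// named references must exist in the entity map to be accepted.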
|
||
var characterReference = {
|
||
name: "characterReference",
|
||
tokenize: tokenizeCharacterReference
|
||
};
|
||
function tokenizeCharacterReference(effects, ok3, nok) {
|
||
const self = this;
|
||
let size = 0;
|
||
let max;
|
||
let test;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("characterReference");
|
||
effects.enter("characterReferenceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("characterReferenceMarker");
|
||
return open;
|
||
}
|
||
function open(code2) {
|
||
if (code2 === 35) {
|
||
effects.enter("characterReferenceMarkerNumeric");
|
||
effects.consume(code2);
|
||
effects.exit("characterReferenceMarkerNumeric");
|
||
return numeric;
|
||
}
|
||
effects.enter("characterReferenceValue");
|
||
max = 31;
|
||
test = asciiAlphanumeric;
|
||
return value(code2);
|
||
}
|
||
function numeric(code2) {
|
||
if (code2 === 88 || code2 === 120) {
|
||
effects.enter("characterReferenceMarkerHexadecimal");
|
||
effects.consume(code2);
|
||
effects.exit("characterReferenceMarkerHexadecimal");
|
||
effects.enter("characterReferenceValue");
|
||
max = 6;
|
||
test = asciiHexDigit;
|
||
return value;
|
||
}
|
||
effects.enter("characterReferenceValue");
|
||
max = 7;
|
||
test = asciiDigit;
|
||
return value(code2);
|
||
}
|
||
function value(code2) {
|
||
if (code2 === 59 && size) {
|
||
const token = effects.exit("characterReferenceValue");
|
||
if (test === asciiAlphanumeric && !decodeNamedCharacterReference(self.sliceSerialize(token))) {
|
||
return nok(code2);
|
||
}
|
||
effects.enter("characterReferenceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("characterReferenceMarker");
|
||
effects.exit("characterReference");
|
||
return ok3;
|
||
}
|
||
if (test(code2) && size++ < max) {
|
||
effects.consume(code2);
|
||
return value;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/code-fenced.js
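// Fenced code: an opening fence of three or more markers, an optional
// info/meta string, then raw lines until a closing fence at least as long as
// the opener; `nonLazyContinuation` keeps lazy lines from being absorbed.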
|
||
var nonLazyContinuation = {
|
||
tokenize: tokenizeNonLazyContinuation,
|
||
partial: true
|
||
};
|
||
var codeFenced = {
|
||
name: "codeFenced",
|
||
tokenize: tokenizeCodeFenced,
|
||
concrete: true
|
||
};
|
||
function tokenizeCodeFenced(effects, ok3, nok) {
|
||
const self = this;
|
||
const closeStart = {
|
||
tokenize: tokenizeCloseStart,
|
||
partial: true
|
||
};
|
||
let initialPrefix = 0;
|
||
let sizeOpen = 0;
|
||
let marker;
|
||
return start;
|
||
function start(code2) {
|
||
return beforeSequenceOpen(code2);
|
||
}
|
||
function beforeSequenceOpen(code2) {
|
||
const tail = self.events[self.events.length - 1];
|
||
initialPrefix = tail && tail[1].type === "linePrefix" ? tail[2].sliceSerialize(tail[1], true).length : 0;
|
||
marker = code2;
|
||
effects.enter("codeFenced");
|
||
effects.enter("codeFencedFence");
|
||
effects.enter("codeFencedFenceSequence");
|
||
return sequenceOpen(code2);
|
||
}
|
||
function sequenceOpen(code2) {
|
||
if (code2 === marker) {
|
||
sizeOpen++;
|
||
effects.consume(code2);
|
||
return sequenceOpen;
|
||
}
|
||
if (sizeOpen < 3) {
|
||
return nok(code2);
|
||
}
|
||
effects.exit("codeFencedFenceSequence");
|
||
return markdownSpace(code2) ? factorySpace(effects, infoBefore, "whitespace")(code2) : infoBefore(code2);
|
||
}
|
||
function infoBefore(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("codeFencedFence");
|
||
return self.interrupt ? ok3(code2) : effects.check(nonLazyContinuation, atNonLazyBreak, after)(code2);
|
||
}
|
||
effects.enter("codeFencedFenceInfo");
|
||
effects.enter("chunkString", {
|
||
contentType: "string"
|
||
});
|
||
return info(code2);
|
||
}
|
||
function info(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("chunkString");
|
||
effects.exit("codeFencedFenceInfo");
|
||
return infoBefore(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.exit("chunkString");
|
||
effects.exit("codeFencedFenceInfo");
|
||
return factorySpace(effects, metaBefore, "whitespace")(code2);
|
||
}
|
||
if (code2 === 96 && code2 === marker) {
|
||
return nok(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return info;
|
||
}
|
||
function metaBefore(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
return infoBefore(code2);
|
||
}
|
||
effects.enter("codeFencedFenceMeta");
|
||
effects.enter("chunkString", {
|
||
contentType: "string"
|
||
});
|
||
return meta(code2);
|
||
}
|
||
function meta(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("chunkString");
|
||
effects.exit("codeFencedFenceMeta");
|
||
return infoBefore(code2);
|
||
}
|
||
if (code2 === 96 && code2 === marker) {
|
||
return nok(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return meta;
|
||
}
|
||
function atNonLazyBreak(code2) {
|
||
return effects.attempt(closeStart, after, contentBefore)(code2);
|
||
}
|
||
function contentBefore(code2) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return contentStart;
|
||
}
|
||
function contentStart(code2) {
|
||
return initialPrefix > 0 && markdownSpace(code2) ? factorySpace(
|
||
effects,
|
||
beforeContentChunk,
|
||
"linePrefix",
|
||
initialPrefix + 1
|
||
)(code2) : beforeContentChunk(code2);
|
||
}
|
||
function beforeContentChunk(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
return effects.check(nonLazyContinuation, atNonLazyBreak, after)(code2);
|
||
}
|
||
effects.enter("codeFlowValue");
|
||
return contentChunk(code2);
|
||
}
|
||
function contentChunk(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("codeFlowValue");
|
||
return beforeContentChunk(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return contentChunk;
|
||
}
|
||
function after(code2) {
|
||
effects.exit("codeFenced");
|
||
return ok3(code2);
|
||
}
|
||
function tokenizeCloseStart(effects2, ok4, nok2) {
|
||
let size = 0;
|
||
return startBefore;
|
||
function startBefore(code2) {
|
||
effects2.enter("lineEnding");
|
||
effects2.consume(code2);
|
||
effects2.exit("lineEnding");
|
||
return start2;
|
||
}
|
||
function start2(code2) {
|
||
effects2.enter("codeFencedFence");
|
||
return markdownSpace(code2) ? factorySpace(
|
||
effects2,
|
||
beforeSequenceClose,
|
||
"linePrefix",
|
||
self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4
|
||
)(code2) : beforeSequenceClose(code2);
|
||
}
|
||
function beforeSequenceClose(code2) {
|
||
if (code2 === marker) {
|
||
effects2.enter("codeFencedFenceSequence");
|
||
return sequenceClose(code2);
|
||
}
|
||
return nok2(code2);
|
||
}
|
||
function sequenceClose(code2) {
|
||
if (code2 === marker) {
|
||
size++;
|
||
effects2.consume(code2);
|
||
return sequenceClose;
|
||
}
|
||
if (size >= sizeOpen) {
|
||
effects2.exit("codeFencedFenceSequence");
|
||
return markdownSpace(code2) ? factorySpace(effects2, sequenceCloseAfter, "whitespace")(code2) : sequenceCloseAfter(code2);
|
||
}
|
||
return nok2(code2);
|
||
}
|
||
function sequenceCloseAfter(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects2.exit("codeFencedFence");
|
||
return ok4(code2);
|
||
}
|
||
return nok2(code2);
|
||
}
|
||
}
|
||
}
|
||
function tokenizeNonLazyContinuation(effects, ok3, nok) {
|
||
const self = this;
|
||
return start;
|
||
function start(code2) {
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return lineStart;
|
||
}
|
||
function lineStart(code2) {
|
||
return self.parser.lazy[self.now().line] ? nok(code2) : ok3(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/code-indented.js
|
||
var codeIndented = {
|
||
name: "codeIndented",
|
||
tokenize: tokenizeCodeIndented
|
||
};
|
||
var furtherStart = {
|
||
tokenize: tokenizeFurtherStart,
|
||
partial: true
|
||
};
|
||
function tokenizeCodeIndented(effects, ok3, nok) {
|
||
const self = this;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("codeIndented");
|
||
return factorySpace(effects, afterPrefix, "linePrefix", 4 + 1)(code2);
|
||
}
|
||
function afterPrefix(code2) {
|
||
const tail = self.events[self.events.length - 1];
|
||
return tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4 ? atBreak(code2) : nok(code2);
|
||
}
|
||
function atBreak(code2) {
|
||
if (code2 === null) {
|
||
return after(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
return effects.attempt(furtherStart, atBreak, after)(code2);
|
||
}
|
||
effects.enter("codeFlowValue");
|
||
return inside(code2);
|
||
}
|
||
function inside(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("codeFlowValue");
|
||
return atBreak(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return inside;
|
||
}
|
||
function after(code2) {
|
||
effects.exit("codeIndented");
|
||
return ok3(code2);
|
||
}
|
||
}
|
||
function tokenizeFurtherStart(effects, ok3, nok) {
|
||
const self = this;
|
||
return furtherStart2;
|
||
function furtherStart2(code2) {
|
||
if (self.parser.lazy[self.now().line]) {
|
||
return nok(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return furtherStart2;
|
||
}
|
||
return factorySpace(effects, afterPrefix, "linePrefix", 4 + 1)(code2);
|
||
}
|
||
function afterPrefix(code2) {
|
||
const tail = self.events[self.events.length - 1];
|
||
return tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4 ? ok3(code2) : markdownLineEnding(code2) ? furtherStart2(code2) : nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/code-text.js
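// Code (text): matching backtick runs around inline code; `resolveCodeText`
// strips one space (or line ending) of padding from both ends when both sides
// have it, and merges everything in between into `codeTextData`.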
|
||
var codeText = {
|
||
name: "codeText",
|
||
tokenize: tokenizeCodeText,
|
||
resolve: resolveCodeText,
|
||
previous
|
||
};
|
||
function resolveCodeText(events) {
|
||
let tailExitIndex = events.length - 4;
|
||
let headEnterIndex = 3;
|
||
let index2;
|
||
let enter;
|
||
if ((events[headEnterIndex][1].type === "lineEnding" || events[headEnterIndex][1].type === "space") && (events[tailExitIndex][1].type === "lineEnding" || events[tailExitIndex][1].type === "space")) {
|
||
index2 = headEnterIndex;
|
||
while (++index2 < tailExitIndex) {
|
||
if (events[index2][1].type === "codeTextData") {
|
||
events[headEnterIndex][1].type = "codeTextPadding";
|
||
events[tailExitIndex][1].type = "codeTextPadding";
|
||
headEnterIndex += 2;
|
||
tailExitIndex -= 2;
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
index2 = headEnterIndex - 1;
|
||
tailExitIndex++;
|
||
while (++index2 <= tailExitIndex) {
|
||
if (enter === void 0) {
|
||
if (index2 !== tailExitIndex && events[index2][1].type !== "lineEnding") {
|
||
enter = index2;
|
||
}
|
||
} else if (index2 === tailExitIndex || events[index2][1].type === "lineEnding") {
|
||
events[enter][1].type = "codeTextData";
|
||
if (index2 !== enter + 2) {
|
||
events[enter][1].end = events[index2 - 1][1].end;
|
||
events.splice(enter + 2, index2 - enter - 2);
|
||
tailExitIndex -= index2 - enter - 2;
|
||
index2 = enter + 2;
|
||
}
|
||
enter = void 0;
|
||
}
|
||
}
|
||
return events;
|
||
}
|
||
function previous(code2) {
|
||
return code2 !== 96 || this.events[this.events.length - 1][1].type === "characterEscape";
|
||
}
|
||
function tokenizeCodeText(effects, ok3, nok) {
|
||
const self = this;
|
||
let sizeOpen = 0;
|
||
let size;
|
||
let token;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("codeText");
|
||
effects.enter("codeTextSequence");
|
||
return sequenceOpen(code2);
|
||
}
|
||
function sequenceOpen(code2) {
|
||
if (code2 === 96) {
|
||
effects.consume(code2);
|
||
sizeOpen++;
|
||
return sequenceOpen;
|
||
}
|
||
effects.exit("codeTextSequence");
|
||
return between2(code2);
|
||
}
|
||
function between2(code2) {
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 32) {
|
||
effects.enter("space");
|
||
effects.consume(code2);
|
||
effects.exit("space");
|
||
return between2;
|
||
}
|
||
if (code2 === 96) {
|
||
token = effects.enter("codeTextSequence");
|
||
size = 0;
|
||
return sequenceClose(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return between2;
|
||
}
|
||
effects.enter("codeTextData");
|
||
return data(code2);
|
||
}
|
||
function data(code2) {
|
||
if (code2 === null || code2 === 32 || code2 === 96 || markdownLineEnding(code2)) {
|
||
effects.exit("codeTextData");
|
||
return between2(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return data;
|
||
}
|
||
function sequenceClose(code2) {
|
||
if (code2 === 96) {
|
||
effects.consume(code2);
|
||
size++;
|
||
return sequenceClose;
|
||
}
|
||
if (size === sizeOpen) {
|
||
effects.exit("codeTextSequence");
|
||
effects.exit("codeText");
|
||
return ok3(code2);
|
||
}
|
||
token.type = "codeTextData";
|
||
return data(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-util-subtokenize/index.js
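// Subtokenization: walks the event list, feeds linked `chunkText` and
// `chunkContent` tokens to their embedded tokenizers, and splices the child
// events back in; returns `true` once no embedded content remains.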
|
||
function subtokenize(events) {
|
||
const jumps = {};
|
||
let index2 = -1;
|
||
let event;
|
||
let lineIndex;
|
||
let otherIndex;
|
||
let otherEvent;
|
||
let parameters;
|
||
let subevents;
|
||
let more;
|
||
while (++index2 < events.length) {
|
||
while (index2 in jumps) {
|
||
index2 = jumps[index2];
|
||
}
|
||
event = events[index2];
|
||
if (index2 && event[1].type === "chunkFlow" && events[index2 - 1][1].type === "listItemPrefix") {
|
||
subevents = event[1]._tokenizer.events;
|
||
otherIndex = 0;
|
||
if (otherIndex < subevents.length && subevents[otherIndex][1].type === "lineEndingBlank") {
|
||
otherIndex += 2;
|
||
}
|
||
if (otherIndex < subevents.length && subevents[otherIndex][1].type === "content") {
|
||
while (++otherIndex < subevents.length) {
|
||
if (subevents[otherIndex][1].type === "content") {
|
||
break;
|
||
}
|
||
if (subevents[otherIndex][1].type === "chunkText") {
|
||
subevents[otherIndex][1]._isInFirstContentOfListItem = true;
|
||
otherIndex++;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
if (event[0] === "enter") {
|
||
if (event[1].contentType) {
|
||
Object.assign(jumps, subcontent(events, index2));
|
||
index2 = jumps[index2];
|
||
more = true;
|
||
}
|
||
} else if (event[1]._container) {
|
||
otherIndex = index2;
|
||
lineIndex = void 0;
|
||
while (otherIndex--) {
|
||
otherEvent = events[otherIndex];
|
||
if (otherEvent[1].type === "lineEnding" || otherEvent[1].type === "lineEndingBlank") {
|
||
if (otherEvent[0] === "enter") {
|
||
if (lineIndex) {
|
||
events[lineIndex][1].type = "lineEndingBlank";
|
||
}
|
||
otherEvent[1].type = "lineEnding";
|
||
lineIndex = otherIndex;
|
||
}
|
||
} else {
|
||
break;
|
||
}
|
||
}
|
||
if (lineIndex) {
|
||
event[1].end = Object.assign({}, events[lineIndex][1].start);
|
||
parameters = events.slice(lineIndex, index2);
|
||
parameters.unshift(event);
|
||
splice(events, lineIndex, index2 - lineIndex + 1, parameters);
|
||
}
|
||
}
|
||
}
|
||
return !more;
|
||
}
|
||
function subcontent(events, eventIndex) {
|
||
const token = events[eventIndex][1];
|
||
const context = events[eventIndex][2];
|
||
let startPosition = eventIndex - 1;
|
||
const startPositions = [];
|
||
const tokenizer = token._tokenizer || context.parser[token.contentType](token.start);
|
||
const childEvents = tokenizer.events;
|
||
const jumps = [];
|
||
const gaps = {};
|
||
let stream;
|
||
let previous2;
|
||
let index2 = -1;
|
||
let current = token;
|
||
let adjust = 0;
|
||
let start = 0;
|
||
const breaks = [start];
|
||
while (current) {
|
||
while (events[++startPosition][1] !== current) {
|
||
}
|
||
startPositions.push(startPosition);
|
||
if (!current._tokenizer) {
|
||
stream = context.sliceStream(current);
|
||
if (!current.next) {
|
||
stream.push(null);
|
||
}
|
||
if (previous2) {
|
||
tokenizer.defineSkip(current.start);
|
||
}
|
||
if (current._isInFirstContentOfListItem) {
|
||
tokenizer._gfmTasklistFirstContentOfListItem = true;
|
||
}
|
||
tokenizer.write(stream);
|
||
if (current._isInFirstContentOfListItem) {
|
||
tokenizer._gfmTasklistFirstContentOfListItem = void 0;
|
||
}
|
||
}
|
||
previous2 = current;
|
||
current = current.next;
|
||
}
|
||
current = token;
|
||
while (++index2 < childEvents.length) {
|
||
if (
|
||
// Find a void token that includes a break.
|
||
childEvents[index2][0] === "exit" && childEvents[index2 - 1][0] === "enter" && childEvents[index2][1].type === childEvents[index2 - 1][1].type && childEvents[index2][1].start.line !== childEvents[index2][1].end.line
|
||
) {
|
||
start = index2 + 1;
|
||
breaks.push(start);
|
||
current._tokenizer = void 0;
|
||
current.previous = void 0;
|
||
current = current.next;
|
||
}
|
||
}
|
||
tokenizer.events = [];
|
||
if (current) {
|
||
current._tokenizer = void 0;
|
||
current.previous = void 0;
|
||
} else {
|
||
breaks.pop();
|
||
}
|
||
index2 = breaks.length;
|
||
while (index2--) {
|
||
const slice = childEvents.slice(breaks[index2], breaks[index2 + 1]);
|
||
const start2 = startPositions.pop();
|
||
jumps.unshift([start2, start2 + slice.length - 1]);
|
||
splice(events, start2, 2, slice);
|
||
}
|
||
index2 = -1;
|
||
while (++index2 < jumps.length) {
|
||
gaps[adjust + jumps[index2][0]] = adjust + jumps[index2][1];
|
||
adjust += jumps[index2][1] - jumps[index2][0] - 1;
|
||
}
|
||
return gaps;
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/content.js
|
||
var content2 = {
|
||
tokenize: tokenizeContent,
|
||
resolve: resolveContent
|
||
};
|
||
var continuationConstruct = {
|
||
tokenize: tokenizeContinuation,
|
||
partial: true
|
||
};
|
||
function resolveContent(events) {
|
||
subtokenize(events);
|
||
return events;
|
||
}
|
||
function tokenizeContent(effects, ok3) {
|
||
let previous2;
|
||
return chunkStart;
|
||
function chunkStart(code2) {
|
||
effects.enter("content");
|
||
previous2 = effects.enter("chunkContent", {
|
||
contentType: "content"
|
||
});
|
||
return chunkInside(code2);
|
||
}
|
||
function chunkInside(code2) {
|
||
if (code2 === null) {
|
||
return contentEnd(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
return effects.check(
|
||
continuationConstruct,
|
||
contentContinue,
|
||
contentEnd
|
||
)(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return chunkInside;
|
||
}
|
||
function contentEnd(code2) {
|
||
effects.exit("chunkContent");
|
||
effects.exit("content");
|
||
return ok3(code2);
|
||
}
|
||
function contentContinue(code2) {
|
||
effects.consume(code2);
|
||
effects.exit("chunkContent");
|
||
previous2.next = effects.enter("chunkContent", {
|
||
contentType: "content",
|
||
previous: previous2
|
||
});
|
||
previous2 = previous2.next;
|
||
return chunkInside;
|
||
}
|
||
}
|
||
function tokenizeContinuation(effects, ok3, nok) {
|
||
const self = this;
|
||
return startLookahead;
|
||
function startLookahead(code2) {
|
||
effects.exit("chunkContent");
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return factorySpace(effects, prefixed, "linePrefix");
|
||
}
|
||
function prefixed(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
return nok(code2);
|
||
}
|
||
const tail = self.events[self.events.length - 1];
|
||
if (!self.parser.constructs.disable.null.includes("codeIndented") && tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4) {
|
||
return ok3(code2);
|
||
}
|
||
return effects.interrupt(self.parser.constructs.flow, nok, ok3)(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-factory-destination/index.js
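// Destination factory (links, images, definitions): either a `<...>`-enclosed
// literal destination or a raw one in which unescaped parentheses must
// balance, with nesting limited by `max`.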
|
||
function factoryDestination(effects, ok3, nok, type, literalType, literalMarkerType, rawType, stringType, max) {
|
||
const limit = max || Number.POSITIVE_INFINITY;
|
||
let balance = 0;
|
||
return start;
|
||
function start(code2) {
|
||
if (code2 === 60) {
|
||
effects.enter(type);
|
||
effects.enter(literalType);
|
||
effects.enter(literalMarkerType);
|
||
effects.consume(code2);
|
||
effects.exit(literalMarkerType);
|
||
return enclosedBefore;
|
||
}
|
||
if (code2 === null || code2 === 32 || code2 === 41 || asciiControl(code2)) {
|
||
return nok(code2);
|
||
}
|
||
effects.enter(type);
|
||
effects.enter(rawType);
|
||
effects.enter(stringType);
|
||
effects.enter("chunkString", {
|
||
contentType: "string"
|
||
});
|
||
return raw(code2);
|
||
}
|
||
function enclosedBefore(code2) {
|
||
if (code2 === 62) {
|
||
effects.enter(literalMarkerType);
|
||
effects.consume(code2);
|
||
effects.exit(literalMarkerType);
|
||
effects.exit(literalType);
|
||
effects.exit(type);
|
||
return ok3;
|
||
}
|
||
effects.enter(stringType);
|
||
effects.enter("chunkString", {
|
||
contentType: "string"
|
||
});
|
||
return enclosed(code2);
|
||
}
|
||
function enclosed(code2) {
|
||
if (code2 === 62) {
|
||
effects.exit("chunkString");
|
||
effects.exit(stringType);
|
||
return enclosedBefore(code2);
|
||
}
|
||
if (code2 === null || code2 === 60 || markdownLineEnding(code2)) {
|
||
return nok(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return code2 === 92 ? enclosedEscape : enclosed;
|
||
}
|
||
function enclosedEscape(code2) {
|
||
if (code2 === 60 || code2 === 62 || code2 === 92) {
|
||
effects.consume(code2);
|
||
return enclosed;
|
||
}
|
||
return enclosed(code2);
|
||
}
|
||
function raw(code2) {
|
||
if (!balance && (code2 === null || code2 === 41 || markdownLineEndingOrSpace(code2))) {
|
||
effects.exit("chunkString");
|
||
effects.exit(stringType);
|
||
effects.exit(rawType);
|
||
effects.exit(type);
|
||
return ok3(code2);
|
||
}
|
||
if (balance < limit && code2 === 40) {
|
||
effects.consume(code2);
|
||
balance++;
|
||
return raw;
|
||
}
|
||
if (code2 === 41) {
|
||
effects.consume(code2);
|
||
balance--;
|
||
return raw;
|
||
}
|
||
if (code2 === null || code2 === 32 || code2 === 40 || asciiControl(code2)) {
|
||
return nok(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return code2 === 92 ? rawEscape : raw;
|
||
}
|
||
function rawEscape(code2) {
|
||
if (code2 === 40 || code2 === 41 || code2 === 92) {
|
||
effects.consume(code2);
|
||
return raw;
|
||
}
|
||
return raw(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-factory-label/index.js
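// Label factory: `[...]` with at most 999 characters, which must include at
// least one non-whitespace character; backslash escapes `[`, `\`, and `]`.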
|
||
function factoryLabel(effects, ok3, nok, type, markerType, stringType) {
|
||
const self = this;
|
||
let size = 0;
|
||
let seen;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter(type);
|
||
effects.enter(markerType);
|
||
effects.consume(code2);
|
||
effects.exit(markerType);
|
||
effects.enter(stringType);
|
||
return atBreak;
|
||
}
|
||
function atBreak(code2) {
|
||
if (size > 999 || code2 === null || code2 === 91 || code2 === 93 && !seen || // To do: remove in the future once we’ve switched from
|
||
// `micromark-extension-footnote` to `micromark-extension-gfm-footnote`,
|
||
// which doesn’t need this.
|
||
// Hidden footnotes hook.
|
||
/* c8 ignore next 3 */
|
||
code2 === 94 && !size && "_hiddenFootnoteSupport" in self.parser.constructs) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 93) {
|
||
effects.exit(stringType);
|
||
effects.enter(markerType);
|
||
effects.consume(code2);
|
||
effects.exit(markerType);
|
||
effects.exit(type);
|
||
return ok3;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return atBreak;
|
||
}
|
||
effects.enter("chunkString", {
|
||
contentType: "string"
|
||
});
|
||
return labelInside(code2);
|
||
}
|
||
function labelInside(code2) {
|
||
if (code2 === null || code2 === 91 || code2 === 93 || markdownLineEnding(code2) || size++ > 999) {
|
||
effects.exit("chunkString");
|
||
return atBreak(code2);
|
||
}
|
||
effects.consume(code2);
|
||
if (!seen)
|
||
seen = !markdownSpace(code2);
|
||
return code2 === 92 ? labelEscape : labelInside;
|
||
}
|
||
function labelEscape(code2) {
|
||
if (code2 === 91 || code2 === 92 || code2 === 93) {
|
||
effects.consume(code2);
|
||
size++;
|
||
return labelInside;
|
||
}
|
||
return labelInside(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-factory-title/index.js
|
||
function factoryTitle(effects, ok3, nok, type, markerType, stringType) {
|
||
let marker;
|
||
return start;
|
||
function start(code2) {
|
||
if (code2 === 34 || code2 === 39 || code2 === 40) {
|
||
effects.enter(type);
|
||
effects.enter(markerType);
|
||
effects.consume(code2);
|
||
effects.exit(markerType);
|
||
marker = code2 === 40 ? 41 : code2;
|
||
return begin;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function begin(code2) {
|
||
if (code2 === marker) {
|
||
effects.enter(markerType);
|
||
effects.consume(code2);
|
||
effects.exit(markerType);
|
||
effects.exit(type);
|
||
return ok3;
|
||
}
|
||
effects.enter(stringType);
|
||
return atBreak(code2);
|
||
}
|
||
function atBreak(code2) {
|
||
if (code2 === marker) {
|
||
effects.exit(stringType);
|
||
return begin(marker);
|
||
}
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return factorySpace(effects, atBreak, "linePrefix");
|
||
}
|
||
effects.enter("chunkString", {
|
||
contentType: "string"
|
||
});
|
||
return inside(code2);
|
||
}
|
||
function inside(code2) {
|
||
if (code2 === marker || code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("chunkString");
|
||
return atBreak(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return code2 === 92 ? escape : inside;
|
||
}
|
||
function escape(code2) {
|
||
if (code2 === marker || code2 === 92) {
|
||
effects.consume(code2);
|
||
return inside;
|
||
}
|
||
return inside(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-factory-whitespace/index.js
|
||
function factoryWhitespace(effects, ok3) {
|
||
let seen;
|
||
return start;
|
||
function start(code2) {
|
||
if (markdownLineEnding(code2)) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
seen = true;
|
||
return start;
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
return factorySpace(
|
||
effects,
|
||
start,
|
||
seen ? "linePrefix" : "lineSuffix"
|
||
)(code2);
|
||
}
|
||
return ok3(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/definition.js
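// Definitions: `[label]: destination "optional title"`. The label is run
// through `normalizeIdentifier` and pushed onto `parser.defined` so later
// references can resolve against it.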
|
||
var definition = {
|
||
name: "definition",
|
||
tokenize: tokenizeDefinition
|
||
};
|
||
var titleBefore = {
|
||
tokenize: tokenizeTitleBefore,
|
||
partial: true
|
||
};
|
||
function tokenizeDefinition(effects, ok3, nok) {
|
||
const self = this;
|
||
let identifier;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("definition");
|
||
return before(code2);
|
||
}
|
||
function before(code2) {
|
||
return factoryLabel.call(
|
||
self,
|
||
effects,
|
||
labelAfter,
|
||
// Note: we don’t need to reset the way `markdown-rs` does.
|
||
nok,
|
||
"definitionLabel",
|
||
"definitionLabelMarker",
|
||
"definitionLabelString"
|
||
)(code2);
|
||
}
|
||
function labelAfter(code2) {
|
||
identifier = normalizeIdentifier(
|
||
self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
|
||
);
|
||
if (code2 === 58) {
|
||
effects.enter("definitionMarker");
|
||
effects.consume(code2);
|
||
effects.exit("definitionMarker");
|
||
return markerAfter;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function markerAfter(code2) {
|
||
return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, destinationBefore)(code2) : destinationBefore(code2);
|
||
}
|
||
function destinationBefore(code2) {
|
||
return factoryDestination(
|
||
effects,
|
||
destinationAfter,
|
||
// Note: we don’t need to reset the way `markdown-rs` does.
|
||
nok,
|
||
"definitionDestination",
|
||
"definitionDestinationLiteral",
|
||
"definitionDestinationLiteralMarker",
|
||
"definitionDestinationRaw",
|
||
"definitionDestinationString"
|
||
)(code2);
|
||
}
|
||
function destinationAfter(code2) {
|
||
return effects.attempt(titleBefore, after, after)(code2);
|
||
}
|
||
function after(code2) {
|
||
return markdownSpace(code2) ? factorySpace(effects, afterWhitespace, "whitespace")(code2) : afterWhitespace(code2);
|
||
}
|
||
function afterWhitespace(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("definition");
|
||
self.parser.defined.push(identifier);
|
||
return ok3(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
function tokenizeTitleBefore(effects, ok3, nok) {
|
||
return titleBefore2;
|
||
function titleBefore2(code2) {
|
||
return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, beforeMarker)(code2) : nok(code2);
|
||
}
|
||
function beforeMarker(code2) {
|
||
return factoryTitle(
|
||
effects,
|
||
titleAfter,
|
||
nok,
|
||
"definitionTitle",
|
||
"definitionTitleMarker",
|
||
"definitionTitleString"
|
||
)(code2);
|
||
}
|
||
function titleAfter(code2) {
|
||
return markdownSpace(code2) ? factorySpace(effects, titleAfterOptionalWhitespace, "whitespace")(code2) : titleAfterOptionalWhitespace(code2);
|
||
}
|
||
function titleAfterOptionalWhitespace(code2) {
|
||
return code2 === null || markdownLineEnding(code2) ? ok3(code2) : nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/hard-break-escape.js
|
||
var hardBreakEscape = {
|
||
name: "hardBreakEscape",
|
||
tokenize: tokenizeHardBreakEscape
|
||
};
|
||
function tokenizeHardBreakEscape(effects, ok3, nok) {
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("hardBreakEscape");
|
||
effects.consume(code2);
|
||
return after;
|
||
}
|
||
function after(code2) {
|
||
if (markdownLineEnding(code2)) {
|
||
effects.exit("hardBreakEscape");
|
||
return ok3(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/heading-atx.js
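// ATX headings: one to six `#` markers; the resolver wraps the heading text in
// an `atxHeadingText` token containing a `chunkText` and drops an optional
// closing `#` sequence and surrounding whitespace.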
|
||
var headingAtx = {
|
||
name: "headingAtx",
|
||
tokenize: tokenizeHeadingAtx,
|
||
resolve: resolveHeadingAtx
|
||
};
|
||
function resolveHeadingAtx(events, context) {
|
||
let contentEnd = events.length - 2;
|
||
let contentStart = 3;
|
||
let content3;
|
||
let text4;
|
||
if (events[contentStart][1].type === "whitespace") {
|
||
contentStart += 2;
|
||
}
|
||
if (contentEnd - 2 > contentStart && events[contentEnd][1].type === "whitespace") {
|
||
contentEnd -= 2;
|
||
}
|
||
if (events[contentEnd][1].type === "atxHeadingSequence" && (contentStart === contentEnd - 1 || contentEnd - 4 > contentStart && events[contentEnd - 2][1].type === "whitespace")) {
|
||
contentEnd -= contentStart + 1 === contentEnd ? 2 : 4;
|
||
}
|
||
if (contentEnd > contentStart) {
|
||
content3 = {
|
||
type: "atxHeadingText",
|
||
start: events[contentStart][1].start,
|
||
end: events[contentEnd][1].end
|
||
};
|
||
text4 = {
|
||
type: "chunkText",
|
||
start: events[contentStart][1].start,
|
||
end: events[contentEnd][1].end,
|
||
contentType: "text"
|
||
};
|
||
splice(events, contentStart, contentEnd - contentStart + 1, [
|
||
["enter", content3, context],
|
||
["enter", text4, context],
|
||
["exit", text4, context],
|
||
["exit", content3, context]
|
||
]);
|
||
}
|
||
return events;
|
||
}
|
||
function tokenizeHeadingAtx(effects, ok3, nok) {
|
||
let size = 0;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("atxHeading");
|
||
return before(code2);
|
||
}
|
||
function before(code2) {
|
||
effects.enter("atxHeadingSequence");
|
||
return sequenceOpen(code2);
|
||
}
|
||
function sequenceOpen(code2) {
|
||
if (code2 === 35 && size++ < 6) {
|
||
effects.consume(code2);
|
||
return sequenceOpen;
|
||
}
|
||
if (code2 === null || markdownLineEndingOrSpace(code2)) {
|
||
effects.exit("atxHeadingSequence");
|
||
return atBreak(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function atBreak(code2) {
|
||
if (code2 === 35) {
|
||
effects.enter("atxHeadingSequence");
|
||
return sequenceFurther(code2);
|
||
}
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("atxHeading");
|
||
return ok3(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
return factorySpace(effects, atBreak, "whitespace")(code2);
|
||
}
|
||
effects.enter("atxHeadingText");
|
||
return data(code2);
|
||
}
|
||
function sequenceFurther(code2) {
|
||
if (code2 === 35) {
|
||
effects.consume(code2);
|
||
return sequenceFurther;
|
||
}
|
||
effects.exit("atxHeadingSequence");
|
||
return atBreak(code2);
|
||
}
|
||
function data(code2) {
|
||
if (code2 === null || code2 === 35 || markdownLineEndingOrSpace(code2)) {
|
||
effects.exit("atxHeadingText");
|
||
return atBreak(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return data;
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-util-html-tag-name/index.js
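// Tag-name lists used by the HTML (flow) construct: `htmlBlockNames` for
// regular HTML blocks and `htmlRawNames` for raw-text elements whose content
// runs until the matching closing tag.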
|
||
var htmlBlockNames = [
|
||
"address",
|
||
"article",
|
||
"aside",
|
||
"base",
|
||
"basefont",
|
||
"blockquote",
|
||
"body",
|
||
"caption",
|
||
"center",
|
||
"col",
|
||
"colgroup",
|
||
"dd",
|
||
"details",
|
||
"dialog",
|
||
"dir",
|
||
"div",
|
||
"dl",
|
||
"dt",
|
||
"fieldset",
|
||
"figcaption",
|
||
"figure",
|
||
"footer",
|
||
"form",
|
||
"frame",
|
||
"frameset",
|
||
"h1",
|
||
"h2",
|
||
"h3",
|
||
"h4",
|
||
"h5",
|
||
"h6",
|
||
"head",
|
||
"header",
|
||
"hr",
|
||
"html",
|
||
"iframe",
|
||
"legend",
|
||
"li",
|
||
"link",
|
||
"main",
|
||
"menu",
|
||
"menuitem",
|
||
"nav",
|
||
"noframes",
|
||
"ol",
|
||
"optgroup",
|
||
"option",
|
||
"p",
|
||
"param",
|
||
"search",
|
||
"section",
|
||
"summary",
|
||
"table",
|
||
"tbody",
|
||
"td",
|
||
"tfoot",
|
||
"th",
|
||
"thead",
|
||
"title",
|
||
"tr",
|
||
"track",
|
||
"ul"
|
||
];
|
||
var htmlRawNames = ["pre", "script", "style", "textarea"];
|
||
|
||
// node_modules/micromark-core-commonmark/lib/html-flow.js
|
||
var htmlFlow = {
|
||
name: "htmlFlow",
|
||
tokenize: tokenizeHtmlFlow,
|
||
resolveTo: resolveToHtmlFlow,
|
||
concrete: true
|
||
};
|
||
var blankLineBefore = {
|
||
tokenize: tokenizeBlankLineBefore,
|
||
partial: true
|
||
};
|
||
var nonLazyContinuationStart = {
|
||
tokenize: tokenizeNonLazyContinuationStart,
|
||
partial: true
|
||
};
|
||
function resolveToHtmlFlow(events) {
|
||
let index2 = events.length;
|
||
while (index2--) {
|
||
if (events[index2][0] === "enter" && events[index2][1].type === "htmlFlow") {
|
||
break;
|
||
}
|
||
}
|
||
if (index2 > 1 && events[index2 - 2][1].type === "linePrefix") {
|
||
events[index2][1].start = events[index2 - 2][1].start;
|
||
events[index2 + 1][1].start = events[index2 - 2][1].start;
|
||
events.splice(index2 - 2, 2);
|
||
}
|
||
return events;
|
||
}
|
||
function tokenizeHtmlFlow(effects, ok3, nok) {
|
||
const self = this;
|
||
let marker;
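    // Note: marker records which of CommonMark's seven HTML (flow) block kinds was opened,
    // as set in the states below: 1 raw (pre/script/style/textarea), 2 comment, 3 instruction,
    // 4 declaration, 5 CDATA, 6 basic tag name, 7 complete tag.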
|
||
let closingTag;
|
||
let buffer;
|
||
let index2;
|
||
let markerB;
|
||
return start;
|
||
function start(code2) {
|
||
return before(code2);
|
||
}
|
||
function before(code2) {
|
||
effects.enter("htmlFlow");
|
||
effects.enter("htmlFlowData");
|
||
effects.consume(code2);
|
||
return open;
|
||
}
|
||
function open(code2) {
|
||
if (code2 === 33) {
|
||
effects.consume(code2);
|
||
return declarationOpen;
|
||
}
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
closingTag = true;
|
||
return tagCloseStart;
|
||
}
|
||
if (code2 === 63) {
|
||
effects.consume(code2);
|
||
marker = 3;
|
||
return self.interrupt ? ok3 : continuationDeclarationInside;
|
||
}
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
buffer = String.fromCharCode(code2);
|
||
return tagName;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function declarationOpen(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
marker = 2;
|
||
return commentOpenInside;
|
||
}
|
||
if (code2 === 91) {
|
||
effects.consume(code2);
|
||
marker = 5;
|
||
index2 = 0;
|
||
return cdataOpenInside;
|
||
}
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
marker = 4;
|
||
return self.interrupt ? ok3 : continuationDeclarationInside;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function commentOpenInside(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return self.interrupt ? ok3 : continuationDeclarationInside;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function cdataOpenInside(code2) {
|
||
const value = "CDATA[";
|
||
if (code2 === value.charCodeAt(index2++)) {
|
||
effects.consume(code2);
|
||
if (index2 === value.length) {
|
||
return self.interrupt ? ok3 : continuation;
|
||
}
|
||
return cdataOpenInside;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function tagCloseStart(code2) {
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
buffer = String.fromCharCode(code2);
|
||
return tagName;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function tagName(code2) {
|
||
if (code2 === null || code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
|
||
const slash = code2 === 47;
|
||
const name = buffer.toLowerCase();
|
||
if (!slash && !closingTag && htmlRawNames.includes(name)) {
|
||
marker = 1;
|
||
return self.interrupt ? ok3(code2) : continuation(code2);
|
||
}
|
||
if (htmlBlockNames.includes(buffer.toLowerCase())) {
|
||
marker = 6;
|
||
if (slash) {
|
||
effects.consume(code2);
|
||
return basicSelfClosing;
|
||
}
|
||
return self.interrupt ? ok3(code2) : continuation(code2);
|
||
}
|
||
marker = 7;
|
||
return self.interrupt && !self.parser.lazy[self.now().line] ? nok(code2) : closingTag ? completeClosingTagAfter(code2) : completeAttributeNameBefore(code2);
|
||
}
|
||
if (code2 === 45 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
buffer += String.fromCharCode(code2);
|
||
return tagName;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function basicSelfClosing(code2) {
|
||
if (code2 === 62) {
|
||
effects.consume(code2);
|
||
return self.interrupt ? ok3 : continuation;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function completeClosingTagAfter(code2) {
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeClosingTagAfter;
|
||
}
|
||
return completeEnd(code2);
|
||
}
|
||
function completeAttributeNameBefore(code2) {
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
return completeEnd;
|
||
}
|
||
if (code2 === 58 || code2 === 95 || asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeName;
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeNameBefore;
|
||
}
|
||
return completeEnd(code2);
|
||
}
|
||
function completeAttributeName(code2) {
|
||
if (code2 === 45 || code2 === 46 || code2 === 58 || code2 === 95 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeName;
|
||
}
|
||
return completeAttributeNameAfter(code2);
|
||
}
|
||
function completeAttributeNameAfter(code2) {
|
||
if (code2 === 61) {
|
||
effects.consume(code2);
|
||
return completeAttributeValueBefore;
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeNameAfter;
|
||
}
|
||
return completeAttributeNameBefore(code2);
|
||
}
|
||
function completeAttributeValueBefore(code2) {
|
||
if (code2 === null || code2 === 60 || code2 === 61 || code2 === 62 || code2 === 96) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 34 || code2 === 39) {
|
||
effects.consume(code2);
|
||
markerB = code2;
|
||
return completeAttributeValueQuoted;
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeValueBefore;
|
||
}
|
||
return completeAttributeValueUnquoted(code2);
|
||
}
|
||
function completeAttributeValueQuoted(code2) {
|
||
if (code2 === markerB) {
|
||
effects.consume(code2);
|
||
markerB = null;
|
||
return completeAttributeValueQuotedAfter;
|
||
}
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
return nok(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return completeAttributeValueQuoted;
|
||
}
|
||
function completeAttributeValueUnquoted(code2) {
|
||
if (code2 === null || code2 === 34 || code2 === 39 || code2 === 47 || code2 === 60 || code2 === 61 || code2 === 62 || code2 === 96 || markdownLineEndingOrSpace(code2)) {
|
||
return completeAttributeNameAfter(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return completeAttributeValueUnquoted;
|
||
}
|
||
function completeAttributeValueQuotedAfter(code2) {
|
||
if (code2 === 47 || code2 === 62 || markdownSpace(code2)) {
|
||
return completeAttributeNameBefore(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function completeEnd(code2) {
|
||
if (code2 === 62) {
|
||
effects.consume(code2);
|
||
return completeAfter;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function completeAfter(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
return continuation(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeAfter;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function continuation(code2) {
|
||
if (code2 === 45 && marker === 2) {
|
||
effects.consume(code2);
|
||
return continuationCommentInside;
|
||
}
|
||
if (code2 === 60 && marker === 1) {
|
||
effects.consume(code2);
|
||
return continuationRawTagOpen;
|
||
}
|
||
if (code2 === 62 && marker === 4) {
|
||
effects.consume(code2);
|
||
return continuationClose;
|
||
}
|
||
if (code2 === 63 && marker === 3) {
|
||
effects.consume(code2);
|
||
return continuationDeclarationInside;
|
||
}
|
||
if (code2 === 93 && marker === 5) {
|
||
effects.consume(code2);
|
||
return continuationCdataInside;
|
||
}
|
||
if (markdownLineEnding(code2) && (marker === 6 || marker === 7)) {
|
||
effects.exit("htmlFlowData");
|
||
return effects.check(
|
||
blankLineBefore,
|
||
continuationAfter,
|
||
continuationStart
|
||
)(code2);
|
||
}
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("htmlFlowData");
|
||
return continuationStart(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return continuation;
|
||
}
|
||
function continuationStart(code2) {
|
||
return effects.check(
|
||
nonLazyContinuationStart,
|
||
continuationStartNonLazy,
|
||
continuationAfter
|
||
)(code2);
|
||
}
|
||
function continuationStartNonLazy(code2) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return continuationBefore;
|
||
}
|
||
function continuationBefore(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
return continuationStart(code2);
|
||
}
|
||
effects.enter("htmlFlowData");
|
||
return continuation(code2);
|
||
}
|
||
function continuationCommentInside(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return continuationDeclarationInside;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationRawTagOpen(code2) {
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
buffer = "";
|
||
return continuationRawEndTag;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationRawEndTag(code2) {
|
||
if (code2 === 62) {
|
||
const name = buffer.toLowerCase();
|
||
if (htmlRawNames.includes(name)) {
|
||
effects.consume(code2);
|
||
return continuationClose;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
if (asciiAlpha(code2) && buffer.length < 8) {
|
||
effects.consume(code2);
|
||
buffer += String.fromCharCode(code2);
|
||
return continuationRawEndTag;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationCdataInside(code2) {
|
||
if (code2 === 93) {
|
||
effects.consume(code2);
|
||
return continuationDeclarationInside;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationDeclarationInside(code2) {
|
||
if (code2 === 62) {
|
||
effects.consume(code2);
|
||
return continuationClose;
|
||
}
|
||
if (code2 === 45 && marker === 2) {
|
||
effects.consume(code2);
|
||
return continuationDeclarationInside;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationClose(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("htmlFlowData");
|
||
return continuationAfter(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return continuationClose;
|
||
}
|
||
function continuationAfter(code2) {
|
||
effects.exit("htmlFlow");
|
||
return ok3(code2);
|
||
}
|
||
}
|
||
function tokenizeNonLazyContinuationStart(effects, ok3, nok) {
|
||
const self = this;
|
||
return start;
|
||
function start(code2) {
|
||
if (markdownLineEnding(code2)) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return after;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function after(code2) {
|
||
return self.parser.lazy[self.now().line] ? nok(code2) : ok3(code2);
|
||
}
|
||
}
|
||
function tokenizeBlankLineBefore(effects, ok3, nok) {
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return effects.attempt(blankLine, ok3, nok);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/html-text.js
|
||
var htmlText = {
|
||
name: "htmlText",
|
||
tokenize: tokenizeHtmlText
|
||
};
|
||
function tokenizeHtmlText(effects, ok3, nok) {
|
||
const self = this;
|
||
let marker;
|
||
let index2;
|
||
let returnState;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("htmlText");
|
||
effects.enter("htmlTextData");
|
||
effects.consume(code2);
|
||
return open;
|
||
}
|
||
function open(code2) {
|
||
if (code2 === 33) {
|
||
effects.consume(code2);
|
||
return declarationOpen;
|
||
}
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
return tagCloseStart;
|
||
}
|
||
if (code2 === 63) {
|
||
effects.consume(code2);
|
||
return instruction;
|
||
}
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpen;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function declarationOpen(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return commentOpenInside;
|
||
}
|
||
if (code2 === 91) {
|
||
effects.consume(code2);
|
||
index2 = 0;
|
||
return cdataOpenInside;
|
||
}
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return declaration;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function commentOpenInside(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return commentEnd;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function comment(code2) {
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return commentClose;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = comment;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return comment;
|
||
}
|
||
function commentClose(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return commentEnd;
|
||
}
|
||
return comment(code2);
|
||
}
|
||
function commentEnd(code2) {
|
||
return code2 === 62 ? end(code2) : code2 === 45 ? commentClose(code2) : comment(code2);
|
||
}
|
||
function cdataOpenInside(code2) {
|
||
const value = "CDATA[";
|
||
if (code2 === value.charCodeAt(index2++)) {
|
||
effects.consume(code2);
|
||
return index2 === value.length ? cdata : cdataOpenInside;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function cdata(code2) {
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 93) {
|
||
effects.consume(code2);
|
||
return cdataClose;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = cdata;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return cdata;
|
||
}
|
||
function cdataClose(code2) {
|
||
if (code2 === 93) {
|
||
effects.consume(code2);
|
||
return cdataEnd;
|
||
}
|
||
return cdata(code2);
|
||
}
|
||
function cdataEnd(code2) {
|
||
if (code2 === 62) {
|
||
return end(code2);
|
||
}
|
||
if (code2 === 93) {
|
||
effects.consume(code2);
|
||
return cdataEnd;
|
||
}
|
||
return cdata(code2);
|
||
}
|
||
function declaration(code2) {
|
||
if (code2 === null || code2 === 62) {
|
||
return end(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = declaration;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return declaration;
|
||
}
|
||
function instruction(code2) {
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 63) {
|
||
effects.consume(code2);
|
||
return instructionClose;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = instruction;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return instruction;
|
||
}
|
||
function instructionClose(code2) {
|
||
return code2 === 62 ? end(code2) : instruction(code2);
|
||
}
|
||
function tagCloseStart(code2) {
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return tagClose;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function tagClose(code2) {
|
||
if (code2 === 45 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
return tagClose;
|
||
}
|
||
return tagCloseBetween(code2);
|
||
}
|
||
function tagCloseBetween(code2) {
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagCloseBetween;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return tagCloseBetween;
|
||
}
|
||
return end(code2);
|
||
}
|
||
function tagOpen(code2) {
|
||
if (code2 === 45 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpen;
|
||
}
|
||
if (code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
|
||
return tagOpenBetween(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function tagOpenBetween(code2) {
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
return end;
|
||
}
|
||
if (code2 === 58 || code2 === 95 || asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeName;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagOpenBetween;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenBetween;
|
||
}
|
||
return end(code2);
|
||
}
|
||
function tagOpenAttributeName(code2) {
|
||
if (code2 === 45 || code2 === 46 || code2 === 58 || code2 === 95 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeName;
|
||
}
|
||
return tagOpenAttributeNameAfter(code2);
|
||
}
|
||
function tagOpenAttributeNameAfter(code2) {
|
||
if (code2 === 61) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueBefore;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagOpenAttributeNameAfter;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeNameAfter;
|
||
}
|
||
return tagOpenBetween(code2);
|
||
}
|
||
function tagOpenAttributeValueBefore(code2) {
|
||
if (code2 === null || code2 === 60 || code2 === 61 || code2 === 62 || code2 === 96) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 34 || code2 === 39) {
|
||
effects.consume(code2);
|
||
marker = code2;
|
||
return tagOpenAttributeValueQuoted;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagOpenAttributeValueBefore;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueBefore;
|
||
}
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueUnquoted;
|
||
}
|
||
function tagOpenAttributeValueQuoted(code2) {
|
||
if (code2 === marker) {
|
||
effects.consume(code2);
|
||
marker = void 0;
|
||
return tagOpenAttributeValueQuotedAfter;
|
||
}
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagOpenAttributeValueQuoted;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueQuoted;
|
||
}
|
||
function tagOpenAttributeValueUnquoted(code2) {
|
||
if (code2 === null || code2 === 34 || code2 === 39 || code2 === 60 || code2 === 61 || code2 === 96) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
|
||
return tagOpenBetween(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueUnquoted;
|
||
}
|
||
function tagOpenAttributeValueQuotedAfter(code2) {
|
||
if (code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
|
||
return tagOpenBetween(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function end(code2) {
|
||
if (code2 === 62) {
|
||
effects.consume(code2);
|
||
effects.exit("htmlTextData");
|
||
effects.exit("htmlText");
|
||
return ok3;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function lineEndingBefore(code2) {
|
||
effects.exit("htmlTextData");
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return lineEndingAfter;
|
||
}
|
||
function lineEndingAfter(code2) {
|
||
return markdownSpace(code2) ? factorySpace(
|
||
effects,
|
||
lineEndingAfterPrefix,
|
||
"linePrefix",
|
||
self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4
|
||
)(code2) : lineEndingAfterPrefix(code2);
|
||
}
|
||
function lineEndingAfterPrefix(code2) {
|
||
effects.enter("htmlTextData");
|
||
return returnState(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/label-end.js
|
||
var labelEnd = {
|
||
name: "labelEnd",
|
||
tokenize: tokenizeLabelEnd,
|
||
resolveTo: resolveToLabelEnd,
|
||
resolveAll: resolveAllLabelEnd
|
||
};
|
||
var resourceConstruct = {
|
||
tokenize: tokenizeResource
|
||
};
|
||
var referenceFullConstruct = {
|
||
tokenize: tokenizeReferenceFull
|
||
};
|
||
var referenceCollapsedConstruct = {
|
||
tokenize: tokenizeReferenceCollapsed
|
||
};
|
||
function resolveAllLabelEnd(events) {
|
||
let index2 = -1;
|
||
while (++index2 < events.length) {
|
||
const token = events[index2][1];
|
||
if (token.type === "labelImage" || token.type === "labelLink" || token.type === "labelEnd") {
|
||
events.splice(index2 + 1, token.type === "labelImage" ? 4 : 2);
|
||
token.type = "data";
|
||
index2++;
|
||
}
|
||
}
|
||
return events;
|
||
}
|
||
function resolveToLabelEnd(events, context) {
|
||
let index2 = events.length;
|
||
let offset = 0;
|
||
let token;
|
||
let open;
|
||
let close;
|
||
let media;
|
||
while (index2--) {
|
||
token = events[index2][1];
|
||
if (open) {
|
||
if (token.type === "link" || token.type === "labelLink" && token._inactive) {
|
||
break;
|
||
}
|
||
if (events[index2][0] === "enter" && token.type === "labelLink") {
|
||
token._inactive = true;
|
||
}
|
||
} else if (close) {
|
||
if (events[index2][0] === "enter" && (token.type === "labelImage" || token.type === "labelLink") && !token._balanced) {
|
||
open = index2;
|
||
if (token.type !== "labelLink") {
|
||
offset = 2;
|
||
break;
|
||
}
|
||
}
|
||
} else if (token.type === "labelEnd") {
|
||
close = index2;
|
||
}
|
||
}
|
||
const group = {
|
||
type: events[open][1].type === "labelLink" ? "link" : "image",
|
||
start: Object.assign({}, events[open][1].start),
|
||
end: Object.assign({}, events[events.length - 1][1].end)
|
||
};
|
||
const label = {
|
||
type: "label",
|
||
start: Object.assign({}, events[open][1].start),
|
||
end: Object.assign({}, events[close][1].end)
|
||
};
|
||
const text4 = {
|
||
type: "labelText",
|
||
start: Object.assign({}, events[open + offset + 2][1].end),
|
||
end: Object.assign({}, events[close - 2][1].start)
|
||
};
|
||
media = [
|
||
["enter", group, context],
|
||
["enter", label, context]
|
||
];
|
||
media = push(media, events.slice(open + 1, open + offset + 3));
|
||
media = push(media, [["enter", text4, context]]);
|
||
media = push(
|
||
media,
|
||
resolveAll(
|
||
context.parser.constructs.insideSpan.null,
|
||
events.slice(open + offset + 4, close - 3),
|
||
context
|
||
)
|
||
);
|
||
media = push(media, [
|
||
["exit", text4, context],
|
||
events[close - 2],
|
||
events[close - 1],
|
||
["exit", label, context]
|
||
]);
|
||
media = push(media, events.slice(close + 1));
|
||
media = push(media, [["exit", group, context]]);
|
||
splice(events, open, events.length, media);
|
||
return events;
|
||
}
|
||
function tokenizeLabelEnd(effects, ok3, nok) {
|
||
const self = this;
|
||
let index2 = self.events.length;
|
||
let labelStart;
|
||
let defined;
|
||
while (index2--) {
|
||
if ((self.events[index2][1].type === "labelImage" || self.events[index2][1].type === "labelLink") && !self.events[index2][1]._balanced) {
|
||
labelStart = self.events[index2][1];
|
||
break;
|
||
}
|
||
}
|
||
return start;
|
||
function start(code2) {
|
||
if (!labelStart) {
|
||
return nok(code2);
|
||
}
|
||
if (labelStart._inactive) {
|
||
return labelEndNok(code2);
|
||
}
|
||
defined = self.parser.defined.includes(
|
||
normalizeIdentifier(
|
||
self.sliceSerialize({
|
||
start: labelStart.end,
|
||
end: self.now()
|
||
})
|
||
)
|
||
);
|
||
effects.enter("labelEnd");
|
||
effects.enter("labelMarker");
|
||
effects.consume(code2);
|
||
effects.exit("labelMarker");
|
||
effects.exit("labelEnd");
|
||
return after;
|
||
}
|
||
function after(code2) {
|
||
if (code2 === 40) {
|
||
return effects.attempt(
|
||
resourceConstruct,
|
||
labelEndOk,
|
||
defined ? labelEndOk : labelEndNok
|
||
)(code2);
|
||
}
|
||
if (code2 === 91) {
|
||
return effects.attempt(
|
||
referenceFullConstruct,
|
||
labelEndOk,
|
||
defined ? referenceNotFull : labelEndNok
|
||
)(code2);
|
||
}
|
||
return defined ? labelEndOk(code2) : labelEndNok(code2);
|
||
}
|
||
function referenceNotFull(code2) {
|
||
return effects.attempt(
|
||
referenceCollapsedConstruct,
|
||
labelEndOk,
|
||
labelEndNok
|
||
)(code2);
|
||
}
|
||
function labelEndOk(code2) {
|
||
return ok3(code2);
|
||
}
|
||
function labelEndNok(code2) {
|
||
labelStart._balanced = true;
|
||
return nok(code2);
|
||
}
|
||
}
|
||
function tokenizeResource(effects, ok3, nok) {
|
||
return resourceStart;
|
||
function resourceStart(code2) {
|
||
effects.enter("resource");
|
||
effects.enter("resourceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("resourceMarker");
|
||
return resourceBefore;
|
||
}
|
||
function resourceBefore(code2) {
|
||
return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, resourceOpen)(code2) : resourceOpen(code2);
|
||
}
|
||
function resourceOpen(code2) {
|
||
if (code2 === 41) {
|
||
return resourceEnd(code2);
|
||
}
|
||
return factoryDestination(
|
||
effects,
|
||
resourceDestinationAfter,
|
||
resourceDestinationMissing,
|
||
"resourceDestination",
|
||
"resourceDestinationLiteral",
|
||
"resourceDestinationLiteralMarker",
|
||
"resourceDestinationRaw",
|
||
"resourceDestinationString",
|
||
32
|
||
)(code2);
|
||
}
|
||
function resourceDestinationAfter(code2) {
|
||
return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, resourceBetween)(code2) : resourceEnd(code2);
|
||
}
|
||
function resourceDestinationMissing(code2) {
|
||
return nok(code2);
|
||
}
|
||
function resourceBetween(code2) {
|
||
if (code2 === 34 || code2 === 39 || code2 === 40) {
|
||
return factoryTitle(
|
||
effects,
|
||
resourceTitleAfter,
|
||
nok,
|
||
"resourceTitle",
|
||
"resourceTitleMarker",
|
||
"resourceTitleString"
|
||
)(code2);
|
||
}
|
||
return resourceEnd(code2);
|
||
}
|
||
function resourceTitleAfter(code2) {
|
||
return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, resourceEnd)(code2) : resourceEnd(code2);
|
||
}
|
||
function resourceEnd(code2) {
|
||
if (code2 === 41) {
|
||
effects.enter("resourceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("resourceMarker");
|
||
effects.exit("resource");
|
||
return ok3;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
function tokenizeReferenceFull(effects, ok3, nok) {
|
||
const self = this;
|
||
return referenceFull;
|
||
function referenceFull(code2) {
|
||
return factoryLabel.call(
|
||
self,
|
||
effects,
|
||
referenceFullAfter,
|
||
referenceFullMissing,
|
||
"reference",
|
||
"referenceMarker",
|
||
"referenceString"
|
||
)(code2);
|
||
}
|
||
function referenceFullAfter(code2) {
|
||
return self.parser.defined.includes(
|
||
normalizeIdentifier(
|
||
self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
|
||
)
|
||
) ? ok3(code2) : nok(code2);
|
||
}
|
||
function referenceFullMissing(code2) {
|
||
return nok(code2);
|
||
}
|
||
}
|
||
function tokenizeReferenceCollapsed(effects, ok3, nok) {
|
||
return referenceCollapsedStart;
|
||
function referenceCollapsedStart(code2) {
|
||
effects.enter("reference");
|
||
effects.enter("referenceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("referenceMarker");
|
||
return referenceCollapsedOpen;
|
||
}
|
||
function referenceCollapsedOpen(code2) {
|
||
if (code2 === 93) {
|
||
effects.enter("referenceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("referenceMarker");
|
||
effects.exit("reference");
|
||
return ok3;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
// node_modules/micromark-core-commonmark/lib/label-start-image.js
var labelStartImage = {
  name: "labelStartImage",
  tokenize: tokenizeLabelStartImage,
  resolveAll: labelEnd.resolveAll
};
function tokenizeLabelStartImage(effects, ok3, nok) {
  const self = this;
  return start;
  function start(code2) {
    effects.enter("labelImage");
    effects.enter("labelImageMarker");
    effects.consume(code2);
    effects.exit("labelImageMarker");
    return open;
  }
  function open(code2) {
    if (code2 === 91) {
      effects.enter("labelMarker");
      effects.consume(code2);
      effects.exit("labelMarker");
      effects.exit("labelImage");
      return after;
    }
    return nok(code2);
  }
  function after(code2) {
    return code2 === 94 && "_hiddenFootnoteSupport" in self.parser.constructs ? nok(code2) : ok3(code2);
  }
}

// node_modules/micromark-core-commonmark/lib/label-start-link.js
var labelStartLink = {
  name: "labelStartLink",
  tokenize: tokenizeLabelStartLink,
  resolveAll: labelEnd.resolveAll
};
function tokenizeLabelStartLink(effects, ok3, nok) {
  const self = this;
  return start;
  function start(code2) {
    effects.enter("labelLink");
    effects.enter("labelMarker");
    effects.consume(code2);
    effects.exit("labelMarker");
    effects.exit("labelLink");
    return after;
  }
  function after(code2) {
    return code2 === 94 && "_hiddenFootnoteSupport" in self.parser.constructs ? nok(code2) : ok3(code2);
  }
}

// node_modules/micromark-core-commonmark/lib/line-ending.js
var lineEnding = {
  name: "lineEnding",
  tokenize: tokenizeLineEnding
};
function tokenizeLineEnding(effects, ok3) {
  return start;
  function start(code2) {
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    return factorySpace(effects, ok3, "linePrefix");
  }
}

// node_modules/micromark-core-commonmark/lib/thematic-break.js
var thematicBreak = {
  name: "thematicBreak",
  tokenize: tokenizeThematicBreak
};
function tokenizeThematicBreak(effects, ok3, nok) {
  let size = 0;
  let marker;
  return start;
  function start(code2) {
    effects.enter("thematicBreak");
    return before(code2);
  }
  function before(code2) {
    marker = code2;
    return atBreak(code2);
  }
  function atBreak(code2) {
    if (code2 === marker) {
      effects.enter("thematicBreakSequence");
      return sequence(code2);
    }
    if (size >= 3 && (code2 === null || markdownLineEnding(code2))) {
      effects.exit("thematicBreak");
      return ok3(code2);
    }
    return nok(code2);
  }
  function sequence(code2) {
    if (code2 === marker) {
      effects.consume(code2);
      size++;
      return sequence;
    }
    effects.exit("thematicBreakSequence");
    return markdownSpace(code2) ? factorySpace(effects, atBreak, "whitespace")(code2) : atBreak(code2);
  }
}

// node_modules/micromark-core-commonmark/lib/list.js
|
||
var list = {
|
||
name: "list",
|
||
tokenize: tokenizeListStart,
|
||
continuation: {
|
||
tokenize: tokenizeListContinuation
|
||
},
|
||
exit: tokenizeListEnd
|
||
};
|
||
var listItemPrefixWhitespaceConstruct = {
|
||
tokenize: tokenizeListItemPrefixWhitespace,
|
||
partial: true
|
||
};
|
||
var indentConstruct = {
|
||
tokenize: tokenizeIndent,
|
||
partial: true
|
||
};
|
||
function tokenizeListStart(effects, ok3, nok) {
|
||
const self = this;
|
||
const tail = self.events[self.events.length - 1];
|
||
let initialSize = tail && tail[1].type === "linePrefix" ? tail[2].sliceSerialize(tail[1], true).length : 0;
|
||
let size = 0;
|
||
return start;
|
||
function start(code2) {
|
||
const kind = self.containerState.type || (code2 === 42 || code2 === 43 || code2 === 45 ? "listUnordered" : "listOrdered");
|
||
if (kind === "listUnordered" ? !self.containerState.marker || code2 === self.containerState.marker : asciiDigit(code2)) {
|
||
if (!self.containerState.type) {
|
||
self.containerState.type = kind;
|
||
effects.enter(kind, {
|
||
_container: true
|
||
});
|
||
}
|
||
if (kind === "listUnordered") {
|
||
effects.enter("listItemPrefix");
|
||
return code2 === 42 || code2 === 45 ? effects.check(thematicBreak, nok, atMarker)(code2) : atMarker(code2);
|
||
}
|
||
if (!self.interrupt || code2 === 49) {
|
||
effects.enter("listItemPrefix");
|
||
effects.enter("listItemValue");
|
||
return inside(code2);
|
||
}
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function inside(code2) {
|
||
if (asciiDigit(code2) && ++size < 10) {
|
||
effects.consume(code2);
|
||
return inside;
|
||
}
|
||
if ((!self.interrupt || size < 2) && (self.containerState.marker ? code2 === self.containerState.marker : code2 === 41 || code2 === 46)) {
|
||
effects.exit("listItemValue");
|
||
return atMarker(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function atMarker(code2) {
|
||
effects.enter("listItemMarker");
|
||
effects.consume(code2);
|
||
effects.exit("listItemMarker");
|
||
self.containerState.marker = self.containerState.marker || code2;
|
||
return effects.check(
|
||
blankLine,
|
||
// Can’t be empty when interrupting.
|
||
self.interrupt ? nok : onBlank,
|
||
effects.attempt(
|
||
listItemPrefixWhitespaceConstruct,
|
||
endOfPrefix,
|
||
otherPrefix
|
||
)
|
||
);
|
||
}
|
||
function onBlank(code2) {
|
||
self.containerState.initialBlankLine = true;
|
||
initialSize++;
|
||
return endOfPrefix(code2);
|
||
}
|
||
function otherPrefix(code2) {
|
||
if (markdownSpace(code2)) {
|
||
effects.enter("listItemPrefixWhitespace");
|
||
effects.consume(code2);
|
||
effects.exit("listItemPrefixWhitespace");
|
||
return endOfPrefix;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function endOfPrefix(code2) {
|
||
self.containerState.size = initialSize + self.sliceSerialize(effects.exit("listItemPrefix"), true).length;
|
||
return ok3(code2);
|
||
}
|
||
}
|
||
function tokenizeListContinuation(effects, ok3, nok) {
|
||
const self = this;
|
||
self.containerState._closeFlow = void 0;
|
||
return effects.check(blankLine, onBlank, notBlank);
|
||
function onBlank(code2) {
|
||
self.containerState.furtherBlankLines = self.containerState.furtherBlankLines || self.containerState.initialBlankLine;
|
||
return factorySpace(
|
||
effects,
|
||
ok3,
|
||
"listItemIndent",
|
||
self.containerState.size + 1
|
||
)(code2);
|
||
}
|
||
function notBlank(code2) {
|
||
if (self.containerState.furtherBlankLines || !markdownSpace(code2)) {
|
||
self.containerState.furtherBlankLines = void 0;
|
||
self.containerState.initialBlankLine = void 0;
|
||
return notInCurrentItem(code2);
|
||
}
|
||
self.containerState.furtherBlankLines = void 0;
|
||
self.containerState.initialBlankLine = void 0;
|
||
return effects.attempt(indentConstruct, ok3, notInCurrentItem)(code2);
|
||
}
|
||
function notInCurrentItem(code2) {
|
||
self.containerState._closeFlow = true;
|
||
self.interrupt = void 0;
|
||
return factorySpace(
|
||
effects,
|
||
effects.attempt(list, ok3, nok),
|
||
"linePrefix",
|
||
self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4
|
||
)(code2);
|
||
}
|
||
}
|
||
function tokenizeIndent(effects, ok3, nok) {
|
||
const self = this;
|
||
return factorySpace(
|
||
effects,
|
||
afterPrefix,
|
||
"listItemIndent",
|
||
self.containerState.size + 1
|
||
);
|
||
function afterPrefix(code2) {
|
||
const tail = self.events[self.events.length - 1];
|
||
return tail && tail[1].type === "listItemIndent" && tail[2].sliceSerialize(tail[1], true).length === self.containerState.size ? ok3(code2) : nok(code2);
|
||
}
|
||
}
|
||
function tokenizeListEnd(effects) {
|
||
effects.exit(this.containerState.type);
|
||
}
|
||
function tokenizeListItemPrefixWhitespace(effects, ok3, nok) {
|
||
const self = this;
|
||
return factorySpace(
|
||
effects,
|
||
afterPrefix,
|
||
"listItemPrefixWhitespace",
|
||
self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4 + 1
|
||
);
|
||
function afterPrefix(code2) {
|
||
const tail = self.events[self.events.length - 1];
|
||
return !markdownSpace(code2) && tail && tail[1].type === "listItemPrefixWhitespace" ? ok3(code2) : nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/setext-underline.js
|
||
var setextUnderline = {
|
||
name: "setextUnderline",
|
||
tokenize: tokenizeSetextUnderline,
|
||
resolveTo: resolveToSetextUnderline
|
||
};
|
||
function resolveToSetextUnderline(events, context) {
|
||
let index2 = events.length;
|
||
let content3;
|
||
let text4;
|
||
let definition3;
|
||
while (index2--) {
|
||
if (events[index2][0] === "enter") {
|
||
if (events[index2][1].type === "content") {
|
||
content3 = index2;
|
||
break;
|
||
}
|
||
if (events[index2][1].type === "paragraph") {
|
||
text4 = index2;
|
||
}
|
||
} else {
|
||
if (events[index2][1].type === "content") {
|
||
events.splice(index2, 1);
|
||
}
|
||
if (!definition3 && events[index2][1].type === "definition") {
|
||
definition3 = index2;
|
||
}
|
||
}
|
||
}
|
||
const heading2 = {
|
||
type: "setextHeading",
|
||
start: Object.assign({}, events[text4][1].start),
|
||
end: Object.assign({}, events[events.length - 1][1].end)
|
||
};
|
||
events[text4][1].type = "setextHeadingText";
|
||
if (definition3) {
|
||
events.splice(text4, 0, ["enter", heading2, context]);
|
||
events.splice(definition3 + 1, 0, ["exit", events[content3][1], context]);
|
||
events[content3][1].end = Object.assign({}, events[definition3][1].end);
|
||
} else {
|
||
events[content3][1] = heading2;
|
||
}
|
||
events.push(["exit", heading2, context]);
|
||
return events;
|
||
}
|
||
function tokenizeSetextUnderline(effects, ok3, nok) {
|
||
const self = this;
|
||
let marker;
|
||
return start;
|
||
function start(code2) {
|
||
let index2 = self.events.length;
|
||
let paragraph2;
|
||
while (index2--) {
|
||
if (self.events[index2][1].type !== "lineEnding" && self.events[index2][1].type !== "linePrefix" && self.events[index2][1].type !== "content") {
|
||
paragraph2 = self.events[index2][1].type === "paragraph";
|
||
break;
|
||
}
|
||
}
|
||
if (!self.parser.lazy[self.now().line] && (self.interrupt || paragraph2)) {
|
||
effects.enter("setextHeadingLine");
|
||
marker = code2;
|
||
return before(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function before(code2) {
|
||
effects.enter("setextHeadingLineSequence");
|
||
return inside(code2);
|
||
}
|
||
function inside(code2) {
|
||
if (code2 === marker) {
|
||
effects.consume(code2);
|
||
return inside;
|
||
}
|
||
effects.exit("setextHeadingLineSequence");
|
||
return markdownSpace(code2) ? factorySpace(effects, after, "lineSuffix")(code2) : after(code2);
|
||
}
|
||
function after(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("setextHeadingLine");
|
||
return ok3(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark/lib/initialize/flow.js
|
||
var flow = {
|
||
tokenize: initializeFlow
|
||
};
|
||
function initializeFlow(effects) {
|
||
const self = this;
|
||
const initial = effects.attempt(
|
||
// Try to parse a blank line.
|
||
blankLine,
|
||
atBlankEnding,
|
||
// Try to parse initial flow (essentially, only code).
|
||
effects.attempt(
|
||
this.parser.constructs.flowInitial,
|
||
afterConstruct,
|
||
factorySpace(
|
||
effects,
|
||
effects.attempt(
|
||
this.parser.constructs.flow,
|
||
afterConstruct,
|
||
effects.attempt(content2, afterConstruct)
|
||
),
|
||
"linePrefix"
|
||
)
|
||
)
|
||
);
|
||
return initial;
|
||
function atBlankEnding(code2) {
|
||
if (code2 === null) {
|
||
effects.consume(code2);
|
||
return;
|
||
}
|
||
effects.enter("lineEndingBlank");
|
||
effects.consume(code2);
|
||
effects.exit("lineEndingBlank");
|
||
self.currentConstruct = void 0;
|
||
return initial;
|
||
}
|
||
function afterConstruct(code2) {
|
||
if (code2 === null) {
|
||
effects.consume(code2);
|
||
return;
|
||
}
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
self.currentConstruct = void 0;
|
||
return initial;
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark/lib/initialize/text.js
|
||
var resolver = {
|
||
resolveAll: createResolver()
|
||
};
|
||
var string = initializeFactory("string");
|
||
var text = initializeFactory("text");
|
||
function initializeFactory(field) {
|
||
return {
|
||
tokenize: initializeText,
|
||
resolveAll: createResolver(
|
||
field === "text" ? resolveAllLineSuffixes : void 0
|
||
)
|
||
};
|
||
function initializeText(effects) {
|
||
const self = this;
|
||
const constructs2 = this.parser.constructs[field];
|
||
const text4 = effects.attempt(constructs2, start, notText);
|
||
return start;
|
||
function start(code2) {
|
||
return atBreak(code2) ? text4(code2) : notText(code2);
|
||
}
|
||
function notText(code2) {
|
||
if (code2 === null) {
|
||
effects.consume(code2);
|
||
return;
|
||
}
|
||
effects.enter("data");
|
||
effects.consume(code2);
|
||
return data;
|
||
}
|
||
function data(code2) {
|
||
if (atBreak(code2)) {
|
||
effects.exit("data");
|
||
return text4(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return data;
|
||
}
|
||
function atBreak(code2) {
|
||
if (code2 === null) {
|
||
return true;
|
||
}
|
||
const list4 = constructs2[code2];
|
||
let index2 = -1;
|
||
if (list4) {
|
||
while (++index2 < list4.length) {
|
||
const item = list4[index2];
|
||
if (!item.previous || item.previous.call(self, self.previous)) {
|
||
return true;
|
||
}
|
||
}
|
||
}
|
||
return false;
|
||
}
|
||
}
|
||
}
|
||
function createResolver(extraResolver) {
|
||
return resolveAllText;
|
||
function resolveAllText(events, context) {
|
||
let index2 = -1;
|
||
let enter;
|
||
while (++index2 <= events.length) {
|
||
if (enter === void 0) {
|
||
if (events[index2] && events[index2][1].type === "data") {
|
||
enter = index2;
|
||
index2++;
|
||
}
|
||
} else if (!events[index2] || events[index2][1].type !== "data") {
|
||
if (index2 !== enter + 2) {
|
||
events[enter][1].end = events[index2 - 1][1].end;
|
||
events.splice(enter + 2, index2 - enter - 2);
|
||
index2 = enter + 2;
|
||
}
|
||
enter = void 0;
|
||
}
|
||
}
|
||
return extraResolver ? extraResolver(events, context) : events;
|
||
}
|
||
}
|
||
function resolveAllLineSuffixes(events, context) {
|
||
let eventIndex = 0;
|
||
while (++eventIndex <= events.length) {
|
||
if ((eventIndex === events.length || events[eventIndex][1].type === "lineEnding") && events[eventIndex - 1][1].type === "data") {
|
||
const data = events[eventIndex - 1][1];
|
||
const chunks = context.sliceStream(data);
|
||
let index2 = chunks.length;
|
||
let bufferIndex = -1;
|
||
let size = 0;
|
||
let tabs;
|
||
while (index2--) {
|
||
const chunk = chunks[index2];
|
||
if (typeof chunk === "string") {
|
||
bufferIndex = chunk.length;
|
||
while (chunk.charCodeAt(bufferIndex - 1) === 32) {
|
||
size++;
|
||
bufferIndex--;
|
||
}
|
||
if (bufferIndex)
|
||
break;
|
||
bufferIndex = -1;
|
||
} else if (chunk === -2) {
|
||
tabs = true;
|
||
size++;
|
||
} else if (chunk === -1) {
|
||
} else {
|
||
index2++;
|
||
break;
|
||
}
|
||
}
|
||
if (size) {
|
||
const token = {
|
||
type: eventIndex === events.length || tabs || size < 2 ? "lineSuffix" : "hardBreakTrailing",
|
||
start: {
|
||
line: data.end.line,
|
||
column: data.end.column - size,
|
||
offset: data.end.offset - size,
|
||
_index: data.start._index + index2,
|
||
_bufferIndex: index2 ? bufferIndex : data.start._bufferIndex + bufferIndex
|
||
},
|
||
end: Object.assign({}, data.end)
|
||
};
|
||
data.end = Object.assign({}, token.start);
|
||
if (data.start.offset === data.end.offset) {
|
||
Object.assign(data, token);
|
||
} else {
|
||
events.splice(
|
||
eventIndex,
|
||
0,
|
||
["enter", token, context],
|
||
["exit", token, context]
|
||
);
|
||
eventIndex += 2;
|
||
}
|
||
}
|
||
eventIndex++;
|
||
}
|
||
}
|
||
return events;
|
||
}
|
||
|
||
// node_modules/micromark/lib/create-tokenizer.js
|
||
function createTokenizer(parser, initialize, from) {
|
||
let point3 = Object.assign(
|
||
from ? Object.assign({}, from) : {
|
||
line: 1,
|
||
column: 1,
|
||
offset: 0
|
||
},
|
||
{
|
||
_index: 0,
|
||
_bufferIndex: -1
|
||
}
|
||
);
|
||
const columnStart = {};
|
||
const resolveAllConstructs = [];
|
||
let chunks = [];
|
||
let stack = [];
|
||
let consumed = true;
|
||
const effects = {
|
||
consume,
|
||
enter,
|
||
exit: exit2,
|
||
attempt: constructFactory(onsuccessfulconstruct),
|
||
check: constructFactory(onsuccessfulcheck),
|
||
interrupt: constructFactory(onsuccessfulcheck, {
|
||
interrupt: true
|
||
})
|
||
};
|
||
const context = {
|
||
previous: null,
|
||
code: null,
|
||
containerState: {},
|
||
events: [],
|
||
parser,
|
||
sliceStream,
|
||
sliceSerialize,
|
||
now,
|
||
defineSkip,
|
||
write
|
||
};
|
||
let state = initialize.tokenize.call(context, effects);
|
||
let expectedCode;
|
||
if (initialize.resolveAll) {
|
||
resolveAllConstructs.push(initialize);
|
||
}
|
||
return context;
|
||
function write(slice) {
|
||
chunks = push(chunks, slice);
|
||
main();
|
||
if (chunks[chunks.length - 1] !== null) {
|
||
return [];
|
||
}
|
||
addResult(initialize, 0);
|
||
context.events = resolveAll(resolveAllConstructs, context.events, context);
|
||
return context.events;
|
||
}
|
||
function sliceSerialize(token, expandTabs) {
|
||
return serializeChunks(sliceStream(token), expandTabs);
|
||
}
|
||
function sliceStream(token) {
|
||
return sliceChunks(chunks, token);
|
||
}
|
||
function now() {
|
||
const { line, column, offset, _index, _bufferIndex } = point3;
|
||
return {
|
||
line,
|
||
column,
|
||
offset,
|
||
_index,
|
||
_bufferIndex
|
||
};
|
||
}
|
||
function defineSkip(value) {
|
||
columnStart[value.line] = value.column;
|
||
accountForPotentialSkip();
|
||
}
|
||
function main() {
|
||
let chunkIndex;
|
||
while (point3._index < chunks.length) {
|
||
const chunk = chunks[point3._index];
|
||
if (typeof chunk === "string") {
|
||
chunkIndex = point3._index;
|
||
if (point3._bufferIndex < 0) {
|
||
point3._bufferIndex = 0;
|
||
}
|
||
while (point3._index === chunkIndex && point3._bufferIndex < chunk.length) {
|
||
go(chunk.charCodeAt(point3._bufferIndex));
|
||
}
|
||
} else {
|
||
go(chunk);
|
||
}
|
||
}
|
||
}
|
||
function go(code2) {
|
||
consumed = void 0;
|
||
expectedCode = code2;
|
||
state = state(code2);
|
||
}
|
||
function consume(code2) {
|
||
if (markdownLineEnding(code2)) {
|
||
point3.line++;
|
||
point3.column = 1;
|
||
point3.offset += code2 === -3 ? 2 : 1;
|
||
accountForPotentialSkip();
|
||
} else if (code2 !== -1) {
|
||
point3.column++;
|
||
point3.offset++;
|
||
}
|
||
if (point3._bufferIndex < 0) {
|
||
point3._index++;
|
||
} else {
|
||
point3._bufferIndex++;
|
||
if (point3._bufferIndex === chunks[point3._index].length) {
|
||
point3._bufferIndex = -1;
|
||
point3._index++;
|
||
}
|
||
}
|
||
context.previous = code2;
|
||
consumed = true;
|
||
}
|
||
function enter(type, fields) {
|
||
const token = fields || {};
|
||
token.type = type;
|
||
token.start = now();
|
||
context.events.push(["enter", token, context]);
|
||
stack.push(token);
|
||
return token;
|
||
}
|
||
function exit2(type) {
|
||
const token = stack.pop();
|
||
token.end = now();
|
||
context.events.push(["exit", token, context]);
|
||
return token;
|
||
}
|
||
function onsuccessfulconstruct(construct, info) {
|
||
addResult(construct, info.from);
|
||
}
|
||
function onsuccessfulcheck(_, info) {
|
||
info.restore();
|
||
}
|
||
function constructFactory(onreturn, fields) {
|
||
return hook;
|
||
function hook(constructs2, returnState, bogusState) {
|
||
let listOfConstructs;
|
||
let constructIndex;
|
||
let currentConstruct;
|
||
let info;
|
||
return Array.isArray(constructs2) ? handleListOfConstructs(constructs2) : "tokenize" in constructs2 ? (
|
||
// @ts-expect-error Looks like a construct.
|
||
handleListOfConstructs([constructs2])
|
||
) : handleMapOfConstructs(constructs2);
|
||
function handleMapOfConstructs(map4) {
|
||
return start;
|
||
function start(code2) {
|
||
const def = code2 !== null && map4[code2];
|
||
const all2 = code2 !== null && map4.null;
|
||
const list4 = [
|
||
// To do: add more extension tests.
|
||
/* c8 ignore next 2 */
|
||
...Array.isArray(def) ? def : def ? [def] : [],
|
||
...Array.isArray(all2) ? all2 : all2 ? [all2] : []
|
||
];
|
||
return handleListOfConstructs(list4)(code2);
|
||
}
|
||
}
|
||
function handleListOfConstructs(list4) {
|
||
listOfConstructs = list4;
|
||
constructIndex = 0;
|
||
if (list4.length === 0) {
|
||
return bogusState;
|
||
}
|
||
return handleConstruct(list4[constructIndex]);
|
||
}
|
||
function handleConstruct(construct) {
|
||
return start;
|
||
function start(code2) {
|
||
info = store();
|
||
currentConstruct = construct;
|
||
if (!construct.partial) {
|
||
context.currentConstruct = construct;
|
||
}
|
||
if (construct.name && context.parser.constructs.disable.null.includes(construct.name)) {
|
||
return nok(code2);
|
||
}
|
||
return construct.tokenize.call(
|
||
// If we do have fields, create an object w/ `context` as its
|
||
// prototype.
|
||
// This allows a “live binding”, which is needed for `interrupt`.
|
||
fields ? Object.assign(Object.create(context), fields) : context,
|
||
effects,
|
||
ok3,
|
||
nok
|
||
)(code2);
|
||
}
|
||
}
|
||
function ok3(code2) {
|
||
consumed = true;
|
||
onreturn(currentConstruct, info);
|
||
return returnState;
|
||
}
|
||
function nok(code2) {
|
||
consumed = true;
|
||
info.restore();
|
||
if (++constructIndex < listOfConstructs.length) {
|
||
return handleConstruct(listOfConstructs[constructIndex]);
|
||
}
|
||
return bogusState;
|
||
}
|
||
}
|
||
}
|
||
function addResult(construct, from2) {
|
||
if (construct.resolveAll && !resolveAllConstructs.includes(construct)) {
|
||
resolveAllConstructs.push(construct);
|
||
}
|
||
if (construct.resolve) {
|
||
splice(
|
||
context.events,
|
||
from2,
|
||
context.events.length - from2,
|
||
construct.resolve(context.events.slice(from2), context)
|
||
);
|
||
}
|
||
if (construct.resolveTo) {
|
||
context.events = construct.resolveTo(context.events, context);
|
||
}
|
||
}
|
||
function store() {
|
||
const startPoint = now();
|
||
const startPrevious = context.previous;
|
||
const startCurrentConstruct = context.currentConstruct;
|
||
const startEventsIndex = context.events.length;
|
||
const startStack = Array.from(stack);
|
||
return {
|
||
restore,
|
||
from: startEventsIndex
|
||
};
|
||
function restore() {
|
||
point3 = startPoint;
|
||
context.previous = startPrevious;
|
||
context.currentConstruct = startCurrentConstruct;
|
||
context.events.length = startEventsIndex;
|
||
stack = startStack;
|
||
accountForPotentialSkip();
|
||
}
|
||
}
|
||
function accountForPotentialSkip() {
|
||
if (point3.line in columnStart && point3.column < 2) {
|
||
point3.column = columnStart[point3.line];
|
||
point3.offset += columnStart[point3.line] - 1;
|
||
}
|
||
}
|
||
}
|
||
function sliceChunks(chunks, token) {
|
||
const startIndex = token.start._index;
|
||
const startBufferIndex = token.start._bufferIndex;
|
||
const endIndex = token.end._index;
|
||
const endBufferIndex = token.end._bufferIndex;
|
||
let view;
|
||
if (startIndex === endIndex) {
|
||
view = [chunks[startIndex].slice(startBufferIndex, endBufferIndex)];
|
||
} else {
|
||
view = chunks.slice(startIndex, endIndex);
|
||
if (startBufferIndex > -1) {
|
||
const head = view[0];
|
||
if (typeof head === "string") {
|
||
view[0] = head.slice(startBufferIndex);
|
||
} else {
|
||
view.shift();
|
||
}
|
||
}
|
||
if (endBufferIndex > 0) {
|
||
view.push(chunks[endIndex].slice(0, endBufferIndex));
|
||
}
|
||
}
|
||
return view;
|
||
}
|
||
function serializeChunks(chunks, expandTabs) {
|
||
let index2 = -1;
|
||
const result = [];
|
||
let atTab;
|
||
while (++index2 < chunks.length) {
|
||
const chunk = chunks[index2];
|
||
let value;
|
||
if (typeof chunk === "string") {
|
||
value = chunk;
|
||
} else
|
||
switch (chunk) {
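        // Negative chunk codes are micromark's virtual characters; the cases below map them
        // back to text (-5 CR, -4 LF, -3 CRLF; -2 and -1 come from tab expansion).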
|
||
case -5: {
|
||
value = "\r";
|
||
break;
|
||
}
|
||
case -4: {
|
||
value = "\n";
|
||
break;
|
||
}
|
||
case -3: {
|
||
value = "\r\n";
|
||
break;
|
||
}
|
||
case -2: {
|
||
value = expandTabs ? " " : " ";
|
||
break;
|
||
}
|
||
case -1: {
|
||
if (!expandTabs && atTab)
|
||
continue;
|
||
value = " ";
|
||
break;
|
||
}
|
||
default: {
|
||
value = String.fromCharCode(chunk);
|
||
}
|
||
}
|
||
atTab = chunk === -2;
|
||
result.push(value);
|
||
}
|
||
return result.join("");
|
||
}
// node_modules/micromark/lib/constructs.js
var constructs_exports = {};
__export(constructs_exports, {
  attentionMarkers: () => attentionMarkers,
  contentInitial: () => contentInitial,
  disable: () => disable,
  document: () => document2,
  flow: () => flow2,
  flowInitial: () => flowInitial,
  insideSpan: () => insideSpan,
  string: () => string2,
  text: () => text2
});
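// Note: keys in the construct maps below are character codes (e.g. 35 "#", 42 "*", 45 "-", 62 ">");
// negative keys are virtual characters such as line endings and tabs.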
var document2 = {
  [42]: list,
  [43]: list,
  [45]: list,
  [48]: list,
  [49]: list,
  [50]: list,
  [51]: list,
  [52]: list,
  [53]: list,
  [54]: list,
  [55]: list,
  [56]: list,
  [57]: list,
  [62]: blockQuote
};
var contentInitial = {
  [91]: definition
};
var flowInitial = {
  [-2]: codeIndented,
  [-1]: codeIndented,
  [32]: codeIndented
};
var flow2 = {
  [35]: headingAtx,
  [42]: thematicBreak,
  [45]: [setextUnderline, thematicBreak],
  [60]: htmlFlow,
  [61]: setextUnderline,
  [95]: thematicBreak,
  [96]: codeFenced,
  [126]: codeFenced
};
var string2 = {
  [38]: characterReference,
  [92]: characterEscape
};
var text2 = {
  [-5]: lineEnding,
  [-4]: lineEnding,
  [-3]: lineEnding,
  [33]: labelStartImage,
  [38]: characterReference,
  [42]: attention,
  [60]: [autolink, htmlText],
  [91]: labelStartLink,
  [92]: [hardBreakEscape, characterEscape],
  [93]: labelEnd,
  [95]: attention,
  [96]: codeText
};
var insideSpan = {
  null: [attention, resolver]
};
var attentionMarkers = {
  null: [42, 95]
};
var disable = {
  null: []
};

// node_modules/micromark/lib/parse.js
function parse(options) {
  const settings = options || {};
  const constructs2 = (
    /** @type {FullNormalizedExtension} */
    combineExtensions([constructs_exports, ...settings.extensions || []])
  );
  const parser = {
    defined: [],
    lazy: {},
    constructs: constructs2,
    content: create(content),
    document: create(document),
    flow: create(flow),
    string: create(string),
    text: create(text)
  };
  return parser;
  function create(initial) {
    return creator;
    function creator(from) {
      return createTokenizer(parser, initial, from);
    }
  }
}

// node_modules/micromark/lib/postprocess.js
function postprocess(events) {
  while (!subtokenize(events)) {
  }
  return events;
}

// node_modules/micromark/lib/preprocess.js
|
||
var search = /[\0\t\n\r]/g;
|
||
function preprocess() {
|
||
let column = 1;
|
||
let buffer = "";
|
||
let start = true;
|
||
let atCarriageReturn;
|
||
return preprocessor;
|
||
function preprocessor(value, encoding, end) {
|
||
const chunks = [];
|
||
let match;
|
||
let next;
|
||
let startPosition;
|
||
let endPosition;
|
||
let code2;
|
||
value = buffer + (typeof value === "string" ? value.toString() : new TextDecoder(encoding || void 0).decode(value));
|
||
startPosition = 0;
|
||
buffer = "";
|
||
if (start) {
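      // 65279 is the byte order mark (U+FEFF); when it is the first code unit it is skipped below.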
|
||
if (value.charCodeAt(0) === 65279) {
|
||
startPosition++;
|
||
}
|
||
start = void 0;
|
||
}
|
||
while (startPosition < value.length) {
|
||
search.lastIndex = startPosition;
|
||
match = search.exec(value);
|
||
endPosition = match && match.index !== void 0 ? match.index : value.length;
|
||
code2 = value.charCodeAt(endPosition);
|
||
if (!match) {
|
||
buffer = value.slice(startPosition);
|
||
break;
|
||
}
|
||
if (code2 === 10 && startPosition === endPosition && atCarriageReturn) {
|
||
chunks.push(-3);
|
||
atCarriageReturn = void 0;
|
||
} else {
|
||
if (atCarriageReturn) {
|
||
chunks.push(-5);
|
||
atCarriageReturn = void 0;
|
||
}
|
||
if (startPosition < endPosition) {
|
||
chunks.push(value.slice(startPosition, endPosition));
|
||
column += endPosition - startPosition;
|
||
}
|
||
switch (code2) {
|
||
case 0: {
|
||
chunks.push(65533);
|
||
column++;
|
||
break;
|
||
}
|
||
case 9: {
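          // Code 9 is a horizontal tab: push one -2 chunk, then -1 fillers up to the next 4-column tab stop.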
|
||
next = Math.ceil(column / 4) * 4;
|
||
chunks.push(-2);
|
||
while (column++ < next)
|
||
chunks.push(-1);
|
||
break;
|
||
}
|
||
case 10: {
|
||
chunks.push(-4);
|
||
column = 1;
|
||
break;
|
||
}
|
||
default: {
|
||
atCarriageReturn = true;
|
||
column = 1;
|
||
}
|
||
}
|
||
}
|
||
startPosition = endPosition + 1;
|
||
}
|
||
if (end) {
|
||
if (atCarriageReturn)
|
||
chunks.push(-5);
|
||
if (buffer)
|
||
chunks.push(buffer);
|
||
chunks.push(null);
|
||
}
|
||
return chunks;
|
||
}
|
||
}
// node_modules/micromark-util-decode-string/index.js
var characterEscapeOrReference = /\\([!-/:-@[-`{-~])|&(#(?:\d{1,7}|x[\da-f]{1,6})|[\da-z]{1,31});/gi;
function decodeString(value) {
  return value.replace(characterEscapeOrReference, decode);
}
function decode($0, $1, $2) {
  if ($1) {
    return $1;
  }
  const head = $2.charCodeAt(0);
  if (head === 35) {
    const head2 = $2.charCodeAt(1);
    const hex = head2 === 120 || head2 === 88;
    return decodeNumericCharacterReference($2.slice(hex ? 2 : 1), hex ? 16 : 10);
  }
  return decodeNamedCharacterReference($2) || $0;
}

// node_modules/unist-util-stringify-position/lib/index.js
|
||
function stringifyPosition(value) {
|
||
if (!value || typeof value !== "object") {
|
||
return "";
|
||
}
|
||
if ("position" in value || "type" in value) {
|
||
return position(value.position);
|
||
}
|
||
if ("start" in value || "end" in value) {
|
||
return position(value);
|
||
}
|
||
if ("line" in value || "column" in value) {
|
||
return point(value);
|
||
}
|
||
return "";
|
||
}
|
||
function point(point3) {
|
||
return index(point3 && point3.line) + ":" + index(point3 && point3.column);
|
||
}
|
||
function position(pos) {
|
||
return point(pos && pos.start) + "-" + point(pos && pos.end);
|
||
}
|
||
function index(value) {
|
||
return value && typeof value === "number" ? value : 1;
|
||
}
|
||
|
||
// node_modules/mdast-util-from-markdown/lib/index.js
|
||
var own2 = {}.hasOwnProperty;
|
||
function fromMarkdown(value, encoding, options) {
|
||
if (typeof encoding !== "string") {
|
||
options = encoding;
|
||
encoding = void 0;
|
||
}
|
||
return compiler(options)(
|
||
postprocess(
|
||
parse(options).document().write(preprocess()(value, encoding, true))
|
||
)
|
||
);
|
||
}
|
||
function compiler(options) {
|
||
const config = {
|
||
transforms: [],
|
||
canContainEols: ["emphasis", "fragment", "heading", "paragraph", "strong"],
|
||
enter: {
|
||
autolink: opener(link2),
|
||
autolinkProtocol: onenterdata,
|
||
autolinkEmail: onenterdata,
|
||
atxHeading: opener(heading2),
|
||
blockQuote: opener(blockQuote2),
|
||
characterEscape: onenterdata,
|
||
characterReference: onenterdata,
|
||
codeFenced: opener(codeFlow),
|
||
codeFencedFenceInfo: buffer,
|
||
codeFencedFenceMeta: buffer,
|
||
codeIndented: opener(codeFlow, buffer),
|
||
codeText: opener(codeText2, buffer),
|
||
codeTextData: onenterdata,
|
||
data: onenterdata,
|
||
codeFlowValue: onenterdata,
|
||
definition: opener(definition3),
|
||
definitionDestinationString: buffer,
|
||
definitionLabelString: buffer,
|
||
definitionTitleString: buffer,
|
||
emphasis: opener(emphasis2),
|
||
hardBreakEscape: opener(hardBreak2),
|
||
hardBreakTrailing: opener(hardBreak2),
|
||
htmlFlow: opener(html2, buffer),
|
||
htmlFlowData: onenterdata,
|
||
htmlText: opener(html2, buffer),
|
||
htmlTextData: onenterdata,
|
||
image: opener(image2),
|
||
label: buffer,
|
||
link: opener(link2),
|
||
listItem: opener(listItem2),
|
||
listItemValue: onenterlistitemvalue,
|
||
listOrdered: opener(list4, onenterlistordered),
|
||
listUnordered: opener(list4),
|
||
paragraph: opener(paragraph2),
|
||
reference: onenterreference,
|
||
referenceString: buffer,
|
||
resourceDestinationString: buffer,
|
||
resourceTitleString: buffer,
|
||
setextHeading: opener(heading2),
|
||
strong: opener(strong2),
|
||
thematicBreak: opener(thematicBreak3)
|
||
},
|
||
exit: {
|
||
atxHeading: closer(),
|
||
atxHeadingSequence: onexitatxheadingsequence,
|
||
autolink: closer(),
|
||
autolinkEmail: onexitautolinkemail,
|
||
autolinkProtocol: onexitautolinkprotocol,
|
||
blockQuote: closer(),
|
||
characterEscapeValue: onexitdata,
|
||
characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,
|
||
characterReferenceMarkerNumeric: onexitcharacterreferencemarker,
|
||
characterReferenceValue: onexitcharacterreferencevalue,
|
||
codeFenced: closer(onexitcodefenced),
|
||
codeFencedFence: onexitcodefencedfence,
|
||
codeFencedFenceInfo: onexitcodefencedfenceinfo,
|
||
codeFencedFenceMeta: onexitcodefencedfencemeta,
|
||
codeFlowValue: onexitdata,
|
||
codeIndented: closer(onexitcodeindented),
|
||
codeText: closer(onexitcodetext),
|
||
codeTextData: onexitdata,
|
||
data: onexitdata,
|
||
definition: closer(),
|
||
definitionDestinationString: onexitdefinitiondestinationstring,
|
||
definitionLabelString: onexitdefinitionlabelstring,
|
||
definitionTitleString: onexitdefinitiontitlestring,
|
||
emphasis: closer(),
|
||
hardBreakEscape: closer(onexithardbreak),
|
||
hardBreakTrailing: closer(onexithardbreak),
|
||
htmlFlow: closer(onexithtmlflow),
|
||
htmlFlowData: onexitdata,
|
||
htmlText: closer(onexithtmltext),
|
||
htmlTextData: onexitdata,
|
||
image: closer(onexitimage),
|
||
label: onexitlabel,
|
||
labelText: onexitlabeltext,
|
||
lineEnding: onexitlineending,
|
||
link: closer(onexitlink),
|
||
listItem: closer(),
|
||
listOrdered: closer(),
|
||
listUnordered: closer(),
|
||
paragraph: closer(),
|
||
referenceString: onexitreferencestring,
|
||
resourceDestinationString: onexitresourcedestinationstring,
|
||
resourceTitleString: onexitresourcetitlestring,
|
||
resource: onexitresource,
|
||
setextHeading: closer(onexitsetextheading),
|
||
setextHeadingLineSequence: onexitsetextheadinglinesequence,
|
||
setextHeadingText: onexitsetextheadingtext,
|
||
strong: closer(),
|
||
thematicBreak: closer()
|
||
}
|
||
};
|
||
configure(config, (options || {}).mdastExtensions || []);
|
||
const data = {};
|
||
return compile;
|
||
function compile(events) {
|
||
let tree = {
|
||
type: "root",
|
||
children: []
|
||
};
|
||
const context = {
|
||
stack: [tree],
|
||
tokenStack: [],
|
||
config,
|
||
enter,
|
||
exit: exit2,
|
||
buffer,
|
||
resume,
|
||
data
|
||
};
|
||
const listStack = [];
|
||
let index2 = -1;
|
||
while (++index2 < events.length) {
|
||
if (events[index2][1].type === "listOrdered" || events[index2][1].type === "listUnordered") {
|
||
if (events[index2][0] === "enter") {
|
||
listStack.push(index2);
|
||
} else {
|
||
const tail = listStack.pop();
|
||
index2 = prepareList(events, tail, index2);
|
||
}
|
||
}
|
||
}
|
||
index2 = -1;
|
||
while (++index2 < events.length) {
|
||
const handler = config[events[index2][0]];
|
||
if (own2.call(handler, events[index2][1].type)) {
|
||
handler[events[index2][1].type].call(
|
||
Object.assign(
|
||
{
|
||
sliceSerialize: events[index2][2].sliceSerialize
|
||
},
|
||
context
|
||
),
|
||
events[index2][1]
|
||
);
|
||
}
|
||
}
|
||
if (context.tokenStack.length > 0) {
|
||
const tail = context.tokenStack[context.tokenStack.length - 1];
|
||
const handler = tail[1] || defaultOnError;
|
||
handler.call(context, void 0, tail[0]);
|
||
}
|
||
tree.position = {
|
||
start: point2(
|
||
events.length > 0 ? events[0][1].start : {
|
||
line: 1,
|
||
column: 1,
|
||
offset: 0
|
||
}
|
||
),
|
||
end: point2(
|
||
events.length > 0 ? events[events.length - 2][1].end : {
|
||
line: 1,
|
||
column: 1,
|
||
offset: 0
|
||
}
|
||
)
|
||
};
|
||
index2 = -1;
|
||
while (++index2 < config.transforms.length) {
|
||
tree = config.transforms[index2](tree) || tree;
|
||
}
|
||
return tree;
|
||
}
|
||
function prepareList(events, start, length) {
|
||
let index2 = start - 1;
|
||
let containerBalance = -1;
|
||
let listSpread = false;
|
||
let listItem3;
|
||
let lineIndex;
|
||
let firstBlankLineIndex;
|
||
let atMarker;
|
||
while (++index2 <= length) {
|
||
const event = events[index2];
|
||
switch (event[1].type) {
|
||
case "listUnordered":
|
||
case "listOrdered":
|
||
case "blockQuote": {
|
||
if (event[0] === "enter") {
|
||
containerBalance++;
|
||
} else {
|
||
containerBalance--;
|
||
}
|
||
atMarker = void 0;
|
||
break;
|
||
}
|
||
case "lineEndingBlank": {
|
||
if (event[0] === "enter") {
|
||
if (listItem3 && !atMarker && !containerBalance && !firstBlankLineIndex) {
|
||
firstBlankLineIndex = index2;
|
||
}
|
||
atMarker = void 0;
|
||
}
|
||
break;
|
||
}
|
||
case "linePrefix":
|
||
case "listItemValue":
|
||
case "listItemMarker":
|
||
case "listItemPrefix":
|
||
case "listItemPrefixWhitespace": {
|
||
break;
|
||
}
|
||
default: {
|
||
atMarker = void 0;
|
||
}
|
||
}
|
||
if (!containerBalance && event[0] === "enter" && event[1].type === "listItemPrefix" || containerBalance === -1 && event[0] === "exit" && (event[1].type === "listUnordered" || event[1].type === "listOrdered")) {
|
||
if (listItem3) {
|
||
let tailIndex = index2;
|
||
lineIndex = void 0;
|
||
while (tailIndex--) {
|
||
const tailEvent = events[tailIndex];
|
||
if (tailEvent[1].type === "lineEnding" || tailEvent[1].type === "lineEndingBlank") {
|
||
if (tailEvent[0] === "exit")
|
||
continue;
|
||
if (lineIndex) {
|
||
events[lineIndex][1].type = "lineEndingBlank";
|
||
listSpread = true;
|
||
}
|
||
tailEvent[1].type = "lineEnding";
|
||
lineIndex = tailIndex;
|
||
} else if (tailEvent[1].type === "linePrefix" || tailEvent[1].type === "blockQuotePrefix" || tailEvent[1].type === "blockQuotePrefixWhitespace" || tailEvent[1].type === "blockQuoteMarker" || tailEvent[1].type === "listItemIndent") {
|
||
} else {
|
||
break;
|
||
}
|
||
}
|
||
if (firstBlankLineIndex && (!lineIndex || firstBlankLineIndex < lineIndex)) {
|
||
listItem3._spread = true;
|
||
}
|
||
listItem3.end = Object.assign(
|
||
{},
|
||
lineIndex ? events[lineIndex][1].start : event[1].end
|
||
);
|
||
events.splice(lineIndex || index2, 0, ["exit", listItem3, event[2]]);
|
||
index2++;
|
||
length++;
|
||
}
|
||
if (event[1].type === "listItemPrefix") {
|
||
const item = {
|
||
type: "listItem",
|
||
_spread: false,
|
||
start: Object.assign({}, event[1].start),
|
||
// @ts-expect-error: we’ll add `end` in a second.
|
||
end: void 0
|
||
};
|
||
listItem3 = item;
|
||
events.splice(index2, 0, ["enter", item, event[2]]);
|
||
index2++;
|
||
length++;
|
||
firstBlankLineIndex = void 0;
|
||
atMarker = true;
|
||
}
|
||
}
|
||
}
|
||
events[start][1]._spread = listSpread;
|
||
return length;
|
||
}
|
||
function opener(create, and) {
|
||
return open;
|
||
function open(token) {
|
||
enter.call(this, create(token), token);
|
||
if (and)
|
||
and.call(this, token);
|
||
}
|
||
}
|
||
function buffer() {
|
||
this.stack.push({
|
||
type: "fragment",
|
||
children: []
|
||
});
|
||
}
|
||
function enter(node2, token, errorHandler) {
|
||
const parent = this.stack[this.stack.length - 1];
|
||
const siblings = parent.children;
|
||
siblings.push(node2);
|
||
this.stack.push(node2);
|
||
this.tokenStack.push([token, errorHandler]);
|
||
node2.position = {
|
||
start: point2(token.start),
|
||
// @ts-expect-error: `end` will be patched later.
|
||
end: void 0
|
||
};
|
||
}
|
||
function closer(and) {
|
||
return close;
|
||
function close(token) {
|
||
if (and)
|
||
and.call(this, token);
|
||
exit2.call(this, token);
|
||
}
|
||
}
|
||
function exit2(token, onExitError) {
|
||
const node2 = this.stack.pop();
|
||
const open = this.tokenStack.pop();
|
||
if (!open) {
|
||
throw new Error(
|
||
"Cannot close `" + token.type + "` (" + stringifyPosition({
|
||
start: token.start,
|
||
end: token.end
|
||
}) + "): it\u2019s not open"
|
||
);
|
||
} else if (open[0].type !== token.type) {
|
||
if (onExitError) {
|
||
onExitError.call(this, token, open[0]);
|
||
} else {
|
||
const handler = open[1] || defaultOnError;
|
||
handler.call(this, token, open[0]);
|
||
}
|
||
}
|
||
node2.position.end = point2(token.end);
|
||
}
|
||
function resume() {
|
||
return toString(this.stack.pop());
|
||
}
|
||
function onenterlistordered() {
|
||
this.data.expectingFirstListItemValue = true;
|
||
}
|
||
function onenterlistitemvalue(token) {
|
||
if (this.data.expectingFirstListItemValue) {
|
||
const ancestor = this.stack[this.stack.length - 2];
|
||
ancestor.start = Number.parseInt(this.sliceSerialize(token), 10);
|
||
this.data.expectingFirstListItemValue = void 0;
|
||
}
|
||
}
|
||
function onexitcodefencedfenceinfo() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.lang = data2;
|
||
}
|
||
function onexitcodefencedfencemeta() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.meta = data2;
|
||
}
|
||
function onexitcodefencedfence() {
|
||
if (this.data.flowCodeInside)
|
||
return;
|
||
this.buffer();
|
||
this.data.flowCodeInside = true;
|
||
}
|
||
function onexitcodefenced() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.value = data2.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, "");
|
||
this.data.flowCodeInside = void 0;
|
||
}
|
||
function onexitcodeindented() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.value = data2.replace(/(\r?\n|\r)$/g, "");
|
||
}
|
||
function onexitdefinitionlabelstring(token) {
|
||
const label = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.label = label;
|
||
node2.identifier = normalizeIdentifier(
|
||
this.sliceSerialize(token)
|
||
).toLowerCase();
|
||
}
|
||
function onexitdefinitiontitlestring() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.title = data2;
|
||
}
|
||
function onexitdefinitiondestinationstring() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.url = data2;
|
||
}
|
||
function onexitatxheadingsequence(token) {
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
if (!node2.depth) {
|
||
const depth = this.sliceSerialize(token).length;
|
||
node2.depth = depth;
|
||
}
|
||
}
|
||
function onexitsetextheadingtext() {
|
||
this.data.setextHeadingSlurpLineEnding = true;
|
||
}
|
||
function onexitsetextheadinglinesequence(token) {
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.depth = this.sliceSerialize(token).codePointAt(0) === 61 ? 1 : 2;
|
||
}
|
||
function onexitsetextheading() {
|
||
this.data.setextHeadingSlurpLineEnding = void 0;
|
||
}
|
||
function onenterdata(token) {
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
const siblings = node2.children;
|
||
let tail = siblings[siblings.length - 1];
|
||
if (!tail || tail.type !== "text") {
|
||
tail = text4();
|
||
tail.position = {
|
||
start: point2(token.start),
|
||
// @ts-expect-error: we’ll add `end` later.
|
||
end: void 0
|
||
};
|
||
siblings.push(tail);
|
||
}
|
||
this.stack.push(tail);
|
||
}
|
||
function onexitdata(token) {
|
||
const tail = this.stack.pop();
|
||
tail.value += this.sliceSerialize(token);
|
||
tail.position.end = point2(token.end);
|
||
}
|
||
function onexitlineending(token) {
|
||
const context = this.stack[this.stack.length - 1];
|
||
if (this.data.atHardBreak) {
|
||
const tail = context.children[context.children.length - 1];
|
||
tail.position.end = point2(token.end);
|
||
this.data.atHardBreak = void 0;
|
||
return;
|
||
}
|
||
if (!this.data.setextHeadingSlurpLineEnding && config.canContainEols.includes(context.type)) {
|
||
onenterdata.call(this, token);
|
||
onexitdata.call(this, token);
|
||
}
|
||
}
|
||
function onexithardbreak() {
|
||
this.data.atHardBreak = true;
|
||
}
|
||
function onexithtmlflow() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.value = data2;
|
||
}
|
||
function onexithtmltext() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.value = data2;
|
||
}
|
||
function onexitcodetext() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.value = data2;
|
||
}
|
||
function onexitlink() {
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
if (this.data.inReference) {
|
||
const referenceType = this.data.referenceType || "shortcut";
|
||
node2.type += "Reference";
|
||
node2.referenceType = referenceType;
|
||
delete node2.url;
|
||
delete node2.title;
|
||
} else {
|
||
delete node2.identifier;
|
||
delete node2.label;
|
||
}
|
||
this.data.referenceType = void 0;
|
||
}
|
||
function onexitimage() {
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
if (this.data.inReference) {
|
||
const referenceType = this.data.referenceType || "shortcut";
|
||
node2.type += "Reference";
|
||
node2.referenceType = referenceType;
|
||
delete node2.url;
|
||
delete node2.title;
|
||
} else {
|
||
delete node2.identifier;
|
||
delete node2.label;
|
||
}
|
||
this.data.referenceType = void 0;
|
||
}
|
||
function onexitlabeltext(token) {
|
||
const string3 = this.sliceSerialize(token);
|
||
const ancestor = this.stack[this.stack.length - 2];
|
||
ancestor.label = decodeString(string3);
|
||
ancestor.identifier = normalizeIdentifier(string3).toLowerCase();
|
||
}
|
||
function onexitlabel() {
|
||
const fragment = this.stack[this.stack.length - 1];
|
||
const value = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
this.data.inReference = true;
|
||
if (node2.type === "link") {
|
||
const children = fragment.children;
|
||
node2.children = children;
|
||
} else {
|
||
node2.alt = value;
|
||
}
|
||
}
|
||
function onexitresourcedestinationstring() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.url = data2;
|
||
}
|
||
function onexitresourcetitlestring() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.title = data2;
|
||
}
|
||
function onexitresource() {
|
||
this.data.inReference = void 0;
|
||
}
|
||
function onenterreference() {
|
||
this.data.referenceType = "collapsed";
|
||
}
|
||
function onexitreferencestring(token) {
|
||
const label = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.label = label;
|
||
node2.identifier = normalizeIdentifier(
|
||
this.sliceSerialize(token)
|
||
).toLowerCase();
|
||
this.data.referenceType = "full";
|
||
}
|
||
function onexitcharacterreferencemarker(token) {
|
||
this.data.characterReferenceType = token.type;
|
||
}
|
||
function onexitcharacterreferencevalue(token) {
|
||
const data2 = this.sliceSerialize(token);
|
||
const type = this.data.characterReferenceType;
|
||
let value;
|
||
if (type) {
|
||
value = decodeNumericCharacterReference(
|
||
data2,
|
||
type === "characterReferenceMarkerNumeric" ? 10 : 16
|
||
);
|
||
this.data.characterReferenceType = void 0;
|
||
} else {
|
||
const result = decodeNamedCharacterReference(data2);
|
||
value = result;
|
||
}
|
||
const tail = this.stack.pop();
|
||
tail.value += value;
|
||
tail.position.end = point2(token.end);
|
||
}
|
||
function onexitautolinkprotocol(token) {
|
||
onexitdata.call(this, token);
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.url = this.sliceSerialize(token);
|
||
}
|
||
function onexitautolinkemail(token) {
|
||
onexitdata.call(this, token);
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.url = "mailto:" + this.sliceSerialize(token);
|
||
}
|
||
function blockQuote2() {
|
||
return {
|
||
type: "blockquote",
|
||
children: []
|
||
};
|
||
}
|
||
function codeFlow() {
|
||
return {
|
||
type: "code",
|
||
lang: null,
|
||
meta: null,
|
||
value: ""
|
||
};
|
||
}
|
||
function codeText2() {
|
||
return {
|
||
type: "inlineCode",
|
||
value: ""
|
||
};
|
||
}
|
||
function definition3() {
|
||
return {
|
||
type: "definition",
|
||
identifier: "",
|
||
label: null,
|
||
title: null,
|
||
url: ""
|
||
};
|
||
}
|
||
function emphasis2() {
|
||
return {
|
||
type: "emphasis",
|
||
children: []
|
||
};
|
||
}
|
||
function heading2() {
|
||
return {
|
||
type: "heading",
|
||
// @ts-expect-error `depth` will be set later.
|
||
depth: 0,
|
||
children: []
|
||
};
|
||
}
|
||
function hardBreak2() {
|
||
return {
|
||
type: "break"
|
||
};
|
||
}
|
||
function html2() {
|
||
return {
|
||
type: "html",
|
||
value: ""
|
||
};
|
||
}
|
||
function image2() {
|
||
return {
|
||
type: "image",
|
||
title: null,
|
||
url: "",
|
||
alt: null
|
||
};
|
||
}
|
||
function link2() {
|
||
return {
|
||
type: "link",
|
||
title: null,
|
||
url: "",
|
||
children: []
|
||
};
|
||
}
|
||
function list4(token) {
|
||
return {
|
||
type: "list",
|
||
ordered: token.type === "listOrdered",
|
||
start: null,
|
||
spread: token._spread,
|
||
children: []
|
||
};
|
||
}
|
||
function listItem2(token) {
|
||
return {
|
||
type: "listItem",
|
||
spread: token._spread,
|
||
checked: null,
|
||
children: []
|
||
};
|
||
}
|
||
function paragraph2() {
|
||
return {
|
||
type: "paragraph",
|
||
children: []
|
||
};
|
||
}
|
||
function strong2() {
|
||
return {
|
||
type: "strong",
|
||
children: []
|
||
};
|
||
}
|
||
function text4() {
|
||
return {
|
||
type: "text",
|
||
value: ""
|
||
};
|
||
}
|
||
function thematicBreak3() {
|
||
return {
|
||
type: "thematicBreak"
|
||
};
|
||
}
|
||
}
|
||
function point2(d) {
|
||
return {
|
||
line: d.line,
|
||
column: d.column,
|
||
offset: d.offset
|
||
};
|
||
}
|
||
function configure(combined, extensions) {
|
||
let index2 = -1;
|
||
while (++index2 < extensions.length) {
|
||
const value = extensions[index2];
|
||
if (Array.isArray(value)) {
|
||
configure(combined, value);
|
||
} else {
|
||
extension(combined, value);
|
||
}
|
||
}
|
||
}
|
||
function extension(combined, extension2) {
|
||
let key;
|
||
for (key in extension2) {
|
||
if (own2.call(extension2, key)) {
|
||
switch (key) {
|
||
case "canContainEols": {
|
||
const right = extension2[key];
|
||
if (right) {
|
||
combined[key].push(...right);
|
||
}
|
||
break;
|
||
}
|
||
case "transforms": {
|
||
const right = extension2[key];
|
||
if (right) {
|
||
combined[key].push(...right);
|
||
}
|
||
break;
|
||
}
|
||
case "enter":
|
||
case "exit": {
|
||
const right = extension2[key];
|
||
if (right) {
|
||
Object.assign(combined[key], right);
|
||
}
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
}
|
||
function defaultOnError(left, right) {
|
||
if (left) {
|
||
throw new Error(
|
||
"Cannot close `" + left.type + "` (" + stringifyPosition({
|
||
start: left.start,
|
||
end: left.end
|
||
}) + "): a different token (`" + right.type + "`, " + stringifyPosition({
|
||
start: right.start,
|
||
end: right.end
|
||
}) + ") is open"
|
||
);
|
||
} else {
|
||
throw new Error(
|
||
"Cannot close document, a token (`" + right.type + "`, " + stringifyPosition({
|
||
start: right.start,
|
||
end: right.end
|
||
}) + ") is still open"
|
||
);
|
||
}
|
||
}
|
||
|
||
// node_modules/remark-parse/lib/index.js
|
||
function remarkParse(options) {
|
||
const self = this;
|
||
self.parser = parser;
|
||
function parser(doc) {
|
||
return fromMarkdown(doc, {
|
||
...self.data("settings"),
|
||
...options,
|
||
// Note: these options are not in the readme.
|
||
// The goal is for them to be set by plugins on `data` instead of being
|
||
// passed by users.
|
||
extensions: self.data("micromarkExtensions") || [],
|
||
mdastExtensions: self.data("fromMarkdownExtensions") || []
|
||
});
|
||
}
|
||
}
|
||
|
||
// node_modules/zwitch/index.js
|
||
var own3 = {}.hasOwnProperty;
|
||
function zwitch(key, options) {
|
||
const settings = options || {};
|
||
function one2(value, ...parameters) {
|
||
let fn = one2.invalid;
|
||
const handlers = one2.handlers;
|
||
if (value && own3.call(value, key)) {
|
||
const id = String(value[key]);
|
||
fn = own3.call(handlers, id) ? handlers[id] : one2.unknown;
|
||
}
|
||
if (fn) {
|
||
return fn.call(this, value, ...parameters);
|
||
}
|
||
}
|
||
one2.handlers = settings.handlers || {};
|
||
one2.invalid = settings.invalid;
|
||
one2.unknown = settings.unknown;
|
||
return one2;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/configure.js
|
||
var own4 = {}.hasOwnProperty;
|
||
function configure2(base, extension2) {
|
||
let index2 = -1;
|
||
let key;
|
||
if (extension2.extensions) {
|
||
while (++index2 < extension2.extensions.length) {
|
||
configure2(base, extension2.extensions[index2]);
|
||
}
|
||
}
|
||
for (key in extension2) {
|
||
if (own4.call(extension2, key)) {
|
||
switch (key) {
|
||
case "extensions": {
|
||
break;
|
||
}
|
||
case "unsafe": {
|
||
list2(base[key], extension2[key]);
|
||
break;
|
||
}
|
||
case "join": {
|
||
list2(base[key], extension2[key]);
|
||
break;
|
||
}
|
||
case "handlers": {
|
||
map(base[key], extension2[key]);
|
||
break;
|
||
}
|
||
default: {
|
||
base.options[key] = extension2[key];
|
||
}
|
||
}
|
||
}
|
||
}
|
||
return base;
|
||
}
|
||
function list2(left, right) {
|
||
if (right) {
|
||
left.push(...right);
|
||
}
|
||
}
|
||
function map(left, right) {
|
||
if (right) {
|
||
Object.assign(left, right);
|
||
}
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/blockquote.js
|
||
function blockquote(node2, _, state, info) {
|
||
const exit2 = state.enter("blockquote");
|
||
const tracker = state.createTracker(info);
|
||
tracker.move("> ");
|
||
tracker.shift(2);
|
||
const value = state.indentLines(
|
||
state.containerFlow(node2, tracker.current()),
|
||
map2
|
||
);
|
||
exit2();
|
||
return value;
|
||
}
|
||
function map2(line, _, blank) {
|
||
return ">" + (blank ? "" : " ") + line;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/pattern-in-scope.js
|
||
function patternInScope(stack, pattern) {
|
||
return listInScope(stack, pattern.inConstruct, true) && !listInScope(stack, pattern.notInConstruct, false);
|
||
}
|
||
function listInScope(stack, list4, none) {
|
||
if (typeof list4 === "string") {
|
||
list4 = [list4];
|
||
}
|
||
if (!list4 || list4.length === 0) {
|
||
return none;
|
||
}
|
||
let index2 = -1;
|
||
while (++index2 < list4.length) {
|
||
if (stack.includes(list4[index2])) {
|
||
return true;
|
||
}
|
||
}
|
||
return false;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/break.js
|
||
function hardBreak(_, _1, state, info) {
|
||
let index2 = -1;
|
||
while (++index2 < state.unsafe.length) {
|
||
if (state.unsafe[index2].character === "\n" && patternInScope(state.stack, state.unsafe[index2])) {
|
||
return /[ \t]/.test(info.before) ? "" : " ";
|
||
}
|
||
}
|
||
return "\\\n";
|
||
}
|
||
|
||
// node_modules/longest-streak/index.js
|
||
function longestStreak(value, substring) {
|
||
const source = String(value);
|
||
let index2 = source.indexOf(substring);
|
||
let expected = index2;
|
||
let count = 0;
|
||
let max = 0;
|
||
if (typeof substring !== "string") {
|
||
throw new TypeError("Expected substring");
|
||
}
|
||
while (index2 !== -1) {
|
||
if (index2 === expected) {
|
||
if (++count > max) {
|
||
max = count;
|
||
}
|
||
} else {
|
||
count = 1;
|
||
}
|
||
expected = index2 + substring.length;
|
||
index2 = source.indexOf(substring, expected);
|
||
}
|
||
return max;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/format-code-as-indented.js
|
||
function formatCodeAsIndented(node2, state) {
|
||
return Boolean(
|
||
state.options.fences === false && node2.value && // If there’s no info…
|
||
!node2.lang && // And there’s a non-whitespace character…
|
||
/[^ \r\n]/.test(node2.value) && // And the value doesn’t start or end in a blank…
|
||
!/^[\t ]*(?:[\r\n]|$)|(?:^|[\r\n])[\t ]*$/.test(node2.value)
|
||
);
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-fence.js
|
||
function checkFence(state) {
|
||
const marker = state.options.fence || "`";
|
||
if (marker !== "`" && marker !== "~") {
|
||
throw new Error(
|
||
"Cannot serialize code with `" + marker + "` for `options.fence`, expected `` ` `` or `~`"
|
||
);
|
||
}
|
||
return marker;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/code.js
|
||
function code(node2, _, state, info) {
|
||
const marker = checkFence(state);
|
||
const raw = node2.value || "";
|
||
const suffix = marker === "`" ? "GraveAccent" : "Tilde";
|
||
if (formatCodeAsIndented(node2, state)) {
|
||
const exit3 = state.enter("codeIndented");
|
||
const value2 = state.indentLines(raw, map3);
|
||
exit3();
|
||
return value2;
|
||
}
|
||
const tracker = state.createTracker(info);
|
||
const sequence = marker.repeat(Math.max(longestStreak(raw, marker) + 1, 3));
|
||
const exit2 = state.enter("codeFenced");
|
||
let value = tracker.move(sequence);
|
||
if (node2.lang) {
|
||
const subexit = state.enter(`codeFencedLang${suffix}`);
|
||
value += tracker.move(
|
||
state.safe(node2.lang, {
|
||
before: value,
|
||
after: " ",
|
||
encode: ["`"],
|
||
...tracker.current()
|
||
})
|
||
);
|
||
subexit();
|
||
}
|
||
if (node2.lang && node2.meta) {
|
||
const subexit = state.enter(`codeFencedMeta${suffix}`);
|
||
value += tracker.move(" ");
|
||
value += tracker.move(
|
||
state.safe(node2.meta, {
|
||
before: value,
|
||
after: "\n",
|
||
encode: ["`"],
|
||
...tracker.current()
|
||
})
|
||
);
|
||
subexit();
|
||
}
|
||
value += tracker.move("\n");
|
||
if (raw) {
|
||
value += tracker.move(raw + "\n");
|
||
}
|
||
value += tracker.move(sequence);
|
||
exit2();
|
||
return value;
|
||
}
|
||
function map3(line, _, blank) {
|
||
return (blank ? "" : " ") + line;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-quote.js
|
||
function checkQuote(state) {
|
||
const marker = state.options.quote || '"';
|
||
if (marker !== '"' && marker !== "'") {
|
||
throw new Error(
|
||
"Cannot serialize title with `" + marker + "` for `options.quote`, expected `\"`, or `'`"
|
||
);
|
||
}
|
||
return marker;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/definition.js
|
||
function definition2(node2, _, state, info) {
|
||
const quote = checkQuote(state);
|
||
const suffix = quote === '"' ? "Quote" : "Apostrophe";
|
||
const exit2 = state.enter("definition");
|
||
let subexit = state.enter("label");
|
||
const tracker = state.createTracker(info);
|
||
let value = tracker.move("[");
|
||
value += tracker.move(
|
||
state.safe(state.associationId(node2), {
|
||
before: value,
|
||
after: "]",
|
||
...tracker.current()
|
||
})
|
||
);
|
||
value += tracker.move("]: ");
|
||
subexit();
|
||
if (
|
||
// If there’s no url, or…
|
||
!node2.url || // If there are control characters or whitespace.
|
||
/[\0- \u007F]/.test(node2.url)
|
||
) {
|
||
subexit = state.enter("destinationLiteral");
|
||
value += tracker.move("<");
|
||
value += tracker.move(
|
||
state.safe(node2.url, { before: value, after: ">", ...tracker.current() })
|
||
);
|
||
value += tracker.move(">");
|
||
} else {
|
||
subexit = state.enter("destinationRaw");
|
||
value += tracker.move(
|
||
state.safe(node2.url, {
|
||
before: value,
|
||
after: node2.title ? " " : "\n",
|
||
...tracker.current()
|
||
})
|
||
);
|
||
}
|
||
subexit();
|
||
if (node2.title) {
|
||
subexit = state.enter(`title${suffix}`);
|
||
value += tracker.move(" " + quote);
|
||
value += tracker.move(
|
||
state.safe(node2.title, {
|
||
before: value,
|
||
after: quote,
|
||
...tracker.current()
|
||
})
|
||
);
|
||
value += tracker.move(quote);
|
||
subexit();
|
||
}
|
||
exit2();
|
||
return value;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-emphasis.js
|
||
function checkEmphasis(state) {
|
||
const marker = state.options.emphasis || "*";
|
||
if (marker !== "*" && marker !== "_") {
|
||
throw new Error(
|
||
"Cannot serialize emphasis with `" + marker + "` for `options.emphasis`, expected `*`, or `_`"
|
||
);
|
||
}
|
||
return marker;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/emphasis.js
|
||
emphasis.peek = emphasisPeek;
|
||
function emphasis(node2, _, state, info) {
|
||
const marker = checkEmphasis(state);
|
||
const exit2 = state.enter("emphasis");
|
||
const tracker = state.createTracker(info);
|
||
let value = tracker.move(marker);
|
||
value += tracker.move(
|
||
state.containerPhrasing(node2, {
|
||
before: value,
|
||
after: marker,
|
||
...tracker.current()
|
||
})
|
||
);
|
||
value += tracker.move(marker);
|
||
exit2();
|
||
return value;
|
||
}
|
||
function emphasisPeek(_, _1, state) {
|
||
return state.options.emphasis || "*";
|
||
}
|
||
|
||
// node_modules/unist-util-is/lib/index.js
|
||
var convert = (
|
||
// Note: overloads in JSDoc can’t yet use different `@template`s.
|
||
/**
|
||
* @type {(
|
||
* (<Condition extends string>(test: Condition) => (node: unknown, index?: number | null | undefined, parent?: Parent | null | undefined, context?: unknown) => node is Node & {type: Condition}) &
|
||
* (<Condition extends Props>(test: Condition) => (node: unknown, index?: number | null | undefined, parent?: Parent | null | undefined, context?: unknown) => node is Node & Condition) &
|
||
* (<Condition extends TestFunction>(test: Condition) => (node: unknown, index?: number | null | undefined, parent?: Parent | null | undefined, context?: unknown) => node is Node & Predicate<Condition, Node>) &
|
||
* ((test?: null | undefined) => (node?: unknown, index?: number | null | undefined, parent?: Parent | null | undefined, context?: unknown) => node is Node) &
|
||
* ((test?: Test) => Check)
|
||
* )}
|
||
*/
|
||
/**
|
||
* @param {Test} [test]
|
||
* @returns {Check}
|
||
*/
|
||
function(test) {
|
||
if (test === null || test === void 0) {
|
||
return ok;
|
||
}
|
||
if (typeof test === "function") {
|
||
return castFactory(test);
|
||
}
|
||
if (typeof test === "object") {
|
||
return Array.isArray(test) ? anyFactory(test) : propsFactory(test);
|
||
}
|
||
if (typeof test === "string") {
|
||
return typeFactory(test);
|
||
}
|
||
throw new Error("Expected function, string, or object as test");
|
||
}
|
||
);
|
||
function anyFactory(tests) {
|
||
const checks = [];
|
||
let index2 = -1;
|
||
while (++index2 < tests.length) {
|
||
checks[index2] = convert(tests[index2]);
|
||
}
|
||
return castFactory(any);
|
||
function any(...parameters) {
|
||
let index3 = -1;
|
||
while (++index3 < checks.length) {
|
||
if (checks[index3].apply(this, parameters))
|
||
return true;
|
||
}
|
||
return false;
|
||
}
|
||
}
|
||
function propsFactory(check) {
|
||
const checkAsRecord = (
|
||
/** @type {Record<string, unknown>} */
|
||
check
|
||
);
|
||
return castFactory(all2);
|
||
function all2(node2) {
|
||
const nodeAsRecord = (
|
||
/** @type {Record<string, unknown>} */
|
||
/** @type {unknown} */
|
||
node2
|
||
);
|
||
let key;
|
||
for (key in check) {
|
||
if (nodeAsRecord[key] !== checkAsRecord[key])
|
||
return false;
|
||
}
|
||
return true;
|
||
}
|
||
}
|
||
function typeFactory(check) {
|
||
return castFactory(type);
|
||
function type(node2) {
|
||
return node2 && node2.type === check;
|
||
}
|
||
}
|
||
function castFactory(testFunction) {
|
||
return check;
|
||
function check(value, index2, parent) {
|
||
return Boolean(
|
||
looksLikeANode(value) && testFunction.call(
|
||
this,
|
||
value,
|
||
typeof index2 === "number" ? index2 : void 0,
|
||
parent || void 0
|
||
)
|
||
);
|
||
}
|
||
}
|
||
function ok() {
|
||
return true;
|
||
}
|
||
function looksLikeANode(value) {
|
||
return value !== null && typeof value === "object" && "type" in value;
|
||
}
|
||
|
||
// node_modules/unist-util-visit-parents/lib/color.node.js
|
||
function color(d) {
|
||
return "\x1B[33m" + d + "\x1B[39m";
|
||
}
|
||
|
||
// node_modules/unist-util-visit-parents/lib/index.js
|
||
var empty = [];
|
||
var CONTINUE = true;
|
||
var EXIT = false;
|
||
var SKIP = "skip";
|
||
function visitParents(tree, test, visitor, reverse) {
|
||
let check;
|
||
if (typeof test === "function" && typeof visitor !== "function") {
|
||
reverse = visitor;
|
||
visitor = test;
|
||
} else {
|
||
check = test;
|
||
}
|
||
const is2 = convert(check);
|
||
const step = reverse ? -1 : 1;
|
||
factory(tree, void 0, [])();
|
||
function factory(node2, index2, parents) {
|
||
const value = (
|
||
/** @type {Record<string, unknown>} */
|
||
node2 && typeof node2 === "object" ? node2 : {}
|
||
);
|
||
if (typeof value.type === "string") {
|
||
const name = (
|
||
// `hast`
|
||
typeof value.tagName === "string" ? value.tagName : (
|
||
// `xast`
|
||
typeof value.name === "string" ? value.name : void 0
|
||
)
|
||
);
|
||
Object.defineProperty(visit2, "name", {
|
||
value: "node (" + color(node2.type + (name ? "<" + name + ">" : "")) + ")"
|
||
});
|
||
}
|
||
return visit2;
|
||
function visit2() {
|
||
let result = empty;
|
||
let subresult;
|
||
let offset;
|
||
let grandparents;
|
||
if (!test || is2(node2, index2, parents[parents.length - 1] || void 0)) {
|
||
result = toResult(visitor(node2, parents));
|
||
if (result[0] === EXIT) {
|
||
return result;
|
||
}
|
||
}
|
||
if ("children" in node2 && node2.children) {
|
||
const nodeAsParent = (
|
||
/** @type {UnistParent} */
|
||
node2
|
||
);
|
||
if (nodeAsParent.children && result[0] !== SKIP) {
|
||
offset = (reverse ? nodeAsParent.children.length : -1) + step;
|
||
grandparents = parents.concat(nodeAsParent);
|
||
while (offset > -1 && offset < nodeAsParent.children.length) {
|
||
const child = nodeAsParent.children[offset];
|
||
subresult = factory(child, offset, grandparents)();
|
||
if (subresult[0] === EXIT) {
|
||
return subresult;
|
||
}
|
||
offset = typeof subresult[1] === "number" ? subresult[1] : offset + step;
|
||
}
|
||
}
|
||
}
|
||
return result;
|
||
}
|
||
}
|
||
}
|
||
function toResult(value) {
|
||
if (Array.isArray(value)) {
|
||
return value;
|
||
}
|
||
if (typeof value === "number") {
|
||
return [CONTINUE, value];
|
||
}
|
||
return value === null || value === void 0 ? empty : [value];
|
||
}
|
||
|
||
// node_modules/unist-util-visit/lib/index.js
|
||
function visit(tree, testOrVisitor, visitorOrReverse, maybeReverse) {
|
||
let reverse;
|
||
let test;
|
||
let visitor;
|
||
if (typeof testOrVisitor === "function" && typeof visitorOrReverse !== "function") {
|
||
test = void 0;
|
||
visitor = testOrVisitor;
|
||
reverse = visitorOrReverse;
|
||
} else {
|
||
test = testOrVisitor;
|
||
visitor = visitorOrReverse;
|
||
reverse = maybeReverse;
|
||
}
|
||
visitParents(tree, test, overload, reverse);
|
||
function overload(node2, parents) {
|
||
const parent = parents[parents.length - 1];
|
||
const index2 = parent ? parent.children.indexOf(node2) : void 0;
|
||
return visitor(node2, index2, parent);
|
||
}
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/format-heading-as-setext.js
|
||
function formatHeadingAsSetext(node2, state) {
|
||
let literalWithBreak = false;
|
||
visit(node2, function(node3) {
|
||
if ("value" in node3 && /\r?\n|\r/.test(node3.value) || node3.type === "break") {
|
||
literalWithBreak = true;
|
||
return EXIT;
|
||
}
|
||
});
|
||
return Boolean(
|
||
(!node2.depth || node2.depth < 3) && toString(node2) && (state.options.setext || literalWithBreak)
|
||
);
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/heading.js
|
||
function heading(node2, _, state, info) {
|
||
const rank = Math.max(Math.min(6, node2.depth || 1), 1);
|
||
const tracker = state.createTracker(info);
|
||
if (formatHeadingAsSetext(node2, state)) {
|
||
const exit3 = state.enter("headingSetext");
|
||
const subexit2 = state.enter("phrasing");
|
||
const value2 = state.containerPhrasing(node2, {
|
||
...tracker.current(),
|
||
before: "\n",
|
||
after: "\n"
|
||
});
|
||
subexit2();
|
||
exit3();
|
||
return value2 + "\n" + (rank === 1 ? "=" : "-").repeat(
|
||
// The whole size…
|
||
value2.length - // Minus the position of the character after the last EOL (or
|
||
// 0 if there is none)…
|
||
(Math.max(value2.lastIndexOf("\r"), value2.lastIndexOf("\n")) + 1)
|
||
);
|
||
}
|
||
const sequence = "#".repeat(rank);
|
||
const exit2 = state.enter("headingAtx");
|
||
const subexit = state.enter("phrasing");
|
||
tracker.move(sequence + " ");
|
||
let value = state.containerPhrasing(node2, {
|
||
before: "# ",
|
||
after: "\n",
|
||
...tracker.current()
|
||
});
|
||
if (/^[\t ]/.test(value)) {
|
||
value = "&#x" + value.charCodeAt(0).toString(16).toUpperCase() + ";" + value.slice(1);
|
||
}
|
||
value = value ? sequence + " " + value : sequence;
|
||
if (state.options.closeAtx) {
|
||
value += " " + sequence;
|
||
}
|
||
subexit();
|
||
exit2();
|
||
return value;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/html.js
|
||
html.peek = htmlPeek;
|
||
function html(node2) {
|
||
return node2.value || "";
|
||
}
|
||
function htmlPeek() {
|
||
return "<";
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/image.js
|
||
image.peek = imagePeek;
|
||
function image(node2, _, state, info) {
|
||
const quote = checkQuote(state);
|
||
const suffix = quote === '"' ? "Quote" : "Apostrophe";
|
||
const exit2 = state.enter("image");
|
||
let subexit = state.enter("label");
|
||
const tracker = state.createTracker(info);
|
||
let value = tracker.move("![");
|
||
value += tracker.move(
|
||
state.safe(node2.alt, { before: value, after: "]", ...tracker.current() })
|
||
);
|
||
value += tracker.move("](");
|
||
subexit();
|
||
if (
|
||
// If there’s no url but there is a title…
|
||
!node2.url && node2.title || // If there are control characters or whitespace.
|
||
/[\0- \u007F]/.test(node2.url)
|
||
) {
|
||
subexit = state.enter("destinationLiteral");
|
||
value += tracker.move("<");
|
||
value += tracker.move(
|
||
state.safe(node2.url, { before: value, after: ">", ...tracker.current() })
|
||
);
|
||
value += tracker.move(">");
|
||
} else {
|
||
subexit = state.enter("destinationRaw");
|
||
value += tracker.move(
|
||
state.safe(node2.url, {
|
||
before: value,
|
||
after: node2.title ? " " : ")",
|
||
...tracker.current()
|
||
})
|
||
);
|
||
}
|
||
subexit();
|
||
if (node2.title) {
|
||
subexit = state.enter(`title${suffix}`);
|
||
value += tracker.move(" " + quote);
|
||
value += tracker.move(
|
||
state.safe(node2.title, {
|
||
before: value,
|
||
after: quote,
|
||
...tracker.current()
|
||
})
|
||
);
|
||
value += tracker.move(quote);
|
||
subexit();
|
||
}
|
||
value += tracker.move(")");
|
||
exit2();
|
||
return value;
|
||
}
|
||
function imagePeek() {
|
||
return "!";
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/image-reference.js
|
||
imageReference.peek = imageReferencePeek;
|
||
function imageReference(node2, _, state, info) {
|
||
const type = node2.referenceType;
|
||
const exit2 = state.enter("imageReference");
|
||
let subexit = state.enter("label");
|
||
const tracker = state.createTracker(info);
|
||
let value = tracker.move("![");
|
||
const alt = state.safe(node2.alt, {
|
||
before: value,
|
||
after: "]",
|
||
...tracker.current()
|
||
});
|
||
value += tracker.move(alt + "][");
|
||
subexit();
|
||
const stack = state.stack;
|
||
state.stack = [];
|
||
subexit = state.enter("reference");
|
||
const reference = state.safe(state.associationId(node2), {
|
||
before: value,
|
||
after: "]",
|
||
...tracker.current()
|
||
});
|
||
subexit();
|
||
state.stack = stack;
|
||
exit2();
|
||
if (type === "full" || !alt || alt !== reference) {
|
||
value += tracker.move(reference + "]");
|
||
} else if (type === "shortcut") {
|
||
value = value.slice(0, -1);
|
||
} else {
|
||
value += tracker.move("]");
|
||
}
|
||
return value;
|
||
}
|
||
function imageReferencePeek() {
|
||
return "!";
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/inline-code.js
|
||
inlineCode.peek = inlineCodePeek;
|
||
function inlineCode(node2, _, state) {
|
||
let value = node2.value || "";
|
||
let sequence = "`";
|
||
let index2 = -1;
|
||
while (new RegExp("(^|[^`])" + sequence + "([^`]|$)").test(value)) {
|
||
sequence += "`";
|
||
}
|
||
if (/[^ \r\n]/.test(value) && (/^[ \r\n]/.test(value) && /[ \r\n]$/.test(value) || /^`|`$/.test(value))) {
|
||
value = " " + value + " ";
|
||
}
|
||
while (++index2 < state.unsafe.length) {
|
||
const pattern = state.unsafe[index2];
|
||
const expression = state.compilePattern(pattern);
|
||
let match;
|
||
if (!pattern.atBreak)
|
||
continue;
|
||
while (match = expression.exec(value)) {
|
||
let position2 = match.index;
|
||
if (value.charCodeAt(position2) === 10 && value.charCodeAt(position2 - 1) === 13) {
|
||
position2--;
|
||
}
|
||
value = value.slice(0, position2) + " " + value.slice(match.index + 1);
|
||
}
|
||
}
|
||
return sequence + value + sequence;
|
||
}
|
||
function inlineCodePeek() {
|
||
return "`";
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/format-link-as-autolink.js
|
||
function formatLinkAsAutolink(node2, state) {
|
||
const raw = toString(node2);
|
||
return Boolean(
|
||
!state.options.resourceLink && // If there’s a url…
|
||
node2.url && // And there’s a no title…
|
||
!node2.title && // And the content of `node` is a single text node…
|
||
node2.children && node2.children.length === 1 && node2.children[0].type === "text" && // And if the url is the same as the content…
|
||
(raw === node2.url || "mailto:" + raw === node2.url) && // And that starts w/ a protocol…
|
||
/^[a-z][a-z+.-]+:/i.test(node2.url) && // And that doesn’t contain ASCII control codes (character escapes and
|
||
// references don’t work), space, or angle brackets…
|
||
!/[\0- <>\u007F]/.test(node2.url)
|
||
);
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/link.js
|
||
link.peek = linkPeek;
|
||
function link(node2, _, state, info) {
|
||
const quote = checkQuote(state);
|
||
const suffix = quote === '"' ? "Quote" : "Apostrophe";
|
||
const tracker = state.createTracker(info);
|
||
let exit2;
|
||
let subexit;
|
||
if (formatLinkAsAutolink(node2, state)) {
|
||
const stack = state.stack;
|
||
state.stack = [];
|
||
exit2 = state.enter("autolink");
|
||
let value2 = tracker.move("<");
|
||
value2 += tracker.move(
|
||
state.containerPhrasing(node2, {
|
||
before: value2,
|
||
after: ">",
|
||
...tracker.current()
|
||
})
|
||
);
|
||
value2 += tracker.move(">");
|
||
exit2();
|
||
state.stack = stack;
|
||
return value2;
|
||
}
|
||
exit2 = state.enter("link");
|
||
subexit = state.enter("label");
|
||
let value = tracker.move("[");
|
||
value += tracker.move(
|
||
state.containerPhrasing(node2, {
|
||
before: value,
|
||
after: "](",
|
||
...tracker.current()
|
||
})
|
||
);
|
||
value += tracker.move("](");
|
||
subexit();
|
||
if (
|
||
// If there’s no url but there is a title…
|
||
!node2.url && node2.title || // If there are control characters or whitespace.
|
||
/[\0- \u007F]/.test(node2.url)
|
||
) {
|
||
subexit = state.enter("destinationLiteral");
|
||
value += tracker.move("<");
|
||
value += tracker.move(
|
||
state.safe(node2.url, { before: value, after: ">", ...tracker.current() })
|
||
);
|
||
value += tracker.move(">");
|
||
} else {
|
||
subexit = state.enter("destinationRaw");
|
||
value += tracker.move(
|
||
state.safe(node2.url, {
|
||
before: value,
|
||
after: node2.title ? " " : ")",
|
||
...tracker.current()
|
||
})
|
||
);
|
||
}
|
||
subexit();
|
||
if (node2.title) {
|
||
subexit = state.enter(`title${suffix}`);
|
||
value += tracker.move(" " + quote);
|
||
value += tracker.move(
|
||
state.safe(node2.title, {
|
||
before: value,
|
||
after: quote,
|
||
...tracker.current()
|
||
})
|
||
);
|
||
value += tracker.move(quote);
|
||
subexit();
|
||
}
|
||
value += tracker.move(")");
|
||
exit2();
|
||
return value;
|
||
}
|
||
function linkPeek(node2, _, state) {
|
||
return formatLinkAsAutolink(node2, state) ? "<" : "[";
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/link-reference.js
|
||
linkReference.peek = linkReferencePeek;
|
||
function linkReference(node2, _, state, info) {
|
||
const type = node2.referenceType;
|
||
const exit2 = state.enter("linkReference");
|
||
let subexit = state.enter("label");
|
||
const tracker = state.createTracker(info);
|
||
let value = tracker.move("[");
|
||
const text4 = state.containerPhrasing(node2, {
|
||
before: value,
|
||
after: "]",
|
||
...tracker.current()
|
||
});
|
||
value += tracker.move(text4 + "][");
|
||
subexit();
|
||
const stack = state.stack;
|
||
state.stack = [];
|
||
subexit = state.enter("reference");
|
||
const reference = state.safe(state.associationId(node2), {
|
||
before: value,
|
||
after: "]",
|
||
...tracker.current()
|
||
});
|
||
subexit();
|
||
state.stack = stack;
|
||
exit2();
|
||
if (type === "full" || !text4 || text4 !== reference) {
|
||
value += tracker.move(reference + "]");
|
||
} else if (type === "shortcut") {
|
||
value = value.slice(0, -1);
|
||
} else {
|
||
value += tracker.move("]");
|
||
}
|
||
return value;
|
||
}
|
||
function linkReferencePeek() {
|
||
return "[";
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-bullet.js
|
||
function checkBullet(state) {
|
||
const marker = state.options.bullet || "*";
|
||
if (marker !== "*" && marker !== "+" && marker !== "-") {
|
||
throw new Error(
|
||
"Cannot serialize items with `" + marker + "` for `options.bullet`, expected `*`, `+`, or `-`"
|
||
);
|
||
}
|
||
return marker;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-bullet-other.js
|
||
function checkBulletOther(state) {
|
||
const bullet = checkBullet(state);
|
||
const bulletOther = state.options.bulletOther;
|
||
if (!bulletOther) {
|
||
return bullet === "*" ? "-" : "*";
|
||
}
|
||
if (bulletOther !== "*" && bulletOther !== "+" && bulletOther !== "-") {
|
||
throw new Error(
|
||
"Cannot serialize items with `" + bulletOther + "` for `options.bulletOther`, expected `*`, `+`, or `-`"
|
||
);
|
||
}
|
||
if (bulletOther === bullet) {
|
||
throw new Error(
|
||
"Expected `bullet` (`" + bullet + "`) and `bulletOther` (`" + bulletOther + "`) to be different"
|
||
);
|
||
}
|
||
return bulletOther;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-bullet-ordered.js
|
||
function checkBulletOrdered(state) {
|
||
const marker = state.options.bulletOrdered || ".";
|
||
if (marker !== "." && marker !== ")") {
|
||
throw new Error(
|
||
"Cannot serialize items with `" + marker + "` for `options.bulletOrdered`, expected `.` or `)`"
|
||
);
|
||
}
|
||
return marker;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-rule.js
|
||
function checkRule(state) {
|
||
const marker = state.options.rule || "*";
|
||
if (marker !== "*" && marker !== "-" && marker !== "_") {
|
||
throw new Error(
|
||
"Cannot serialize rules with `" + marker + "` for `options.rule`, expected `*`, `-`, or `_`"
|
||
);
|
||
}
|
||
return marker;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/list.js
|
||
function list3(node2, parent, state, info) {
|
||
const exit2 = state.enter("list");
|
||
const bulletCurrent = state.bulletCurrent;
|
||
let bullet = node2.ordered ? checkBulletOrdered(state) : checkBullet(state);
|
||
const bulletOther = node2.ordered ? bullet === "." ? ")" : "." : checkBulletOther(state);
|
||
let useDifferentMarker = parent && state.bulletLastUsed ? bullet === state.bulletLastUsed : false;
|
||
if (!node2.ordered) {
|
||
const firstListItem = node2.children ? node2.children[0] : void 0;
|
||
if (
|
||
// Bullet could be used as a thematic break marker:
|
||
(bullet === "*" || bullet === "-") && // Empty first list item:
|
||
firstListItem && (!firstListItem.children || !firstListItem.children[0]) && // Directly in two other list items:
|
||
state.stack[state.stack.length - 1] === "list" && state.stack[state.stack.length - 2] === "listItem" && state.stack[state.stack.length - 3] === "list" && state.stack[state.stack.length - 4] === "listItem" && // That are each the first child.
|
||
state.indexStack[state.indexStack.length - 1] === 0 && state.indexStack[state.indexStack.length - 2] === 0 && state.indexStack[state.indexStack.length - 3] === 0
|
||
) {
|
||
useDifferentMarker = true;
|
||
}
|
||
if (checkRule(state) === bullet && firstListItem) {
|
||
let index2 = -1;
|
||
while (++index2 < node2.children.length) {
|
||
const item = node2.children[index2];
|
||
if (item && item.type === "listItem" && item.children && item.children[0] && item.children[0].type === "thematicBreak") {
|
||
useDifferentMarker = true;
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
if (useDifferentMarker) {
|
||
bullet = bulletOther;
|
||
}
|
||
state.bulletCurrent = bullet;
|
||
const value = state.containerFlow(node2, info);
|
||
state.bulletLastUsed = bullet;
|
||
state.bulletCurrent = bulletCurrent;
|
||
exit2();
|
||
return value;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-list-item-indent.js
|
||
function checkListItemIndent(state) {
|
||
const style = state.options.listItemIndent || "one";
|
||
if (style !== "tab" && style !== "one" && style !== "mixed") {
|
||
throw new Error(
|
||
"Cannot serialize items with `" + style + "` for `options.listItemIndent`, expected `tab`, `one`, or `mixed`"
|
||
);
|
||
}
|
||
return style;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/list-item.js
|
||
function listItem(node2, parent, state, info) {
|
||
const listItemIndent = checkListItemIndent(state);
|
||
let bullet = state.bulletCurrent || checkBullet(state);
|
||
if (parent && parent.type === "list" && parent.ordered) {
|
||
bullet = (typeof parent.start === "number" && parent.start > -1 ? parent.start : 1) + (state.options.incrementListMarker === false ? 0 : parent.children.indexOf(node2)) + bullet;
|
||
}
|
||
let size = bullet.length + 1;
|
||
if (listItemIndent === "tab" || listItemIndent === "mixed" && (parent && parent.type === "list" && parent.spread || node2.spread)) {
|
||
size = Math.ceil(size / 4) * 4;
|
||
}
|
||
const tracker = state.createTracker(info);
|
||
tracker.move(bullet + " ".repeat(size - bullet.length));
|
||
tracker.shift(size);
|
||
const exit2 = state.enter("listItem");
|
||
const value = state.indentLines(
|
||
state.containerFlow(node2, tracker.current()),
|
||
map4
|
||
);
|
||
exit2();
|
||
return value;
|
||
function map4(line, index2, blank) {
|
||
if (index2) {
|
||
return (blank ? "" : " ".repeat(size)) + line;
|
||
}
|
||
return (blank ? bullet : bullet + " ".repeat(size - bullet.length)) + line;
|
||
}
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/paragraph.js
|
||
function paragraph(node2, _, state, info) {
|
||
const exit2 = state.enter("paragraph");
|
||
const subexit = state.enter("phrasing");
|
||
const value = state.containerPhrasing(node2, info);
|
||
subexit();
|
||
exit2();
|
||
return value;
|
||
}
|
||
|
||
// node_modules/mdast-util-phrasing/lib/index.js
|
||
var phrasing = (
|
||
/** @type {(node?: unknown) => node is PhrasingContent} */
|
||
convert([
|
||
"break",
|
||
"delete",
|
||
"emphasis",
|
||
"footnote",
|
||
"footnoteReference",
|
||
"image",
|
||
"imageReference",
|
||
"inlineCode",
|
||
"link",
|
||
"linkReference",
|
||
"strong",
|
||
"text"
|
||
])
|
||
);
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/root.js
|
||
function root(node2, _, state, info) {
|
||
const hasPhrasing = node2.children.some(function(d) {
|
||
return phrasing(d);
|
||
});
|
||
const fn = hasPhrasing ? state.containerPhrasing : state.containerFlow;
|
||
return fn.call(state, node2, info);
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-strong.js
|
||
function checkStrong(state) {
|
||
const marker = state.options.strong || "*";
|
||
if (marker !== "*" && marker !== "_") {
|
||
throw new Error(
|
||
"Cannot serialize strong with `" + marker + "` for `options.strong`, expected `*`, or `_`"
|
||
);
|
||
}
|
||
return marker;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/strong.js
|
||
strong.peek = strongPeek;
|
||
function strong(node2, _, state, info) {
|
||
const marker = checkStrong(state);
|
||
const exit2 = state.enter("strong");
|
||
const tracker = state.createTracker(info);
|
||
let value = tracker.move(marker + marker);
|
||
value += tracker.move(
|
||
state.containerPhrasing(node2, {
|
||
before: value,
|
||
after: marker,
|
||
...tracker.current()
|
||
})
|
||
);
|
||
value += tracker.move(marker + marker);
|
||
exit2();
|
||
return value;
|
||
}
|
||
function strongPeek(_, _1, state) {
|
||
return state.options.strong || "*";
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/text.js
|
||
function text3(node2, _, state, info) {
|
||
return state.safe(node2.value, info);
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-rule-repetition.js
|
||
function checkRuleRepetition(state) {
|
||
const repetition = state.options.ruleRepetition || 3;
|
||
if (repetition < 3) {
|
||
throw new Error(
|
||
"Cannot serialize rules with repetition `" + repetition + "` for `options.ruleRepetition`, expected `3` or more"
|
||
);
|
||
}
|
||
return repetition;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/thematic-break.js
|
||
function thematicBreak2(_, _1, state) {
|
||
const value = (checkRule(state) + (state.options.ruleSpaces ? " " : "")).repeat(checkRuleRepetition(state));
|
||
return state.options.ruleSpaces ? value.slice(0, -1) : value;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/index.js
|
||
var handle = {
|
||
blockquote,
|
||
break: hardBreak,
|
||
code,
|
||
definition: definition2,
|
||
emphasis,
|
||
hardBreak,
|
||
heading,
|
||
html,
|
||
image,
|
||
imageReference,
|
||
inlineCode,
|
||
link,
|
||
linkReference,
|
||
list: list3,
|
||
listItem,
|
||
paragraph,
|
||
root,
|
||
strong,
|
||
text: text3,
|
||
thematicBreak: thematicBreak2
|
||
};
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/join.js
|
||
var join = [joinDefaults];
|
||
function joinDefaults(left, right, parent, state) {
|
||
if (right.type === "code" && formatCodeAsIndented(right, state) && (left.type === "list" || left.type === right.type && formatCodeAsIndented(left, state))) {
|
||
return false;
|
||
}
|
||
if ("spread" in parent && typeof parent.spread === "boolean") {
|
||
if (left.type === "paragraph" && // Two paragraphs.
|
||
(left.type === right.type || right.type === "definition" || // Paragraph followed by a setext heading.
|
||
right.type === "heading" && formatHeadingAsSetext(right, state))) {
|
||
return;
|
||
}
|
||
return parent.spread ? 1 : 0;
|
||
}
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/unsafe.js
|
||
var fullPhrasingSpans = [
|
||
"autolink",
|
||
"destinationLiteral",
|
||
"destinationRaw",
|
||
"reference",
|
||
"titleQuote",
|
||
"titleApostrophe"
|
||
];
|
||
var unsafe = [
|
||
{ character: " ", after: "[\\r\\n]", inConstruct: "phrasing" },
|
||
{ character: " ", before: "[\\r\\n]", inConstruct: "phrasing" },
|
||
{
|
||
character: " ",
|
||
inConstruct: ["codeFencedLangGraveAccent", "codeFencedLangTilde"]
|
||
},
|
||
{
|
||
character: "\r",
|
||
inConstruct: [
|
||
"codeFencedLangGraveAccent",
|
||
"codeFencedLangTilde",
|
||
"codeFencedMetaGraveAccent",
|
||
"codeFencedMetaTilde",
|
||
"destinationLiteral",
|
||
"headingAtx"
|
||
]
|
||
},
|
||
{
|
||
character: "\n",
|
||
inConstruct: [
|
||
"codeFencedLangGraveAccent",
|
||
"codeFencedLangTilde",
|
||
"codeFencedMetaGraveAccent",
|
||
"codeFencedMetaTilde",
|
||
"destinationLiteral",
|
||
"headingAtx"
|
||
]
|
||
},
|
||
{ character: " ", after: "[\\r\\n]", inConstruct: "phrasing" },
|
||
{ character: " ", before: "[\\r\\n]", inConstruct: "phrasing" },
|
||
{
|
||
character: " ",
|
||
inConstruct: ["codeFencedLangGraveAccent", "codeFencedLangTilde"]
|
||
},
|
||
// An exclamation mark can start an image, if it is followed by a link or
|
||
// a link reference.
|
||
{
|
||
character: "!",
|
||
after: "\\[",
|
||
inConstruct: "phrasing",
|
||
notInConstruct: fullPhrasingSpans
|
||
},
|
||
// A quote can break out of a title.
|
||
{ character: '"', inConstruct: "titleQuote" },
|
||
// A number sign could start an ATX heading if it starts a line.
|
||
{ atBreak: true, character: "#" },
|
||
{ character: "#", inConstruct: "headingAtx", after: "(?:[\r\n]|$)" },
|
||
// Dollar sign and percentage are not used in markdown.
|
||
// An ampersand could start a character reference.
|
||
{ character: "&", after: "[#A-Za-z]", inConstruct: "phrasing" },
|
||
// An apostrophe can break out of a title.
|
||
{ character: "'", inConstruct: "titleApostrophe" },
|
||
// A left paren could break out of a destination raw.
|
||
{ character: "(", inConstruct: "destinationRaw" },
|
||
// A left paren followed by `]` could make something into a link or image.
|
||
{
|
||
before: "\\]",
|
||
character: "(",
|
||
inConstruct: "phrasing",
|
||
notInConstruct: fullPhrasingSpans
|
||
},
|
||
// A right paren could start a list item or break out of a destination
|
||
// raw.
|
||
{ atBreak: true, before: "\\d+", character: ")" },
|
||
{ character: ")", inConstruct: "destinationRaw" },
|
||
// An asterisk can start thematic breaks, list items, emphasis, strong.
|
||
{ atBreak: true, character: "*", after: "(?:[ \r\n*])" },
|
||
{ character: "*", inConstruct: "phrasing", notInConstruct: fullPhrasingSpans },
|
||
// A plus sign could start a list item.
|
||
{ atBreak: true, character: "+", after: "(?:[ \r\n])" },
|
||
// A dash can start thematic breaks, list items, and setext heading
|
||
// underlines.
|
||
{ atBreak: true, character: "-", after: "(?:[ \r\n-])" },
|
||
// A dot could start a list item.
|
||
{ atBreak: true, before: "\\d+", character: ".", after: "(?:[ \r\n]|$)" },
|
||
// Slash, colon, and semicolon are not used in markdown for constructs.
|
||
// A less than can start html (flow or text) or an autolink.
|
||
// HTML could start with an exclamation mark (declaration, cdata, comment),
|
||
// slash (closing tag), question mark (instruction), or a letter (tag).
|
||
// An autolink also starts with a letter.
|
||
// Finally, it could break out of a destination literal.
|
||
{ atBreak: true, character: "<", after: "[!/?A-Za-z]" },
|
||
{
|
||
character: "<",
|
||
after: "[!/?A-Za-z]",
|
||
inConstruct: "phrasing",
|
||
notInConstruct: fullPhrasingSpans
|
||
},
|
||
{ character: "<", inConstruct: "destinationLiteral" },
|
||
// An equals to can start setext heading underlines.
|
||
{ atBreak: true, character: "=" },
|
||
// A greater than can start block quotes and it can break out of a
|
||
// destination literal.
|
||
{ atBreak: true, character: ">" },
|
||
{ character: ">", inConstruct: "destinationLiteral" },
|
||
// Question mark and at sign are not used in markdown for constructs.
|
||
// A left bracket can start definitions, references, labels,
|
||
{ atBreak: true, character: "[" },
|
||
{ character: "[", inConstruct: "phrasing", notInConstruct: fullPhrasingSpans },
|
||
{ character: "[", inConstruct: ["label", "reference"] },
|
||
// A backslash can start an escape (when followed by punctuation) or a
|
||
// hard break (when followed by an eol).
|
||
// Note: typical escapes are handled in `safe`!
|
||
{ character: "\\", after: "[\\r\\n]", inConstruct: "phrasing" },
|
||
// A right bracket can exit labels.
|
||
{ character: "]", inConstruct: ["label", "reference"] },
|
||
// Caret is not used in markdown for constructs.
|
||
// An underscore can start emphasis, strong, or a thematic break.
|
||
{ atBreak: true, character: "_" },
|
||
{ character: "_", inConstruct: "phrasing", notInConstruct: fullPhrasingSpans },
|
||
// A grave accent can start code (fenced or text), or it can break out of
|
||
// a grave accent code fence.
|
||
{ atBreak: true, character: "`" },
|
||
{
|
||
character: "`",
|
||
inConstruct: ["codeFencedLangGraveAccent", "codeFencedMetaGraveAccent"]
|
||
},
|
||
{ character: "`", inConstruct: "phrasing", notInConstruct: fullPhrasingSpans },
|
||
// Left brace, vertical bar, right brace are not used in markdown for
|
||
// constructs.
|
||
// A tilde can start code (fenced).
|
||
{ atBreak: true, character: "~" }
|
||
];
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/association.js
|
||
function association(node2) {
|
||
if (node2.label || !node2.identifier) {
|
||
return node2.label || "";
|
||
}
|
||
return decodeString(node2.identifier);
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/compile-pattern.js
|
||
function compilePattern(pattern) {
|
||
if (!pattern._compiled) {
|
||
const before = (pattern.atBreak ? "[\\r\\n][\\t ]*" : "") + (pattern.before ? "(?:" + pattern.before + ")" : "");
|
||
pattern._compiled = new RegExp(
|
||
(before ? "(" + before + ")" : "") + (/[|\\{}()[\]^$+*?.-]/.test(pattern.character) ? "\\" : "") + pattern.character + (pattern.after ? "(?:" + pattern.after + ")" : ""),
|
||
"g"
|
||
);
|
||
}
|
||
return pattern._compiled;
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/container-phrasing.js
|
||
function containerPhrasing(parent, state, info) {
|
||
const indexStack = state.indexStack;
|
||
const children = parent.children || [];
|
||
const results = [];
|
||
let index2 = -1;
|
||
let before = info.before;
|
||
indexStack.push(-1);
|
||
let tracker = state.createTracker(info);
|
||
while (++index2 < children.length) {
|
||
const child = children[index2];
|
||
let after;
|
||
indexStack[indexStack.length - 1] = index2;
|
||
if (index2 + 1 < children.length) {
|
||
let handle2 = state.handle.handlers[children[index2 + 1].type];
|
||
if (handle2 && handle2.peek)
|
||
handle2 = handle2.peek;
|
||
after = handle2 ? handle2(children[index2 + 1], parent, state, {
|
||
before: "",
|
||
after: "",
|
||
...tracker.current()
|
||
}).charAt(0) : "";
|
||
} else {
|
||
after = info.after;
|
||
}
|
||
if (results.length > 0 && (before === "\r" || before === "\n") && child.type === "html") {
|
||
results[results.length - 1] = results[results.length - 1].replace(
|
||
/(\r?\n|\r)$/,
|
||
" "
|
||
);
|
||
before = " ";
|
||
tracker = state.createTracker(info);
|
||
tracker.move(results.join(""));
|
||
}
|
||
results.push(
|
||
tracker.move(
|
||
state.handle(child, parent, state, {
|
||
...tracker.current(),
|
||
before,
|
||
after
|
||
})
|
||
)
|
||
);
|
||
before = results[results.length - 1].slice(-1);
|
||
}
|
||
indexStack.pop();
|
||
return results.join("");
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/container-flow.js
|
||
function containerFlow(parent, state, info) {
|
||
const indexStack = state.indexStack;
|
||
const children = parent.children || [];
|
||
const tracker = state.createTracker(info);
|
||
const results = [];
|
||
let index2 = -1;
|
||
indexStack.push(-1);
|
||
while (++index2 < children.length) {
|
||
const child = children[index2];
|
||
indexStack[indexStack.length - 1] = index2;
|
||
results.push(
|
||
tracker.move(
|
||
state.handle(child, parent, state, {
|
||
before: "\n",
|
||
after: "\n",
|
||
...tracker.current()
|
||
})
|
||
)
|
||
);
|
||
if (child.type !== "list") {
|
||
state.bulletLastUsed = void 0;
|
||
}
|
||
if (index2 < children.length - 1) {
|
||
results.push(
|
||
tracker.move(between(child, children[index2 + 1], parent, state))
|
||
);
|
||
}
|
||
}
|
||
indexStack.pop();
|
||
return results.join("");
|
||
}
|
||
function between(left, right, parent, state) {
|
||
let index2 = state.join.length;
|
||
while (index2--) {
|
||
const result = state.join[index2](left, right, parent, state);
|
||
if (result === true || result === 1) {
|
||
break;
|
||
}
|
||
if (typeof result === "number") {
|
||
return "\n".repeat(1 + result);
|
||
}
|
||
if (result === false) {
|
||
return "\n\n<!---->\n\n";
|
||
}
|
||
}
|
||
return "\n\n";
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/indent-lines.js
|
||
var eol = /\r?\n|\r/g;
|
||
function indentLines(value, map4) {
|
||
const result = [];
|
||
let start = 0;
|
||
let line = 0;
|
||
let match;
|
||
while (match = eol.exec(value)) {
|
||
one2(value.slice(start, match.index));
|
||
result.push(match[0]);
|
||
start = match.index + match[0].length;
|
||
line++;
|
||
}
|
||
one2(value.slice(start));
|
||
return result.join("");
|
||
function one2(value2) {
|
||
result.push(map4(value2, line, !value2));
|
||
}
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/safe.js
|
||
function safe(state, input, config) {
|
||
const value = (config.before || "") + (input || "") + (config.after || "");
|
||
const positions = [];
|
||
const result = [];
|
||
const infos = {};
|
||
let index2 = -1;
|
||
while (++index2 < state.unsafe.length) {
|
||
const pattern = state.unsafe[index2];
|
||
if (!patternInScope(state.stack, pattern)) {
|
||
continue;
|
||
}
|
||
const expression = state.compilePattern(pattern);
|
||
let match;
|
||
while (match = expression.exec(value)) {
|
||
const before = "before" in pattern || Boolean(pattern.atBreak);
|
||
const after = "after" in pattern;
|
||
const position2 = match.index + (before ? match[1].length : 0);
|
||
if (positions.includes(position2)) {
|
||
if (infos[position2].before && !before) {
|
||
infos[position2].before = false;
|
||
}
|
||
if (infos[position2].after && !after) {
|
||
infos[position2].after = false;
|
||
}
|
||
} else {
|
||
positions.push(position2);
|
||
infos[position2] = { before, after };
|
||
}
|
||
}
|
||
}
|
||
positions.sort(numerical);
|
||
let start = config.before ? config.before.length : 0;
|
||
const end = value.length - (config.after ? config.after.length : 0);
|
||
index2 = -1;
|
||
while (++index2 < positions.length) {
|
||
const position2 = positions[index2];
|
||
if (position2 < start || position2 >= end) {
|
||
continue;
|
||
}
|
||
if (position2 + 1 < end && positions[index2 + 1] === position2 + 1 && infos[position2].after && !infos[position2 + 1].before && !infos[position2 + 1].after || positions[index2 - 1] === position2 - 1 && infos[position2].before && !infos[position2 - 1].before && !infos[position2 - 1].after) {
|
||
continue;
|
||
}
|
||
if (start !== position2) {
|
||
result.push(escapeBackslashes(value.slice(start, position2), "\\"));
|
||
}
|
||
start = position2;
|
||
if (/[!-/:-@[-`{-~]/.test(value.charAt(position2)) && (!config.encode || !config.encode.includes(value.charAt(position2)))) {
|
||
result.push("\\");
|
||
} else {
|
||
result.push(
|
||
"&#x" + value.charCodeAt(position2).toString(16).toUpperCase() + ";"
|
||
);
|
||
start++;
|
||
}
|
||
}
|
||
result.push(escapeBackslashes(value.slice(start, end), config.after));
|
||
return result.join("");
|
||
}
|
||
function numerical(a, b) {
|
||
return a - b;
|
||
}
|
||
function escapeBackslashes(value, after) {
|
||
const expression = /\\(?=[!-/:-@[-`{-~])/g;
|
||
const positions = [];
|
||
const results = [];
|
||
const whole = value + after;
|
||
let index2 = -1;
|
||
let start = 0;
|
||
let match;
|
||
while (match = expression.exec(whole)) {
|
||
positions.push(match.index);
|
||
}
|
||
while (++index2 < positions.length) {
|
||
if (start !== positions[index2]) {
|
||
results.push(value.slice(start, positions[index2]));
|
||
}
|
||
results.push("\\");
|
||
start = positions[index2];
|
||
}
|
||
results.push(value.slice(start));
|
||
return results.join("");
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/track.js
|
||
function track(config) {
|
||
const options = config || {};
|
||
const now = options.now || {};
|
||
let lineShift = options.lineShift || 0;
|
||
let line = now.line || 1;
|
||
let column = now.column || 1;
|
||
return { move, current, shift };
|
||
function current() {
|
||
return { now: { line, column }, lineShift };
|
||
}
|
||
function shift(value) {
|
||
lineShift += value;
|
||
}
|
||
function move(input) {
|
||
const value = input || "";
|
||
const chunks = value.split(/\r?\n|\r/g);
|
||
const tail = chunks[chunks.length - 1];
|
||
line += chunks.length - 1;
|
||
column = chunks.length === 1 ? column + tail.length : 1 + tail.length + lineShift;
|
||
return value;
|
||
}
|
||
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/index.js
|
||
function toMarkdown(tree, options = {}) {
|
||
const state = {
|
||
enter,
|
||
indentLines,
|
||
associationId: association,
|
||
containerPhrasing: containerPhrasingBound,
|
||
containerFlow: containerFlowBound,
|
||
createTracker: track,
|
||
compilePattern,
|
||
safe: safeBound,
|
||
stack: [],
|
||
unsafe: [...unsafe],
|
||
join: [...join],
|
||
// @ts-expect-error: GFM / frontmatter are typed in `mdast` but not defined
|
||
// here.
|
||
handlers: { ...handle },
|
||
options: {},
|
||
indexStack: [],
|
||
// @ts-expect-error: add `handle` in a second.
|
||
handle: void 0
|
||
};
|
||
configure2(state, options);
|
||
if (state.options.tightDefinitions) {
|
||
state.join.push(joinDefinition);
|
||
}
|
||
state.handle = zwitch("type", {
|
||
invalid,
|
||
unknown,
|
||
handlers: state.handlers
|
||
});
|
||
let result = state.handle(tree, void 0, state, {
|
||
before: "\n",
|
||
after: "\n",
|
||
now: { line: 1, column: 1 },
|
||
lineShift: 0
|
||
});
|
||
if (result && result.charCodeAt(result.length - 1) !== 10 && result.charCodeAt(result.length - 1) !== 13) {
|
||
result += "\n";
|
||
}
|
||
return result;
|
||
function enter(name) {
|
||
state.stack.push(name);
|
||
return exit2;
|
||
function exit2() {
|
||
state.stack.pop();
|
||
}
|
||
}
|
||
}
|
||
function invalid(value) {
|
||
throw new Error("Cannot handle value `" + value + "`, expected node");
|
||
}
|
||
function unknown(value) {
|
||
const node2 = (
|
||
/** @type {Nodes} */
|
||
value
|
||
);
|
||
throw new Error("Cannot handle unknown node `" + node2.type + "`");
|
||
}
|
||
function joinDefinition(left, right) {
|
||
if (left.type === "definition" && left.type === right.type) {
|
||
return 0;
|
||
}
|
||
}
|
||
function containerPhrasingBound(parent, info) {
|
||
return containerPhrasing(parent, this, info);
|
||
}
|
||
function containerFlowBound(parent, info) {
|
||
return containerFlow(parent, this, info);
|
||
}
|
||
function safeBound(value, config) {
|
||
return safe(this, value, config);
|
||
}
|
||
|
||
// node_modules/remark-stringify/lib/index.js
|
||
function remarkStringify(options) {
|
||
const self = this;
|
||
self.compiler = compiler2;
|
||
function compiler2(tree) {
|
||
return toMarkdown(tree, {
|
||
...self.data("settings"),
|
||
...options,
|
||
// Note: this option is not in the readme.
|
||
// The goal is for it to be set by plugins on `data` instead of being
|
||
// passed by users.
|
||
extensions: self.data("toMarkdownExtensions") || []
|
||
});
|
||
}
|
||
}
|
||
|
||
// node_modules/bail/index.js
|
||
function bail(error) {
|
||
if (error) {
|
||
throw error;
|
||
}
|
||
}
|
||
|
||
// node_modules/unified/lib/index.js
|
||
var import_extend = __toESM(require_extend(), 1);
|
||
|
||
// node_modules/devlop/lib/default.js
|
||
function ok2() {
|
||
}
|
||
|
||
// node_modules/is-plain-obj/index.js
|
||
function isPlainObject(value) {
|
||
if (typeof value !== "object" || value === null) {
|
||
return false;
|
||
}
|
||
const prototype = Object.getPrototypeOf(value);
|
||
return (prototype === null || prototype === Object.prototype || Object.getPrototypeOf(prototype) === null) && !(Symbol.toStringTag in value) && !(Symbol.iterator in value);
|
||
}
|
||
|
||
// node_modules/trough/index.js
|
||
function trough() {
|
||
const fns = [];
|
||
const pipeline = { run, use };
|
||
return pipeline;
|
||
function run(...values) {
|
||
let middlewareIndex = -1;
|
||
const callback = values.pop();
|
||
if (typeof callback !== "function") {
|
||
throw new TypeError("Expected function as last argument, not " + callback);
|
||
}
|
||
next(null, ...values);
|
||
function next(error, ...output) {
|
||
const fn = fns[++middlewareIndex];
|
||
let index2 = -1;
|
||
if (error) {
|
||
callback(error);
|
||
return;
|
||
}
|
||
while (++index2 < values.length) {
|
||
if (output[index2] === null || output[index2] === void 0) {
|
||
output[index2] = values[index2];
|
||
}
|
||
}
|
||
values = output;
|
||
if (fn) {
|
||
wrap(fn, next)(...output);
|
||
} else {
|
||
callback(null, ...output);
|
||
}
|
||
}
|
||
}
|
||
function use(middelware) {
|
||
if (typeof middelware !== "function") {
|
||
throw new TypeError(
|
||
"Expected `middelware` to be a function, not " + middelware
|
||
);
|
||
}
|
||
fns.push(middelware);
|
||
return pipeline;
|
||
}
|
||
}
|
||
function wrap(middleware, callback) {
|
||
let called;
|
||
return wrapped;
|
||
function wrapped(...parameters) {
|
||
const fnExpectsCallback = middleware.length > parameters.length;
|
||
let result;
|
||
if (fnExpectsCallback) {
|
||
parameters.push(done);
|
||
}
|
||
try {
|
||
result = middleware.apply(this, parameters);
|
||
} catch (error) {
|
||
const exception = (
|
||
/** @type {Error} */
|
||
error
|
||
);
|
||
if (fnExpectsCallback && called) {
|
||
throw exception;
|
||
}
|
||
return done(exception);
|
||
}
|
||
if (!fnExpectsCallback) {
|
||
if (result instanceof Promise) {
|
||
result.then(then, done);
|
||
} else if (result instanceof Error) {
|
||
done(result);
|
||
} else {
|
||
then(result);
|
||
}
|
||
}
|
||
}
|
||
function done(error, ...output) {
|
||
if (!called) {
|
||
called = true;
|
||
callback(error, ...output);
|
||
}
|
||
}
|
||
function then(value) {
|
||
done(null, value);
|
||
}
|
||
}
|
||
|
||
// node_modules/vfile-message/lib/index.js
|
||
var VFileMessage = class extends Error {
|
||
/**
|
||
* Create a message for `reason`.
|
||
*
|
||
* > 🪦 **Note**: also has obsolete signatures.
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Options | null | undefined} [options]
|
||
* @returns
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns
|
||
*
|
||
* @param {Error | VFileMessage | string} causeOrReason
|
||
* Reason for message, should use markdown.
|
||
* @param {Node | NodeLike | Options | Point | Position | string | null | undefined} [optionsOrParentOrPlace]
|
||
* Configuration (optional).
|
||
* @param {string | null | undefined} [origin]
|
||
* Place in code where the message originates (example:
|
||
* `'my-package:my-rule'` or `'my-rule'`).
|
||
* @returns
|
||
* Instance of `VFileMessage`.
|
||
*/
|
||
// eslint-disable-next-line complexity
|
||
constructor(causeOrReason, optionsOrParentOrPlace, origin) {
|
||
super();
|
||
if (typeof optionsOrParentOrPlace === "string") {
|
||
origin = optionsOrParentOrPlace;
|
||
optionsOrParentOrPlace = void 0;
|
||
}
|
||
let reason = "";
|
||
let options = {};
|
||
let legacyCause = false;
|
||
if (optionsOrParentOrPlace) {
|
||
if ("line" in optionsOrParentOrPlace && "column" in optionsOrParentOrPlace) {
|
||
options = { place: optionsOrParentOrPlace };
|
||
} else if ("start" in optionsOrParentOrPlace && "end" in optionsOrParentOrPlace) {
|
||
options = { place: optionsOrParentOrPlace };
|
||
} else if ("type" in optionsOrParentOrPlace) {
|
||
options = {
|
||
ancestors: [optionsOrParentOrPlace],
|
||
place: optionsOrParentOrPlace.position
|
||
};
|
||
} else {
|
||
options = { ...optionsOrParentOrPlace };
|
||
}
|
||
}
|
||
if (typeof causeOrReason === "string") {
|
||
reason = causeOrReason;
|
||
} else if (!options.cause && causeOrReason) {
|
||
legacyCause = true;
|
||
reason = causeOrReason.message;
|
||
options.cause = causeOrReason;
|
||
}
|
||
if (!options.ruleId && !options.source && typeof origin === "string") {
|
||
const index2 = origin.indexOf(":");
|
||
if (index2 === -1) {
|
||
options.ruleId = origin;
|
||
} else {
|
||
options.source = origin.slice(0, index2);
|
||
options.ruleId = origin.slice(index2 + 1);
|
||
}
|
||
}
|
||
if (!options.place && options.ancestors && options.ancestors) {
|
||
const parent = options.ancestors[options.ancestors.length - 1];
|
||
if (parent) {
|
||
options.place = parent.position;
|
||
}
|
||
}
|
||
const start = options.place && "start" in options.place ? options.place.start : options.place;
|
||
this.ancestors = options.ancestors || void 0;
|
||
this.cause = options.cause || void 0;
|
||
this.column = start ? start.column : void 0;
|
||
this.fatal = void 0;
|
||
this.file;
|
||
this.message = reason;
|
||
this.line = start ? start.line : void 0;
|
||
this.name = stringifyPosition(options.place) || "1:1";
|
||
this.place = options.place || void 0;
|
||
this.reason = this.message;
|
||
this.ruleId = options.ruleId || void 0;
|
||
this.source = options.source || void 0;
|
||
this.stack = legacyCause && options.cause && typeof options.cause.stack === "string" ? options.cause.stack : "";
|
||
this.actual;
|
||
this.expected;
|
||
this.note;
|
||
this.url;
|
||
}
|
||
};
|
||
VFileMessage.prototype.file = "";
|
||
VFileMessage.prototype.name = "";
|
||
VFileMessage.prototype.reason = "";
|
||
VFileMessage.prototype.message = "";
|
||
VFileMessage.prototype.stack = "";
|
||
VFileMessage.prototype.column = void 0;
|
||
VFileMessage.prototype.line = void 0;
|
||
VFileMessage.prototype.ancestors = void 0;
|
||
VFileMessage.prototype.cause = void 0;
|
||
VFileMessage.prototype.fatal = void 0;
|
||
VFileMessage.prototype.place = void 0;
|
||
VFileMessage.prototype.ruleId = void 0;
|
||
VFileMessage.prototype.source = void 0;
|
||
|
||
// node_modules/vfile/lib/minpath.js
|
||
var import_node_path = __toESM(require("node:path"), 1);
|
||
|
||
// node_modules/vfile/lib/minproc.js
|
||
var import_node_process = __toESM(require("node:process"), 1);
|
||
|
||
// node_modules/vfile/lib/minurl.js
|
||
var import_node_url = require("node:url");
|
||
|
||
// node_modules/vfile/lib/minurl.shared.js
|
||
function isUrl(fileUrlOrPath) {
|
||
return Boolean(
|
||
fileUrlOrPath !== null && typeof fileUrlOrPath === "object" && "href" in fileUrlOrPath && fileUrlOrPath.href && "protocol" in fileUrlOrPath && fileUrlOrPath.protocol && // @ts-expect-error: indexing is fine.
|
||
fileUrlOrPath.auth === void 0
|
||
);
|
||
}
|
||
|
||
// node_modules/vfile/lib/index.js
|
||
var order = (
|
||
/** @type {const} */
|
||
[
|
||
"history",
|
||
"path",
|
||
"basename",
|
||
"stem",
|
||
"extname",
|
||
"dirname"
|
||
]
|
||
);
|
||
var VFile = class {
|
||
/**
|
||
* Create a new virtual file.
|
||
*
|
||
* `options` is treated as:
|
||
*
|
||
* * `string` or `Uint8Array` — `{value: options}`
|
||
* * `URL` — `{path: options}`
|
||
* * `VFile` — shallow copies its data over to the new file
|
||
* * `object` — all fields are shallow copied over to the new file
|
||
*
|
||
* Path related fields are set in the following order (least specific to
|
||
* most specific): `history`, `path`, `basename`, `stem`, `extname`,
|
||
* `dirname`.
|
||
*
|
||
* You cannot set `dirname` or `extname` without setting either `history`,
|
||
* `path`, `basename`, or `stem` too.
|
||
*
|
||
* @param {Compatible | null | undefined} [value]
|
||
* File value.
|
||
* @returns
|
||
* New instance.
|
||
*/
|
||
constructor(value) {
|
||
let options;
|
||
if (!value) {
|
||
options = {};
|
||
} else if (isUrl(value)) {
|
||
options = { path: value };
|
||
} else if (typeof value === "string" || isUint8Array(value)) {
|
||
options = { value };
|
||
} else {
|
||
options = value;
|
||
}
|
||
this.cwd = import_node_process.default.cwd();
|
||
this.data = {};
|
||
this.history = [];
|
||
this.messages = [];
|
||
this.value;
|
||
this.map;
|
||
this.result;
|
||
this.stored;
|
||
let index2 = -1;
|
||
while (++index2 < order.length) {
|
||
const prop2 = order[index2];
|
||
if (prop2 in options && options[prop2] !== void 0 && options[prop2] !== null) {
|
||
this[prop2] = prop2 === "history" ? [...options[prop2]] : options[prop2];
|
||
}
|
||
}
|
||
let prop;
|
||
for (prop in options) {
|
||
if (!order.includes(prop)) {
|
||
this[prop] = options[prop];
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Get the basename (including extname) (example: `'index.min.js'`).
|
||
*
|
||
* @returns {string | undefined}
|
||
* Basename.
|
||
*/
|
||
get basename() {
|
||
return typeof this.path === "string" ? import_node_path.default.basename(this.path) : void 0;
|
||
}
|
||
/**
|
||
* Set basename (including extname) (`'index.min.js'`).
|
||
*
|
||
* Cannot contain path separators (`'/'` on unix, macOS, and browsers, `'\'`
|
||
* on windows).
|
||
* Cannot be nullified (use `file.path = file.dirname` instead).
|
||
*
|
||
* @param {string} basename
|
||
* Basename.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
set basename(basename) {
|
||
assertNonEmpty(basename, "basename");
|
||
assertPart(basename, "basename");
|
||
this.path = import_node_path.default.join(this.dirname || "", basename);
|
||
}
|
||
/**
|
||
* Get the parent path (example: `'~'`).
|
||
*
|
||
* @returns {string | undefined}
|
||
* Dirname.
|
||
*/
|
||
get dirname() {
|
||
return typeof this.path === "string" ? import_node_path.default.dirname(this.path) : void 0;
|
||
}
|
||
/**
|
||
* Set the parent path (example: `'~'`).
|
||
*
|
||
* Cannot be set if there’s no `path` yet.
|
||
*
|
||
* @param {string | undefined} dirname
|
||
* Dirname.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
set dirname(dirname) {
|
||
assertPath(this.basename, "dirname");
|
||
this.path = import_node_path.default.join(dirname || "", this.basename);
|
||
}
|
||
/**
|
||
* Get the extname (including dot) (example: `'.js'`).
|
||
*
|
||
* @returns {string | undefined}
|
||
* Extname.
|
||
*/
|
||
get extname() {
|
||
return typeof this.path === "string" ? import_node_path.default.extname(this.path) : void 0;
|
||
}
|
||
/**
|
||
* Set the extname (including dot) (example: `'.js'`).
|
||
*
|
||
* Cannot contain path separators (`'/'` on unix, macOS, and browsers, `'\'`
|
||
* on windows).
|
||
* Cannot be set if there’s no `path` yet.
|
||
*
|
||
* @param {string | undefined} extname
|
||
* Extname.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
set extname(extname) {
|
||
assertPart(extname, "extname");
|
||
assertPath(this.dirname, "extname");
|
||
if (extname) {
|
||
if (extname.codePointAt(0) !== 46) {
|
||
throw new Error("`extname` must start with `.`");
|
||
}
|
||
if (extname.includes(".", 1)) {
|
||
throw new Error("`extname` cannot contain multiple dots");
|
||
}
|
||
}
|
||
this.path = import_node_path.default.join(this.dirname, this.stem + (extname || ""));
|
||
}
|
||
/**
|
||
* Get the full path (example: `'~/index.min.js'`).
|
||
*
|
||
* @returns {string}
|
||
* Path.
|
||
*/
|
||
get path() {
|
||
return this.history[this.history.length - 1];
|
||
}
|
||
/**
|
||
* Set the full path (example: `'~/index.min.js'`).
|
||
*
|
||
* Cannot be nullified.
|
||
* You can set a file URL (a `URL` object with a `file:` protocol) which will
|
||
* be turned into a path with `url.fileURLToPath`.
|
||
*
|
||
* @param {URL | string} path
|
||
* Path.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
set path(path) {
|
||
if (isUrl(path)) {
|
||
path = (0, import_node_url.fileURLToPath)(path);
|
||
}
|
||
assertNonEmpty(path, "path");
|
||
if (this.path !== path) {
|
||
this.history.push(path);
|
||
}
|
||
}
|
||
/**
|
||
* Get the stem (basename w/o extname) (example: `'index.min'`).
|
||
*
|
||
* @returns {string | undefined}
|
||
* Stem.
|
||
*/
|
||
get stem() {
|
||
return typeof this.path === "string" ? import_node_path.default.basename(this.path, this.extname) : void 0;
|
||
}
|
||
/**
|
||
* Set the stem (basename w/o extname) (example: `'index.min'`).
|
||
*
|
||
* Cannot contain path separators (`'/'` on unix, macOS, and browsers, `'\'`
|
||
* on windows).
|
||
* Cannot be nullified (use `file.path = file.dirname` instead).
|
||
*
|
||
* @param {string} stem
|
||
* Stem.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
set stem(stem) {
|
||
assertNonEmpty(stem, "stem");
|
||
assertPart(stem, "stem");
|
||
this.path = import_node_path.default.join(this.dirname || "", stem + (this.extname || ""));
|
||
}
|
||
// Normal prototypal methods.
|
||
/**
|
||
* Create a fatal message for `reason` associated with the file.
|
||
*
|
||
* The `fatal` field of the message is set to `true` (error; file not usable)
|
||
* and the `file` field is set to the current file path.
|
||
* The message is added to the `messages` field on `file`.
|
||
*
|
||
* > 🪦 **Note**: also has obsolete signatures.
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {MessageOptions | null | undefined} [options]
|
||
* @returns {never}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {never}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {never}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {never}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {never}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {never}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {never}
|
||
*
|
||
* @param {Error | VFileMessage | string} causeOrReason
|
||
* Reason for message, should use markdown.
|
||
* @param {Node | NodeLike | MessageOptions | Point | Position | string | null | undefined} [optionsOrParentOrPlace]
|
||
* Configuration (optional).
|
||
* @param {string | null | undefined} [origin]
|
||
* Place in code where the message originates (example:
|
||
* `'my-package:my-rule'` or `'my-rule'`).
|
||
* @returns {never}
|
||
* Never.
|
||
* @throws {VFileMessage}
|
||
* Message.
|
||
*/
|
||
fail(causeOrReason, optionsOrParentOrPlace, origin) {
|
||
const message = this.message(causeOrReason, optionsOrParentOrPlace, origin);
|
||
message.fatal = true;
|
||
throw message;
|
||
}
|
||
/**
|
||
* Create an info message for `reason` associated with the file.
|
||
*
|
||
* The `fatal` field of the message is set to `undefined` (info; change
|
||
* likely not needed) and the `file` field is set to the current file path.
|
||
* The message is added to the `messages` field on `file`.
|
||
*
|
||
* > 🪦 **Note**: also has obsolete signatures.
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {MessageOptions | null | undefined} [options]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @param {Error | VFileMessage | string} causeOrReason
|
||
* Reason for message, should use markdown.
|
||
* @param {Node | NodeLike | MessageOptions | Point | Position | string | null | undefined} [optionsOrParentOrPlace]
|
||
* Configuration (optional).
|
||
* @param {string | null | undefined} [origin]
|
||
* Place in code where the message originates (example:
|
||
* `'my-package:my-rule'` or `'my-rule'`).
|
||
* @returns {VFileMessage}
|
||
* Message.
|
||
*/
|
||
info(causeOrReason, optionsOrParentOrPlace, origin) {
|
||
const message = this.message(causeOrReason, optionsOrParentOrPlace, origin);
|
||
message.fatal = void 0;
|
||
return message;
|
||
}
|
||
/**
|
||
* Create a message for `reason` associated with the file.
|
||
*
|
||
* The `fatal` field of the message is set to `false` (warning; change may be
|
||
* needed) and the `file` field is set to the current file path.
|
||
* The message is added to the `messages` field on `file`.
|
||
*
|
||
* > 🪦 **Note**: also has obsolete signatures.
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {MessageOptions | null | undefined} [options]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @param {Error | VFileMessage | string} causeOrReason
|
||
* Reason for message, should use markdown.
|
||
* @param {Node | NodeLike | MessageOptions | Point | Position | string | null | undefined} [optionsOrParentOrPlace]
|
||
* Configuration (optional).
|
||
* @param {string | null | undefined} [origin]
|
||
* Place in code where the message originates (example:
|
||
* `'my-package:my-rule'` or `'my-rule'`).
|
||
* @returns {VFileMessage}
|
||
* Message.
|
||
*/
|
||
message(causeOrReason, optionsOrParentOrPlace, origin) {
|
||
const message = new VFileMessage(
|
||
// @ts-expect-error: the overloads are fine.
|
||
causeOrReason,
|
||
optionsOrParentOrPlace,
|
||
origin
|
||
);
|
||
if (this.path) {
|
||
message.name = this.path + ":" + message.name;
|
||
message.file = this.path;
|
||
}
|
||
message.fatal = false;
|
||
this.messages.push(message);
|
||
return message;
|
||
}
|
||
/**
|
||
* Serialize the file.
|
||
*
|
||
* > **Note**: which encodings are supported depends on the engine.
|
||
* > For info on Node.js, see:
|
||
* > <https://nodejs.org/api/util.html#whatwg-supported-encodings>.
|
||
*
|
||
* @param {string | null | undefined} [encoding='utf8']
|
||
* Character encoding to understand `value` as when it’s a `Uint8Array`
|
||
* (default: `'utf-8'`).
|
||
* @returns {string}
|
||
* Serialized file.
|
||
*/
|
||
toString(encoding) {
|
||
if (this.value === void 0) {
|
||
return "";
|
||
}
|
||
if (typeof this.value === "string") {
|
||
return this.value;
|
||
}
|
||
const decoder = new TextDecoder(encoding || void 0);
|
||
return decoder.decode(this.value);
|
||
}
|
||
};
|
||
function assertPart(part, name) {
|
||
if (part && part.includes(import_node_path.default.sep)) {
|
||
throw new Error(
|
||
"`" + name + "` cannot be a path: did not expect `" + import_node_path.default.sep + "`"
|
||
);
|
||
}
|
||
}
|
||
function assertNonEmpty(part, name) {
|
||
if (!part) {
|
||
throw new Error("`" + name + "` cannot be empty");
|
||
}
|
||
}
|
||
function assertPath(path, name) {
|
||
if (!path) {
|
||
throw new Error("Setting `" + name + "` requires `path` to be set too");
|
||
}
|
||
}
|
||
function isUint8Array(value) {
|
||
return Boolean(
|
||
value && typeof value === "object" && "byteLength" in value && "byteOffset" in value
|
||
);
|
||
}
|
||
|
||
// node_modules/unified/lib/callable-instance.js
|
||
var CallableInstance = (
|
||
/**
|
||
* @type {new <Parameters extends Array<unknown>, Result>(property: string | symbol) => (...parameters: Parameters) => Result}
|
||
*/
|
||
/** @type {unknown} */
|
||
/**
|
||
* @this {Function}
|
||
* @param {string | symbol} property
|
||
* @returns {(...parameters: Array<unknown>) => unknown}
|
||
*/
|
||
function(property) {
|
||
const self = this;
|
||
const constr = self.constructor;
|
||
const proto = (
|
||
/** @type {Record<string | symbol, Function>} */
|
||
// Prototypes do exist.
|
||
// type-coverage:ignore-next-line
|
||
constr.prototype
|
||
);
|
||
const func = proto[property];
|
||
const apply = function() {
|
||
return func.apply(apply, arguments);
|
||
};
|
||
Object.setPrototypeOf(apply, proto);
|
||
const names = Object.getOwnPropertyNames(func);
|
||
for (const p of names) {
|
||
const descriptor = Object.getOwnPropertyDescriptor(func, p);
|
||
if (descriptor)
|
||
Object.defineProperty(apply, p, descriptor);
|
||
}
|
||
return apply;
|
||
}
|
||
);
|
||
|
||
// node_modules/unified/lib/index.js
|
||
var own5 = {}.hasOwnProperty;
|
||
var Processor = class _Processor extends CallableInstance {
|
||
/**
|
||
* Create a processor.
|
||
*/
|
||
constructor() {
|
||
super("copy");
|
||
this.Compiler = void 0;
|
||
this.Parser = void 0;
|
||
this.attachers = [];
|
||
this.compiler = void 0;
|
||
this.freezeIndex = -1;
|
||
this.frozen = void 0;
|
||
this.namespace = {};
|
||
this.parser = void 0;
|
||
this.transformers = trough();
|
||
}
|
||
/**
|
||
* Copy a processor.
|
||
*
|
||
* @deprecated
|
||
* This is a private internal method and should not be used.
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
* New *unfrozen* processor ({@link Processor `Processor`}) that is
|
||
* configured to work the same as its ancestor.
|
||
* When the descendant processor is configured in the future it does not
|
||
* affect the ancestral processor.
|
||
*/
|
||
copy() {
|
||
const destination = (
|
||
/** @type {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>} */
|
||
new _Processor()
|
||
);
|
||
let index2 = -1;
|
||
while (++index2 < this.attachers.length) {
|
||
const attacher = this.attachers[index2];
|
||
destination.use(...attacher);
|
||
}
|
||
destination.data((0, import_extend.default)(true, {}, this.namespace));
|
||
return destination;
|
||
}
|
||
/**
|
||
* Configure the processor with info available to all plugins.
|
||
* Information is stored in an object.
|
||
*
|
||
* Typically, options can be given to a specific plugin, but sometimes it
|
||
* makes sense to have information shared with several plugins.
|
||
* For example, a list of HTML elements that are self-closing, which is
|
||
* needed during all phases.
|
||
*
|
||
* > 👉 **Note**: setting information cannot occur on *frozen* processors.
|
||
* > Call the processor first to create a new unfrozen processor.
|
||
*
|
||
* > 👉 **Note**: to register custom data in TypeScript, augment the
|
||
* > {@link Data `Data`} interface.
|
||
*
|
||
* @example
|
||
* This example show how to get and set info:
|
||
*
|
||
* ```js
|
||
* import {unified} from 'unified'
|
||
*
|
||
* const processor = unified().data('alpha', 'bravo')
|
||
*
|
||
* processor.data('alpha') // => 'bravo'
|
||
*
|
||
* processor.data() // => {alpha: 'bravo'}
|
||
*
|
||
* processor.data({charlie: 'delta'})
|
||
*
|
||
* processor.data() // => {charlie: 'delta'}
|
||
* ```
|
||
*
|
||
* @template {keyof Data} Key
|
||
*
|
||
* @overload
|
||
* @returns {Data}
|
||
*
|
||
* @overload
|
||
* @param {Data} dataset
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
*
|
||
* @overload
|
||
* @param {Key} key
|
||
* @returns {Data[Key]}
|
||
*
|
||
* @overload
|
||
* @param {Key} key
|
||
* @param {Data[Key]} value
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
*
|
||
* @param {Data | Key} [key]
|
||
* Key to get or set, or entire dataset to set, or nothing to get the
|
||
* entire dataset (optional).
|
||
* @param {Data[Key]} [value]
|
||
* Value to set (optional).
|
||
* @returns {unknown}
|
||
* The current processor when setting, the value at `key` when getting, or
|
||
* the entire dataset when getting without key.
|
||
*/
|
||
data(key, value) {
|
||
if (typeof key === "string") {
|
||
if (arguments.length === 2) {
|
||
assertUnfrozen("data", this.frozen);
|
||
this.namespace[key] = value;
|
||
return this;
|
||
}
|
||
return own5.call(this.namespace, key) && this.namespace[key] || void 0;
|
||
}
|
||
if (key) {
|
||
assertUnfrozen("data", this.frozen);
|
||
this.namespace = key;
|
||
return this;
|
||
}
|
||
return this.namespace;
|
||
}
|
||
/**
|
||
* Freeze a processor.
|
||
*
|
||
* Frozen processors are meant to be extended and not to be configured
|
||
* directly.
|
||
*
|
||
* When a processor is frozen it cannot be unfrozen.
|
||
* New processors working the same way can be created by calling the
|
||
* processor.
|
||
*
|
||
* It’s possible to freeze processors explicitly by calling `.freeze()`.
|
||
* Processors freeze automatically when `.parse()`, `.run()`, `.runSync()`,
|
||
* `.stringify()`, `.process()`, or `.processSync()` are called.
|
||
*
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
* The current processor.
|
||
*/
|
||
freeze() {
|
||
if (this.frozen) {
|
||
return this;
|
||
}
|
||
const self = (
|
||
/** @type {Processor} */
|
||
/** @type {unknown} */
|
||
this
|
||
);
|
||
while (++this.freezeIndex < this.attachers.length) {
|
||
const [attacher, ...options] = this.attachers[this.freezeIndex];
|
||
if (options[0] === false) {
|
||
continue;
|
||
}
|
||
if (options[0] === true) {
|
||
options[0] = void 0;
|
||
}
|
||
const transformer = attacher.call(self, ...options);
|
||
if (typeof transformer === "function") {
|
||
this.transformers.use(transformer);
|
||
}
|
||
}
|
||
this.frozen = true;
|
||
this.freezeIndex = Number.POSITIVE_INFINITY;
|
||
return this;
|
||
}
|
||
/**
|
||
* Parse text to a syntax tree.
|
||
*
|
||
* > 👉 **Note**: `parse` freezes the processor if not already *frozen*.
|
||
*
|
||
* > 👉 **Note**: `parse` performs the parse phase, not the run phase or other
|
||
* > phases.
|
||
*
|
||
* @param {Compatible | undefined} [file]
|
||
* file to parse (optional); typically `string` or `VFile`; any value
|
||
* accepted as `x` in `new VFile(x)`.
|
||
* @returns {ParseTree extends undefined ? Node : ParseTree}
|
||
* Syntax tree representing `file`.
|
||
*/
|
||
parse(file) {
|
||
this.freeze();
|
||
const realFile = vfile(file);
|
||
const parser = this.parser || this.Parser;
|
||
assertParser("parse", parser);
|
||
return parser(String(realFile), realFile);
|
||
}
|
||
/**
|
||
* Process the given file as configured on the processor.
|
||
*
|
||
* > 👉 **Note**: `process` freezes the processor if not already *frozen*.
|
||
*
|
||
* > 👉 **Note**: `process` performs the parse, run, and stringify phases.
|
||
*
|
||
* @overload
|
||
* @param {Compatible | undefined} file
|
||
* @param {ProcessCallback<VFileWithOutput<CompileResult>>} done
|
||
* @returns {undefined}
|
||
*
|
||
* @overload
|
||
* @param {Compatible | undefined} [file]
|
||
* @returns {Promise<VFileWithOutput<CompileResult>>}
|
||
*
|
||
* @param {Compatible | undefined} [file]
|
||
* File (optional); typically `string` or `VFile`]; any value accepted as
|
||
* `x` in `new VFile(x)`.
|
||
* @param {ProcessCallback<VFileWithOutput<CompileResult>> | undefined} [done]
|
||
* Callback (optional).
|
||
* @returns {Promise<VFile> | undefined}
|
||
* Nothing if `done` is given.
|
||
* Otherwise a promise, rejected with a fatal error or resolved with the
|
||
* processed file.
|
||
*
|
||
* The parsed, transformed, and compiled value is available at
|
||
* `file.value` (see note).
|
||
*
|
||
* > 👉 **Note**: unified typically compiles by serializing: most
|
||
* > compilers return `string` (or `Uint8Array`).
|
||
* > Some compilers, such as the one configured with
|
||
* > [`rehype-react`][rehype-react], return other values (in this case, a
|
||
* > React tree).
|
||
* > If you’re using a compiler that doesn’t serialize, expect different
|
||
* > result values.
|
||
* >
|
||
* > To register custom results in TypeScript, add them to
|
||
* > {@link CompileResultMap `CompileResultMap`}.
|
||
*
|
||
* [rehype-react]: https://github.com/rehypejs/rehype-react
|
||
*/
|
||
process(file, done) {
|
||
const self = this;
|
||
this.freeze();
|
||
assertParser("process", this.parser || this.Parser);
|
||
assertCompiler("process", this.compiler || this.Compiler);
|
||
return done ? executor(void 0, done) : new Promise(executor);
|
||
function executor(resolve, reject) {
|
||
const realFile = vfile(file);
|
||
const parseTree = (
|
||
/** @type {HeadTree extends undefined ? Node : HeadTree} */
|
||
/** @type {unknown} */
|
||
self.parse(realFile)
|
||
);
|
||
self.run(parseTree, realFile, function(error, tree, file2) {
|
||
if (error || !tree || !file2) {
|
||
return realDone(error);
|
||
}
|
||
const compileTree = (
|
||
/** @type {CompileTree extends undefined ? Node : CompileTree} */
|
||
/** @type {unknown} */
|
||
tree
|
||
);
|
||
const compileResult = self.stringify(compileTree, file2);
|
||
if (looksLikeAValue(compileResult)) {
|
||
file2.value = compileResult;
|
||
} else {
|
||
file2.result = compileResult;
|
||
}
|
||
realDone(
|
||
error,
|
||
/** @type {VFileWithOutput<CompileResult>} */
|
||
file2
|
||
);
|
||
});
|
||
function realDone(error, file2) {
|
||
if (error || !file2) {
|
||
reject(error);
|
||
} else if (resolve) {
|
||
resolve(file2);
|
||
} else {
|
||
ok2(done, "`done` is defined if `resolve` is not");
|
||
done(void 0, file2);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Process the given file as configured on the processor.
|
||
*
|
||
* An error is thrown if asynchronous transforms are configured.
|
||
*
|
||
* > 👉 **Note**: `processSync` freezes the processor if not already *frozen*.
|
||
*
|
||
* > 👉 **Note**: `processSync` performs the parse, run, and stringify phases.
|
||
*
|
||
* @param {Compatible | undefined} [file]
|
||
* File (optional); typically `string` or `VFile`; any value accepted as
|
||
* `x` in `new VFile(x)`.
|
||
* @returns {VFileWithOutput<CompileResult>}
|
||
* The processed file.
|
||
*
|
||
* The parsed, transformed, and compiled value is available at
|
||
* `file.value` (see note).
|
||
*
|
||
* > 👉 **Note**: unified typically compiles by serializing: most
|
||
* > compilers return `string` (or `Uint8Array`).
|
||
* > Some compilers, such as the one configured with
|
||
* > [`rehype-react`][rehype-react], return other values (in this case, a
|
||
* > React tree).
|
||
* > If you’re using a compiler that doesn’t serialize, expect different
|
||
* > result values.
|
||
* >
|
||
* > To register custom results in TypeScript, add them to
|
||
* > {@link CompileResultMap `CompileResultMap`}.
|
||
*
|
||
* [rehype-react]: https://github.com/rehypejs/rehype-react
|
||
*/
|
||
processSync(file) {
|
||
let complete = false;
|
||
let result;
|
||
this.freeze();
|
||
assertParser("processSync", this.parser || this.Parser);
|
||
assertCompiler("processSync", this.compiler || this.Compiler);
|
||
this.process(file, realDone);
|
||
assertDone("processSync", "process", complete);
|
||
ok2(result, "we either bailed on an error or have a tree");
|
||
return result;
|
||
function realDone(error, file2) {
|
||
complete = true;
|
||
bail(error);
|
||
result = file2;
|
||
}
|
||
}
|
||
/**
|
||
* Run *transformers* on a syntax tree.
|
||
*
|
||
* > 👉 **Note**: `run` freezes the processor if not already *frozen*.
|
||
*
|
||
* > 👉 **Note**: `run` performs the run phase, not other phases.
|
||
*
|
||
* @overload
|
||
* @param {HeadTree extends undefined ? Node : HeadTree} tree
|
||
* @param {RunCallback<TailTree extends undefined ? Node : TailTree>} done
|
||
* @returns {undefined}
|
||
*
|
||
* @overload
|
||
* @param {HeadTree extends undefined ? Node : HeadTree} tree
|
||
* @param {Compatible | undefined} file
|
||
* @param {RunCallback<TailTree extends undefined ? Node : TailTree>} done
|
||
* @returns {undefined}
|
||
*
|
||
* @overload
|
||
* @param {HeadTree extends undefined ? Node : HeadTree} tree
|
||
* @param {Compatible | undefined} [file]
|
||
* @returns {Promise<TailTree extends undefined ? Node : TailTree>}
|
||
*
|
||
* @param {HeadTree extends undefined ? Node : HeadTree} tree
|
||
* Tree to transform and inspect.
|
||
* @param {(
|
||
* RunCallback<TailTree extends undefined ? Node : TailTree> |
|
||
* Compatible
|
||
* )} [file]
|
||
* File associated with `node` (optional); any value accepted as `x` in
|
||
* `new VFile(x)`.
|
||
* @param {RunCallback<TailTree extends undefined ? Node : TailTree>} [done]
|
||
* Callback (optional).
|
||
* @returns {Promise<TailTree extends undefined ? Node : TailTree> | undefined}
|
||
* Nothing if `done` is given.
|
||
* Otherwise, a promise rejected with a fatal error or resolved with the
|
||
* transformed tree.
|
||
*/
|
||
run(tree, file, done) {
|
||
assertNode(tree);
|
||
this.freeze();
|
||
const transformers = this.transformers;
|
||
if (!done && typeof file === "function") {
|
||
done = file;
|
||
file = void 0;
|
||
}
|
||
return done ? executor(void 0, done) : new Promise(executor);
|
||
function executor(resolve, reject) {
|
||
ok2(
|
||
typeof file !== "function",
|
||
"`file` can\u2019t be a `done` anymore, we checked"
|
||
);
|
||
const realFile = vfile(file);
|
||
transformers.run(tree, realFile, realDone);
|
||
function realDone(error, outputTree, file2) {
|
||
const resultingTree = (
|
||
/** @type {TailTree extends undefined ? Node : TailTree} */
|
||
outputTree || tree
|
||
);
|
||
if (error) {
|
||
reject(error);
|
||
} else if (resolve) {
|
||
resolve(resultingTree);
|
||
} else {
|
||
ok2(done, "`done` is defined if `resolve` is not");
|
||
done(void 0, resultingTree, file2);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Run *transformers* on a syntax tree.
|
||
*
|
||
* An error is thrown if asynchronous transforms are configured.
|
||
*
|
||
* > 👉 **Note**: `runSync` freezes the processor if not already *frozen*.
|
||
*
|
||
* > 👉 **Note**: `runSync` performs the run phase, not other phases.
|
||
*
|
||
* @param {HeadTree extends undefined ? Node : HeadTree} tree
|
||
* Tree to transform and inspect.
|
||
* @param {Compatible | undefined} [file]
|
||
* File associated with `node` (optional); any value accepted as `x` in
|
||
* `new VFile(x)`.
|
||
* @returns {TailTree extends undefined ? Node : TailTree}
|
||
* Transformed tree.
|
||
*/
|
||
runSync(tree, file) {
|
||
let complete = false;
|
||
let result;
|
||
this.run(tree, file, realDone);
|
||
assertDone("runSync", "run", complete);
|
||
ok2(result, "we either bailed on an error or have a tree");
|
||
return result;
|
||
function realDone(error, tree2) {
|
||
bail(error);
|
||
result = tree2;
|
||
complete = true;
|
||
}
|
||
}
|
||
/**
|
||
* Compile a syntax tree.
|
||
*
|
||
* > 👉 **Note**: `stringify` freezes the processor if not already *frozen*.
|
||
*
|
||
* > 👉 **Note**: `stringify` performs the stringify phase, not the run phase
|
||
* > or other phases.
|
||
*
|
||
* @param {CompileTree extends undefined ? Node : CompileTree} tree
|
||
* Tree to compile.
|
||
* @param {Compatible | undefined} [file]
|
||
* File associated with `node` (optional); any value accepted as `x` in
|
||
* `new VFile(x)`.
|
||
* @returns {CompileResult extends undefined ? Value : CompileResult}
|
||
* Textual representation of the tree (see note).
|
||
*
|
||
* > 👉 **Note**: unified typically compiles by serializing: most compilers
|
||
* > return `string` (or `Uint8Array`).
|
||
* > Some compilers, such as the one configured with
|
||
* > [`rehype-react`][rehype-react], return other values (in this case, a
|
||
* > React tree).
|
||
* > If you’re using a compiler that doesn’t serialize, expect different
|
||
* > result values.
|
||
* >
|
||
* > To register custom results in TypeScript, add them to
|
||
* > {@link CompileResultMap `CompileResultMap`}.
|
||
*
|
||
* [rehype-react]: https://github.com/rehypejs/rehype-react
|
||
*/
|
||
stringify(tree, file) {
|
||
this.freeze();
|
||
const realFile = vfile(file);
|
||
const compiler2 = this.compiler || this.Compiler;
|
||
assertCompiler("stringify", compiler2);
|
||
assertNode(tree);
|
||
return compiler2(tree, realFile);
|
||
}
  /**
   * Configure the processor to use a plugin, a list of usable values, or a
   * preset.
   *
   * If the processor is already using a plugin, the previous plugin
   * configuration is changed based on the options that are passed in.
   * In other words, the plugin is not added a second time.
   *
   * > 👉 **Note**: `use` cannot be called on *frozen* processors.
   * > Call the processor first to create a new unfrozen processor.
   *
   * @example
   * There are many ways to pass plugins to `.use()`.
   * This example gives an overview:
   *
   * ```js
   * import {unified} from 'unified'
   *
   * unified()
   * // Plugin with options:
   * .use(pluginA, {x: true, y: true})
   * // Passing the same plugin again merges configuration (to `{x: true, y: false, z: true}`):
   * .use(pluginA, {y: false, z: true})
   * // Plugins:
   * .use([pluginB, pluginC])
   * // Two plugins, the second with options:
   * .use([pluginD, [pluginE, {}]])
   * // Preset with plugins and settings:
   * .use({plugins: [pluginF, [pluginG, {}]], settings: {position: false}})
   * // Settings only:
   * .use({settings: {position: false}})
   * ```
   *
   * @template {Array<unknown>} [Parameters=[]]
   * @template {Node | string | undefined} [Input=undefined]
   * @template [Output=Input]
   *
   * @overload
   * @param {Preset | null | undefined} [preset]
   * @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
   *
   * @overload
   * @param {PluggableList} list
   * @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
   *
   * @overload
   * @param {Plugin<Parameters, Input, Output>} plugin
   * @param {...(Parameters | [boolean])} parameters
   * @returns {UsePlugin<ParseTree, HeadTree, TailTree, CompileTree, CompileResult, Input, Output>}
   *
   * @param {PluggableList | Plugin | Preset | null | undefined} value
   * Usable value.
   * @param {...unknown} parameters
   * Parameters, when a plugin is given as a usable value.
   * @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
   * Current processor.
   */
  use(value, ...parameters) {
    const attachers = this.attachers;
    const namespace = this.namespace;
    assertUnfrozen("use", this.frozen);
    if (value === null || value === void 0) {
    } else if (typeof value === "function") {
      addPlugin(value, parameters);
    } else if (typeof value === "object") {
      if (Array.isArray(value)) {
        addList(value);
      } else {
        addPreset(value);
      }
    } else {
      throw new TypeError("Expected usable value, not `" + value + "`");
    }
    return this;
    function add(value2) {
      if (typeof value2 === "function") {
        addPlugin(value2, []);
      } else if (typeof value2 === "object") {
        if (Array.isArray(value2)) {
          const [plugin, ...parameters2] = (
            /** @type {PluginTuple<Array<unknown>>} */
            value2
          );
          addPlugin(plugin, parameters2);
        } else {
          addPreset(value2);
        }
      } else {
        throw new TypeError("Expected usable value, not `" + value2 + "`");
      }
    }
    function addPreset(result) {
      if (!("plugins" in result) && !("settings" in result)) {
        throw new Error(
          "Expected usable value but received an empty preset, which is probably a mistake: presets typically come with `plugins` and sometimes with `settings`, but this has neither"
        );
      }
      addList(result.plugins);
      if (result.settings) {
        namespace.settings = (0, import_extend.default)(true, namespace.settings, result.settings);
      }
    }
    function addList(plugins) {
      let index2 = -1;
      if (plugins === null || plugins === void 0) {
      } else if (Array.isArray(plugins)) {
        while (++index2 < plugins.length) {
          const thing = plugins[index2];
          add(thing);
        }
      } else {
        throw new TypeError("Expected a list of plugins, not `" + plugins + "`");
      }
    }
    function addPlugin(plugin, parameters2) {
      let index2 = -1;
      let entryIndex = -1;
      while (++index2 < attachers.length) {
        if (attachers[index2][0] === plugin) {
          entryIndex = index2;
          break;
        }
      }
      if (entryIndex === -1) {
        attachers.push([plugin, ...parameters2]);
      } else if (parameters2.length > 0) {
        let [primary, ...rest] = parameters2;
        const currentPrimary = attachers[entryIndex][1];
        if (isPlainObject(currentPrimary) && isPlainObject(primary)) {
          primary = (0, import_extend.default)(true, currentPrimary, primary);
        }
        attachers[entryIndex] = [plugin, primary, ...rest];
      }
    }
  }
};
var unified = new Processor().freeze();
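// Editor's note (sketch, not part of the bundled library): `unified` is
// exported frozen, so calling `.use()` on it directly would throw via
// `assertUnfrozen` below. Calling the processor first returns a new,
// unfrozen copy:
//
//   const processor = unified().use(remarkParse); // ok: new unfrozen processor
//   // unified.use(remarkParse);                  // throws: frozen processor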
function assertParser(name, value) {
  if (typeof value !== "function") {
    throw new TypeError("Cannot `" + name + "` without `parser`");
  }
}
function assertCompiler(name, value) {
  if (typeof value !== "function") {
    throw new TypeError("Cannot `" + name + "` without `compiler`");
  }
}
function assertUnfrozen(name, frozen) {
  if (frozen) {
    throw new Error(
      "Cannot call `" + name + "` on a frozen processor.\nCreate a new processor first, by calling it: use `processor()` instead of `processor`."
    );
  }
}
function assertNode(node2) {
  if (!isPlainObject(node2) || typeof node2.type !== "string") {
    throw new TypeError("Expected node, got `" + node2 + "`");
  }
}
function assertDone(name, asyncName, complete) {
  if (!complete) {
    throw new Error(
      "`" + name + "` finished async. Use `" + asyncName + "` instead"
    );
  }
}
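// The helpers below duck-type their arguments (checking for characteristic
// properties rather than using `instanceof`), presumably so vfile and
// Uint8Array-like values from other realms or duplicate installs are still
// recognized.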
function vfile(value) {
  return looksLikeAVFile(value) ? value : new VFile(value);
}
function looksLikeAVFile(value) {
  return Boolean(
    value && typeof value === "object" && "message" in value && "messages" in value
  );
}
function looksLikeAValue(value) {
  return typeof value === "string" || isUint8Array2(value);
}
function isUint8Array2(value) {
  return Boolean(
    value && typeof value === "object" && "byteLength" in value && "byteOffset" in value
  );
}

// node_modules/remark/index.js
var remark1501 = unified().use(remarkParse).use(remarkStringify).freeze();
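// Editor's illustrative sketch (comment only, not part of the bundled
// library): the frozen remark processor above pairs the markdown parser and
// compiler, so a minimal round trip looks roughly like:
//
//   const file = remark1501().processSync("# Hello *world*");
//   String(file); // serialized markdown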
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  remark: remark1501
});