/*
THIS IS A GENERATED/BUNDLED FILE BY ESBUILD
if you want to view the source, please visit the github repository of this plugin
*/
// ---- esbuild runtime helpers (generated CommonJS/ESM interop preamble) ----
var __create = Object.create;
var __defProp = Object.defineProperty;
var __defProps = Object.defineProperties;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;
// Define `key` on `obj`; uses defineProperty when the key already exists so
// the write cannot trigger an inherited setter.
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
// Runtime form of object spread `{ ...a, ...b }`: string keys, then symbols.
var __spreadValues = (a, b) => {
  for (var prop in b || (b = {}))
    if (__hasOwnProp.call(b, prop))
      __defNormalProp(a, prop, b[prop]);
  if (__getOwnPropSymbols)
    for (var prop of __getOwnPropSymbols(b)) {
      if (__propIsEnum.call(b, prop))
        __defNormalProp(a, prop, b[prop]);
    }
  return a;
};
// Spread that preserves getters/setters by copying property descriptors.
var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
// Runtime form of rest destructuring `const { x, ...rest } = source`.
var __objRest = (source, exclude) => {
  var target = {};
  for (var prop in source)
    if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
      target[prop] = source[prop];
  if (source != null && __getOwnPropSymbols)
    for (var prop of __getOwnPropSymbols(source)) {
      if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
        target[prop] = source[prop];
    }
  return target;
};
// Wrap a bundled CommonJS module: the factory in `cb` runs once, lazily, on
// the first call of the returned require function; `mod` caches the result.
var __commonJS = (cb, mod) => function __require() {
  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
// Attach lazy, enumerable getters for every name exported by an ES module.
var __export = (target, all3) => {
  for (var name in all3)
    __defProp(target, name, { get: all3[name], enumerable: true });
};
// Copy properties of `from` onto `to` as live getters (skipping `except`).
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Convert a CommonJS module object into an ES-module-shaped namespace.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Mark an exports object as an ES module and copy the exported names onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// Runtime form of class field syntax `field = value`.
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
// Runtime form of `async function`: drives the generator produced by the
// downlevel transform, resolving each awaited value before resuming.
var __async = (__this, __arguments, generator) => {
  return new Promise((resolve, reject) => {
    var fulfilled = (value) => {
      try {
        step(generator.next(value));
      } catch (e) {
        reject(e);
      }
    };
    var rejected = (value) => {
      try {
        step(generator.throw(value));
      } catch (e) {
        reject(e);
      }
    };
    var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
    step((generator = generator.apply(__this, __arguments)).next());
  });
};
// node_modules/kind-of/index.js
var require_kind_of = __commonJS({
  "node_modules/kind-of/index.js"(exports2, module2) {
    var toString3 = Object.prototype.toString;
    // Return a precise lowercase type name for any value — e.g. "null",
    // "array", "date", "uint8array" — more specific than `typeof`.
    module2.exports = function kindOf(val) {
      if (val === void 0) return "undefined";
      if (val === null) return "null";
      var type = typeof val;
      if (type === "boolean") return "boolean";
      if (type === "string") return "string";
      if (type === "number") return "number";
      if (type === "symbol") return "symbol";
      if (type === "function") {
        return isGeneratorFn(val) ? "generatorfunction" : "function";
      }
      if (isArray(val)) return "array";
      if (isBuffer(val)) return "buffer";
      if (isArguments(val)) return "arguments";
      if (isDate(val)) return "date";
      if (isError(val)) return "error";
      if (isRegexp(val)) return "regexp";
      // Fast path: identify built-ins by constructor name.
      switch (ctorName(val)) {
        case "Symbol":
          return "symbol";
        case "Promise":
          return "promise";
        // Set, Map, WeakSet, WeakMap
        case "WeakMap":
          return "weakmap";
        case "WeakSet":
          return "weakset";
        case "Map":
          return "map";
        case "Set":
          return "set";
        // 8-bit typed arrays
        case "Int8Array":
          return "int8array";
        case "Uint8Array":
          return "uint8array";
        case "Uint8ClampedArray":
          return "uint8clampedarray";
        // 16-bit typed arrays
        case "Int16Array":
          return "int16array";
        case "Uint16Array":
          return "uint16array";
        // 32-bit typed arrays
        case "Int32Array":
          return "int32array";
        case "Uint32Array":
          return "uint32array";
        case "Float32Array":
          return "float32array";
        case "Float64Array":
          return "float64array";
      }
      if (isGeneratorObj(val)) {
        return "generator";
      }
      // Slow path: fall back to Object.prototype.toString.
      type = toString3.call(val);
      switch (type) {
        case "[object Object]":
          return "object";
        // iterators
        case "[object Map Iterator]":
          return "mapiterator";
        case "[object Set Iterator]":
          return "setiterator";
        case "[object String Iterator]":
          return "stringiterator";
        case "[object Array Iterator]":
          return "arrayiterator";
      }
      // e.g. "[object Foo Bar]" -> "foobar"
      return type.slice(8, -1).toLowerCase().replace(/\s/g, "");
    };
    function ctorName(val) {
      return typeof val.constructor === "function" ? val.constructor.name : null;
    }
    function isArray(val) {
      if (Array.isArray) return Array.isArray(val);
      return val instanceof Array;
    }
    // instanceof plus a duck-type check for cross-realm Error objects.
    function isError(val) {
      return val instanceof Error || typeof val.message === "string" && val.constructor && typeof val.constructor.stackTraceLimit === "number";
    }
    function isDate(val) {
      if (val instanceof Date) return true;
      return typeof val.toDateString === "function" && typeof val.getDate === "function" && typeof val.setDate === "function";
    }
    function isRegexp(val) {
      if (val instanceof RegExp) return true;
      return typeof val.flags === "string" && typeof val.ignoreCase === "boolean" && typeof val.multiline === "boolean" && typeof val.global === "boolean";
    }
    // NOTE(review): the value is received as the first parameter (named
    // `name`) and the second parameter is unused — this matches upstream
    // kind-of; callers pass the value as the sole argument.
    function isGeneratorFn(name, val) {
      return ctorName(name) === "GeneratorFunction";
    }
    function isGeneratorObj(val) {
      return typeof val.throw === "function" && typeof val.return === "function" && typeof val.next === "function";
    }
    function isArguments(val) {
      try {
        if (typeof val.length === "number" && typeof val.callee === "function") {
          return true;
        }
      } catch (err) {
        // Strict-mode arguments objects throw on `.callee` access.
        if (err.message.indexOf("callee") !== -1) {
          return true;
        }
      }
      return false;
    }
    // Detect Node Buffers without referencing the Buffer global directly.
    function isBuffer(val) {
      if (val.constructor && typeof val.constructor.isBuffer === "function") {
        return val.constructor.isBuffer(val);
      }
      return false;
    }
  }
});
// node_modules/is-extendable/index.js
var require_is_extendable = __commonJS({
  "node_modules/is-extendable/index.js"(exports2, module2) {
    "use strict";
    // True when `val` can safely receive new properties: any non-null
    // object or any function.
    module2.exports = function isExtendable(val) {
      if (typeof val === "undefined" || val === null) {
        return false;
      }
      var t = typeof val;
      return t === "object" || t === "function";
    };
  }
});
// node_modules/extend-shallow/index.js
var require_extend_shallow = __commonJS({
  "node_modules/extend-shallow/index.js"(exports2, module2) {
    "use strict";
    var isObject = require_is_extendable();
    // Shallow-copy the own enumerable properties of each source argument
    // onto `o` (later sources win); non-object sources are skipped and a
    // non-object target is replaced by a fresh {}.
    module2.exports = function extend2(o) {
      if (!isObject(o)) {
        o = {};
      }
      for (var idx = 1; idx < arguments.length; idx++) {
        var src = arguments[idx];
        if (isObject(src)) {
          assign(o, src);
        }
      }
      return o;
    };
    // Copy own properties of `b` onto `a`, in place.
    function assign(a, b) {
      for (var key in b) {
        if (hasOwn(b, key)) {
          a[key] = b[key];
        }
      }
    }
    // Own-property check that ignores anything on the prototype chain.
    function hasOwn(obj, key) {
      return Object.prototype.hasOwnProperty.call(obj, key);
    }
  }
});
// node_modules/section-matter/index.js
var require_section_matter = __commonJS({
  "node_modules/section-matter/index.js"(exports2, module2) {
    "use strict";
    var typeOf2 = require_kind_of();
    var extend2 = require_extend_shallow();
    // Split a string/buffer (or a { content } object) into delimiter-marked
    // "sections". A section looks like:
    //   <delim> <key>\n<data lines>\n<delim>\n<content lines>
    // Returns the file object with `content` set to the text before the
    // first section and `sections` set to an array of
    // { key, data, content } objects (each optionally transformed by
    // opts.parse).
    module2.exports = function(input, options2) {
      if (typeof options2 === "function") {
        options2 = { parse: options2 };
      }
      var file = toObject(input);
      var defaults = { section_delimiter: "---", parse: identity };
      var opts = extend2({}, defaults, options2);
      var delim = opts.section_delimiter;
      var lines = file.content.split(/\r?\n/);
      var sections = null;
      var section = createSection();
      var content3 = [];
      // `stack` holds the delimiter lines of the section being built:
      // length 1 = inside the data block, length 2 = inside the content.
      var stack = [];
      // First delimiter seen: everything before it is the file's own content.
      function initSections(val) {
        file.content = val;
        sections = [];
        content3 = [];
      }
      // Finish the section currently being built and push it.
      function closeSection(val) {
        if (stack.length) {
          section.key = getKey(stack[0], delim);
          section.content = val;
          opts.parse(section, sections);
          sections.push(section);
          section = createSection();
          content3 = [];
          stack = [];
        }
      }
      for (var i = 0; i < lines.length; i++) {
        var line = lines[i];
        var len = stack.length;
        var ln = line.trim();
        if (isDelimiter(ln, delim)) {
          // A bare "---" (no key, not the first line) closes the data block
          // of an open section; otherwise it is literal content.
          if (ln.length === 3 && i !== 0) {
            if (len === 0 || len === 2) {
              content3.push(line);
              continue;
            }
            stack.push(ln);
            section.data = content3.join("\n");
            content3 = [];
            continue;
          }
          if (sections === null) {
            initSections(content3.join("\n"));
          }
          if (len === 2) {
            closeSection(content3.join("\n"));
          }
          stack.push(ln);
          continue;
        }
        content3.push(line);
      }
      // Flush whatever was accumulated when the input ends.
      if (sections === null) {
        initSections(content3.join("\n"));
      } else {
        closeSection(content3.join("\n"));
      }
      file.sections = sections;
      return file;
    };
    // True when `line` begins with the delimiter string.
    // NOTE(review): the second check reads the char at delim.length + 1
    // (not delim.length) — this matches upstream section-matter.
    function isDelimiter(line, delim) {
      if (line.slice(0, delim.length) !== delim) {
        return false;
      }
      if (line.charAt(delim.length + 1) === delim.slice(-1)) {
        return false;
      }
      return true;
    }
    // Normalize the input into a { content: string, sections: [] } object;
    // throws TypeError for anything that is not a string or buffer.
    function toObject(input) {
      if (typeOf2(input) !== "object") {
        input = { content: input };
      }
      if (typeof input.content !== "string" && !isBuffer(input.content)) {
        throw new TypeError("expected a buffer or string");
      }
      input.content = input.content.toString();
      input.sections = [];
      return input;
    }
    // "--- foo" -> "foo"
    function getKey(val, delim) {
      return val ? val.slice(delim.length).trim() : "";
    }
    function createSection() {
      return { key: "", data: "", content: "" };
    }
    // Default parse callback: leave the section untouched.
    function identity(val) {
      return val;
    }
    function isBuffer(val) {
      if (val && val.constructor && typeof val.constructor.isBuffer === "function") {
        return val.constructor.isBuffer(val);
      }
      return false;
    }
  }
});
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/common.js
var require_common = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/common.js"(exports2, module2) {
    "use strict";
    // True for null or undefined.
    function isNothing(subject) {
      return subject === null || typeof subject === "undefined";
    }
    // True for any non-null object.
    function isObject(subject) {
      return subject !== null && typeof subject === "object";
    }
    // Wrap a value in an array; arrays pass through, null/undefined -> [].
    function toArray(sequence) {
      if (Array.isArray(sequence)) return sequence;
      if (isNothing(sequence)) return [];
      return [sequence];
    }
    // Shallow-copy own enumerable keys of `source` onto `target`, in place.
    function extend2(target, source) {
      if (source) {
        var sourceKeys = Object.keys(source);
        for (var idx = 0; idx < sourceKeys.length; idx += 1) {
          var key = sourceKeys[idx];
          target[key] = source[key];
        }
      }
      return target;
    }
    // Concatenate `count` copies of `string3`.
    function repeat(string3, count) {
      var result = "";
      for (var cycle = 0; cycle < count; cycle += 1) {
        result += string3;
      }
      return result;
    }
    // Distinguish -0 from +0 (they compare equal under ===).
    function isNegativeZero(number) {
      return number === 0 && 1 / number === Number.NEGATIVE_INFINITY;
    }
    module2.exports.isNothing = isNothing;
    module2.exports.isObject = isObject;
    module2.exports.toArray = toArray;
    module2.exports.repeat = repeat;
    module2.exports.isNegativeZero = isNegativeZero;
    module2.exports.extend = extend2;
  }
});
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/exception.js
var require_exception = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/exception.js"(exports2, module2) {
    "use strict";
    // Error type thrown by js-yaml; carries a `reason` string and an
    // optional `mark` (source position) used to build the message.
    function YAMLException(reason, mark) {
      Error.call(this);
      this.name = "YAMLException";
      this.reason = reason;
      this.mark = mark;
      this.message = (this.reason || "(unknown reason)") + (this.mark ? " " + this.mark.toString() : "");
      // Prefer V8's captureStackTrace so the constructor frame is omitted;
      // otherwise synthesize a stack from a throwaway Error.
      if (Error.captureStackTrace) {
        Error.captureStackTrace(this, this.constructor);
      } else {
        this.stack = new Error().stack || "";
      }
    }
    YAMLException.prototype = Object.create(Error.prototype);
    YAMLException.prototype.constructor = YAMLException;
    // `compact` suppresses the mark (position + snippet) in the output.
    YAMLException.prototype.toString = function toString3(compact) {
      var result = this.name + ": ";
      result += this.reason || "(unknown reason)";
      if (!compact && this.mark) {
        result += " " + this.mark.toString();
      }
      return result;
    };
    module2.exports = YAMLException;
  }
});
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/mark.js
var require_mark = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/mark.js"(exports2, module2) {
    "use strict";
    var common = require_common();
    // A position (offset/line/column) inside a YAML source buffer, used for
    // error reporting.
    function Mark(name, buffer, position2, line, column) {
      this.name = name;
      this.buffer = buffer;
      this.position = position2;
      this.line = line;
      this.column = column;
    }
    // Render the source line around `position` with a `^` pointer beneath
    // it, trimming each side to roughly maxLength/2 chars with " ... ".
    Mark.prototype.getSnippet = function getSnippet(indent, maxLength) {
      var head, start, tail, end, snippet;
      if (!this.buffer) return null;
      indent = indent || 4;
      maxLength = maxLength || 75;
      head = "";
      start = this.position;
      // Walk back to the start of the line (or the trim limit).
      while (start > 0 && "\0\r\n\x85\u2028\u2029".indexOf(this.buffer.charAt(start - 1)) === -1) {
        start -= 1;
        if (this.position - start > maxLength / 2 - 1) {
          head = " ... ";
          start += 5;
          break;
        }
      }
      tail = "";
      end = this.position;
      // Walk forward to the end of the line (or the trim limit).
      while (end < this.buffer.length && "\0\r\n\x85\u2028\u2029".indexOf(this.buffer.charAt(end)) === -1) {
        end += 1;
        if (end - this.position > maxLength / 2 - 1) {
          tail = " ... ";
          end -= 5;
          break;
        }
      }
      snippet = this.buffer.slice(start, end);
      // Second line: spaces up to the marked column, then the caret.
      return common.repeat(" ", indent) + head + snippet + tail + "\n" + common.repeat(" ", indent + this.position - start + head.length) + "^";
    };
    // Human-readable location, e.g. `in "file" at line 3, column 7`;
    // `compact` omits the snippet.
    Mark.prototype.toString = function toString3(compact) {
      var snippet, where = "";
      if (this.name) {
        where += 'in "' + this.name + '" ';
      }
      // Stored line/column are 0-based; display 1-based.
      where += "at line " + (this.line + 1) + ", column " + (this.column + 1);
      if (!compact) {
        snippet = this.getSnippet();
        if (snippet) {
          where += ":\n" + snippet;
        }
      }
      return where;
    };
    module2.exports = Mark;
  }
});
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type.js
var require_type = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type.js"(exports2, module2) {
    "use strict";
    var YAMLException = require_exception();
    // Options accepted by the Type constructor; anything else is an error.
    var TYPE_CONSTRUCTOR_OPTIONS = [
      "kind",
      "resolve",
      "construct",
      "instanceOf",
      "predicate",
      "represent",
      "defaultStyle",
      "styleAliases"
    ];
    var YAML_NODE_KINDS = [
      "scalar",
      "sequence",
      "mapping"
    ];
    // Invert { style: [aliases...] } into a flat { alias: style } lookup.
    function compileStyleAliases(map4) {
      var result = {};
      if (map4 !== null) {
        Object.keys(map4).forEach(function(style) {
          map4[style].forEach(function(alias) {
            result[String(alias)] = style;
          });
        });
      }
      return result;
    }
    // A YAML tag type (e.g. "tag:yaml.org,2002:int"): how to recognize
    // (resolve), build (construct) and serialize (represent) its values.
    function Type(tag, options2) {
      options2 = options2 || {};
      Object.keys(options2).forEach(function(name) {
        if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) {
          throw new YAMLException('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.');
        }
      });
      this.tag = tag;
      this.kind = options2["kind"] || null;
      // Default resolve accepts every string...
      this.resolve = options2["resolve"] || function() {
        return true;
      };
      // ...and default construction is the identity.
      this.construct = options2["construct"] || function(data) {
        return data;
      };
      this.instanceOf = options2["instanceOf"] || null;
      this.predicate = options2["predicate"] || null;
      this.represent = options2["represent"] || null;
      this.defaultStyle = options2["defaultStyle"] || null;
      this.styleAliases = compileStyleAliases(options2["styleAliases"] || null);
      if (YAML_NODE_KINDS.indexOf(this.kind) === -1) {
        throw new YAMLException('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.');
      }
    }
    module2.exports = Type;
  }
});
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/schema.js
var require_schema = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/schema.js"(exports2, module2) {
    "use strict";
    var common = require_common();
    var YAMLException = require_exception();
    var Type = require_type();
    // Recursively flatten the `name` ("implicit"/"explicit") type list of a
    // schema and its included parents; a later type with the same tag+kind
    // replaces an earlier one.
    function compileList(schema, name, result) {
      var exclude = [];
      schema.include.forEach(function(includedSchema) {
        result = compileList(includedSchema, name, result);
      });
      schema[name].forEach(function(currentType) {
        result.forEach(function(previousType, previousIndex) {
          if (previousType.tag === currentType.tag && previousType.kind === currentType.kind) {
            exclude.push(previousIndex);
          }
        });
        result.push(currentType);
      });
      // Drop the shadowed earlier entries.
      return result.filter(function(type, index2) {
        return exclude.indexOf(index2) === -1;
      });
    }
    // Index types by kind then tag; "fallback" indexes all kinds together.
    function compileMap() {
      var result = {
        scalar: {},
        sequence: {},
        mapping: {},
        fallback: {}
      }, index2, length;
      function collectType(type) {
        result[type.kind][type.tag] = result["fallback"][type.tag] = type;
      }
      for (index2 = 0, length = arguments.length; index2 < length; index2 += 1) {
        arguments[index2].forEach(collectType);
      }
      return result;
    }
    // A set of YAML types plus included parent schemas; precompiles the
    // flattened implicit/explicit lists and the tag lookup map.
    function Schema(definition3) {
      this.include = definition3.include || [];
      this.implicit = definition3.implicit || [];
      this.explicit = definition3.explicit || [];
      // Implicit resolution only works on plain scalars.
      this.implicit.forEach(function(type) {
        if (type.loadKind && type.loadKind !== "scalar") {
          throw new YAMLException("There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.");
        }
      });
      this.compiledImplicit = compileList(this, "implicit", []);
      this.compiledExplicit = compileList(this, "explicit", []);
      this.compiledTypeMap = compileMap(this.compiledImplicit, this.compiledExplicit);
    }
    // Assigned later by the js-yaml entry point.
    Schema.DEFAULT = null;
    // Schema.create(types) or Schema.create(schemas, types): derive a new
    // schema from parent schemas plus extra explicit types.
    Schema.create = function createSchema() {
      var schemas, types;
      switch (arguments.length) {
        case 1:
          schemas = Schema.DEFAULT;
          types = arguments[0];
          break;
        case 2:
          schemas = arguments[0];
          types = arguments[1];
          break;
        default:
          throw new YAMLException("Wrong number of arguments for Schema.create function");
      }
      schemas = common.toArray(schemas);
      types = common.toArray(types);
      if (!schemas.every(function(schema) {
        return schema instanceof Schema;
      })) {
        throw new YAMLException("Specified list of super schemas (or a single Schema object) contains a non-Schema object.");
      }
      if (!types.every(function(type) {
        return type instanceof Type;
      })) {
        throw new YAMLException("Specified list of YAML types (or a single Type object) contains a non-Type object.");
      }
      return new Schema({
        include: schemas,
        explicit: types
      });
    };
    module2.exports = Schema;
  }
});
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/str.js
var require_str = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/str.js"(exports2, module2) {
    "use strict";
    var Type = require_type();
    // Failsafe string type: a null node constructs the empty string.
    function constructYamlStr(data) {
      return data === null ? "" : data;
    }
    module2.exports = new Type("tag:yaml.org,2002:str", {
      kind: "scalar",
      construct: constructYamlStr
    });
  }
});
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/seq.js
var require_seq = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/seq.js"(exports2, module2) {
    "use strict";
    var Type = require_type();
    // Failsafe sequence type: a null node constructs an empty array.
    function constructYamlSeq(data) {
      return data === null ? [] : data;
    }
    module2.exports = new Type("tag:yaml.org,2002:seq", {
      kind: "sequence",
      construct: constructYamlSeq
    });
  }
});
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/map.js
var require_map = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/map.js"(exports2, module2) {
    "use strict";
    var Type = require_type();
    // Failsafe mapping type: a null node constructs an empty object.
    function constructYamlMap(data) {
      return data === null ? {} : data;
    }
    module2.exports = new Type("tag:yaml.org,2002:map", {
      kind: "mapping",
      construct: constructYamlMap
    });
  }
});
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/schema/failsafe.js
var require_failsafe = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/schema/failsafe.js"(exports2, module2) {
    "use strict";
    var Schema = require_schema();
    // The YAML "failsafe" schema: only str, seq and map, with no implicit
    // scalar resolution.
    module2.exports = new Schema({
      explicit: [
        require_str(),
        require_seq(),
        require_map()
      ]
    });
  }
});
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/null.js
var require_null = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/null.js"(exports2, module2) {
    "use strict";
    var Type = require_type();
    // Accept "~", "null", "Null", "NULL" (the length check is a cheap
    // pre-filter before the string comparisons).
    function resolveYamlNull(data) {
      if (data === null) return true;
      var max = data.length;
      return max === 1 && data === "~" || max === 4 && (data === "null" || data === "Null" || data === "NULL");
    }
    function constructYamlNull() {
      return null;
    }
    function isNull(object) {
      return object === null;
    }
    module2.exports = new Type("tag:yaml.org,2002:null", {
      kind: "scalar",
      resolve: resolveYamlNull,
      construct: constructYamlNull,
      predicate: isNull,
      // Dump styles for null values.
      represent: {
        canonical: function() {
          return "~";
        },
        lowercase: function() {
          return "null";
        },
        uppercase: function() {
          return "NULL";
        },
        camelcase: function() {
          return "Null";
        }
      },
      defaultStyle: "lowercase"
    });
  }
});
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/bool.js
var require_bool = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/bool.js"(exports2, module2) {
    "use strict";
    var Type = require_type();
    // Accept true/True/TRUE and false/False/FALSE (the length check is a
    // cheap pre-filter before the string comparisons).
    function resolveYamlBoolean(data) {
      if (data === null) return false;
      var max = data.length;
      return max === 4 && (data === "true" || data === "True" || data === "TRUE") || max === 5 && (data === "false" || data === "False" || data === "FALSE");
    }
    // Anything other than the three "true" spellings constructs false.
    function constructYamlBoolean(data) {
      return data === "true" || data === "True" || data === "TRUE";
    }
    // toString-based check also matches boxed Booleans and cross-realm values.
    function isBoolean(object) {
      return Object.prototype.toString.call(object) === "[object Boolean]";
    }
    module2.exports = new Type("tag:yaml.org,2002:bool", {
      kind: "scalar",
      resolve: resolveYamlBoolean,
      construct: constructYamlBoolean,
      predicate: isBoolean,
      // Dump styles for boolean values.
      represent: {
        lowercase: function(object) {
          return object ? "true" : "false";
        },
        uppercase: function(object) {
          return object ? "TRUE" : "FALSE";
        },
        camelcase: function(object) {
          return object ? "True" : "False";
        }
      },
      defaultStyle: "lowercase"
    });
  }
});
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/int.js
var require_int = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/int.js"(exports2, module2) {
    "use strict";
    var common = require_common();
    var Type = require_type();
    function isHexCode(c) {
      return 48 <= c && c <= 57 || 65 <= c && c <= 70 || 97 <= c && c <= 102;
    }
    function isOctCode(c) {
      return 48 <= c && c <= 55;
    }
    function isDecCode(c) {
      return 48 <= c && c <= 57;
    }
    // True when `data` is a YAML 1.1 integer: optional sign, then binary
    // (0b...), hex (0x...), octal (0...), decimal, or sexagesimal
    // (base 60, colon-separated); "_" separators are allowed between digits
    // but not at the end.
    function resolveYamlInteger(data) {
      if (data === null) return false;
      var max = data.length, index2 = 0, hasDigits = false, ch;
      if (!max) return false;
      ch = data[index2];
      if (ch === "-" || ch === "+") {
        ch = data[++index2];
      }
      if (ch === "0") {
        // A lone "0" (possibly signed) is a valid integer.
        if (index2 + 1 === max) return true;
        ch = data[++index2];
        // base 2
        if (ch === "b") {
          index2++;
          for (; index2 < max; index2++) {
            ch = data[index2];
            if (ch === "_") continue;
            if (ch !== "0" && ch !== "1") return false;
            hasDigits = true;
          }
          return hasDigits && ch !== "_";
        }
        // base 16
        if (ch === "x") {
          index2++;
          for (; index2 < max; index2++) {
            ch = data[index2];
            if (ch === "_") continue;
            if (!isHexCode(data.charCodeAt(index2))) return false;
            hasDigits = true;
          }
          return hasDigits && ch !== "_";
        }
        // leading "0" with more digits: base 8
        for (; index2 < max; index2++) {
          ch = data[index2];
          if (ch === "_") continue;
          if (!isOctCode(data.charCodeAt(index2))) return false;
          hasDigits = true;
        }
        return hasDigits && ch !== "_";
      }
      // base 10 or base 60; a leading "_" is not a digit separator.
      if (ch === "_") return false;
      for (; index2 < max; index2++) {
        ch = data[index2];
        if (ch === "_") continue;
        if (ch === ":") break;
        if (!isDecCode(data.charCodeAt(index2))) {
          return false;
        }
        hasDigits = true;
      }
      if (!hasDigits || ch === "_") return false;
      if (ch !== ":") return true;
      // Remaining sexagesimal groups must each be 0-59.
      return /^(:[0-5]?[0-9])+$/.test(data.slice(index2));
    }
    // Convert a resolved YAML integer string to a JS number, handling sign,
    // radix prefixes and sexagesimal (base 60) notation.
    function constructYamlInteger(data) {
      var value = data, sign = 1, ch, base, digits = [];
      if (value.indexOf("_") !== -1) {
        value = value.replace(/_/g, "");
      }
      ch = value[0];
      if (ch === "-" || ch === "+") {
        if (ch === "-") sign = -1;
        value = value.slice(1);
        ch = value[0];
      }
      if (value === "0") return 0;
      if (ch === "0") {
        if (value[1] === "b") return sign * parseInt(value.slice(2), 2);
        if (value[1] === "x") return sign * parseInt(value, 16);
        return sign * parseInt(value, 8);
      }
      if (value.indexOf(":") !== -1) {
        // Sexagesimal: accumulate least-significant group first.
        value.split(":").forEach(function(v) {
          digits.unshift(parseInt(v, 10));
        });
        value = 0;
        base = 1;
        digits.forEach(function(d) {
          value += d * base;
          base *= 60;
        });
        return sign * value;
      }
      return sign * parseInt(value, 10);
    }
    // Integral JS numbers only; -0 is excluded (it dumps as a float).
    function isInteger(object) {
      return Object.prototype.toString.call(object) === "[object Number]" && (object % 1 === 0 && !common.isNegativeZero(object));
    }
    module2.exports = new Type("tag:yaml.org,2002:int", {
      kind: "scalar",
      resolve: resolveYamlInteger,
      construct: constructYamlInteger,
      predicate: isInteger,
      // Dump styles; negatives keep the sign ahead of the radix prefix.
      represent: {
        binary: function(obj) {
          return obj >= 0 ? "0b" + obj.toString(2) : "-0b" + obj.toString(2).slice(1);
        },
        octal: function(obj) {
          return obj >= 0 ? "0" + obj.toString(8) : "-0" + obj.toString(8).slice(1);
        },
        decimal: function(obj) {
          return obj.toString(10);
        },
        /* eslint-disable max-len */
        hexadecimal: function(obj) {
          return obj >= 0 ? "0x" + obj.toString(16).toUpperCase() : "-0x" + obj.toString(16).toUpperCase().slice(1);
        }
      },
      defaultStyle: "decimal",
      styleAliases: {
        binary: [2, "bin"],
        octal: [8, "oct"],
        decimal: [10, "dec"],
        hexadecimal: [16, "hex"]
      }
    });
  }
});
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/float.js
|
||
var require_float = __commonJS({
|
||
"node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/float.js"(exports2, module2) {
|
||
"use strict";
|
||
var common = require_common();
|
||
var Type = require_type();
|
||
var YAML_FLOAT_PATTERN = new RegExp(
|
||
// 2.5e4, 2.5 and integers
|
||
"^(?:[-+]?(?:0|[1-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*|[-+]?\\.(?:inf|Inf|INF)|\\.(?:nan|NaN|NAN))$"
|
||
);
|
||
function resolveYamlFloat(data) {
|
||
if (data === null) return false;
|
||
if (!YAML_FLOAT_PATTERN.test(data) || // Quick hack to not allow integers end with `_`
|
||
// Probably should update regexp & check speed
|
||
data[data.length - 1] === "_") {
|
||
return false;
|
||
}
|
||
return true;
|
||
}
|
||
function constructYamlFloat(data) {
|
||
var value, sign, base, digits;
|
||
value = data.replace(/_/g, "").toLowerCase();
|
||
sign = value[0] === "-" ? -1 : 1;
|
||
digits = [];
|
||
if ("+-".indexOf(value[0]) >= 0) {
|
||
value = value.slice(1);
|
||
}
|
||
if (value === ".inf") {
|
||
return sign === 1 ? Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY;
|
||
} else if (value === ".nan") {
|
||
return NaN;
|
||
} else if (value.indexOf(":") >= 0) {
|
||
value.split(":").forEach(function(v) {
|
||
digits.unshift(parseFloat(v, 10));
|
||
});
|
||
value = 0;
|
||
base = 1;
|
||
digits.forEach(function(d) {
|
||
value += d * base;
|
||
base *= 60;
|
||
});
|
||
return sign * value;
|
||
}
|
||
return sign * parseFloat(value, 10);
|
||
}
|
||
var SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/;
|
||
function representYamlFloat(object, style) {
|
||
var res;
|
||
if (isNaN(object)) {
|
||
switch (style) {
|
||
case "lowercase":
|
||
return ".nan";
|
||
case "uppercase":
|
||
return ".NAN";
|
||
case "camelcase":
|
||
return ".NaN";
|
||
}
|
||
} else if (Number.POSITIVE_INFINITY === object) {
|
||
switch (style) {
|
||
case "lowercase":
|
||
return ".inf";
|
||
case "uppercase":
|
||
return ".INF";
|
||
case "camelcase":
|
||
return ".Inf";
|
||
}
|
||
} else if (Number.NEGATIVE_INFINITY === object) {
|
||
switch (style) {
|
||
case "lowercase":
|
||
return "-.inf";
|
||
case "uppercase":
|
||
return "-.INF";
|
||
case "camelcase":
|
||
return "-.Inf";
|
||
}
|
||
} else if (common.isNegativeZero(object)) {
|
||
return "-0.0";
|
||
}
|
||
res = object.toString(10);
|
||
return SCIENTIFIC_WITHOUT_DOT.test(res) ? res.replace("e", ".e") : res;
|
||
}
|
||
// Dump-time predicate: true for JS numbers that must be written as floats,
// i.e. values with a fractional part, plus negative zero (which would
// otherwise be indistinguishable from the integer 0).
function isFloat(object) {
  if (Object.prototype.toString.call(object) !== "[object Number]") {
    return false;
  }
  return object % 1 !== 0 || common.isNegativeZero(object);
}
|
||
// YAML 1.1 `!!float` scalar type: wires the resolve/construct/represent
// functions above into the js-yaml Type machinery.
module2.exports = new Type("tag:yaml.org,2002:float", {
  kind: "scalar",
  resolve: resolveYamlFloat,      // string -> is it a float scalar?
  construct: constructYamlFloat,  // string -> JS number
  predicate: isFloat,             // JS value -> dump with this type?
  represent: representYamlFloat,  // JS number -> string
  defaultStyle: "lowercase"       // casing used for ".inf"/".nan" on dump
});
|
||
}
|
||
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/schema/json.js
|
||
// JSON schema module: the failsafe types (str/seq/map) plus the implicit
// scalar types JSON can express (null, bool, int, float).
var require_json = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/schema/json.js"(exports2, module2) {
    "use strict";
    var Schema = require_schema();
    module2.exports = new Schema({
      include: [
        require_failsafe()
      ],
      implicit: [
        require_null(),
        require_bool(),
        require_int(),
        require_float()
      ]
    });
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/schema/core.js
|
||
// Core schema module: currently identical to the JSON schema, which it
// includes verbatim (kept as a separate schema for extension/identity).
var require_core = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/schema/core.js"(exports2, module2) {
    "use strict";
    var Schema = require_schema();
    module2.exports = new Schema({
      include: [
        require_json()
      ]
    });
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/timestamp.js
|
||
// YAML 1.1 `!!timestamp` type: resolves/constructs Date objects from the
// date-only and full timestamp forms of the YAML timestamp spec.
var require_timestamp = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/timestamp.js"(exports2, module2) {
    "use strict";
    var Type = require_type();
    // Date-only form: exactly YYYY-MM-DD.
    var YAML_DATE_REGEXP = new RegExp(
      "^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$"
    );
    // Full form: date, "T"/space separator, time, optional fraction and
    // optional timezone ("Z" or +/-HH[:MM]). Group numbers are relied on
    // below: 1-3 date, 4-6 time, 7 fraction, 9 tz sign, 10-11 tz offset.
    var YAML_TIMESTAMP_REGEXP = new RegExp(
      "^([0-9][0-9][0-9][0-9])-([0-9][0-9]?)-([0-9][0-9]?)(?:[Tt]|[ \\t]+)([0-9][0-9]?):([0-9][0-9]):([0-9][0-9])(?:\\.([0-9]*))?(?:[ \\t]*(Z|([-+])([0-9][0-9]?)(?::([0-9][0-9]))?))?$"
    );
    // True when the scalar matches either timestamp form.
    function resolveYamlTimestamp(data) {
      if (data === null) return false;
      if (YAML_DATE_REGEXP.exec(data) !== null) return true;
      if (YAML_TIMESTAMP_REGEXP.exec(data) !== null) return true;
      return false;
    }
    // Build a Date (always UTC-based) from a resolved timestamp scalar.
    function constructYamlTimestamp(data) {
      var match, year, month, day, hour, minute, second, fraction = 0, delta = null, tz_hour, tz_minute, date;
      match = YAML_DATE_REGEXP.exec(data);
      if (match === null) match = YAML_TIMESTAMP_REGEXP.exec(data);
      if (match === null) throw new Error("Date resolve error");
      year = +match[1];
      month = +match[2] - 1; // JS Date months are 0-indexed
      day = +match[3];
      if (!match[4]) {
        // Date-only form: midnight UTC.
        return new Date(Date.UTC(year, month, day));
      }
      hour = +match[4];
      minute = +match[5];
      second = +match[6];
      if (match[7]) {
        // Keep at most millisecond precision; right-pad to three digits
        // so ".5" means 500ms, not 5ms.
        fraction = match[7].slice(0, 3);
        while (fraction.length < 3) {
          fraction += "0";
        }
        fraction = +fraction;
      }
      if (match[9]) {
        // Explicit timezone offset, converted to milliseconds.
        tz_hour = +match[10];
        tz_minute = +(match[11] || 0);
        delta = (tz_hour * 60 + tz_minute) * 6e4;
        if (match[9] === "-") delta = -delta;
      }
      date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction));
      // Shift from local-in-offset time back to UTC.
      if (delta) date.setTime(date.getTime() - delta);
      return date;
    }
    // Dump a Date in ISO-8601 form.
    function representYamlTimestamp(object) {
      return object.toISOString();
    }
    module2.exports = new Type("tag:yaml.org,2002:timestamp", {
      kind: "scalar",
      resolve: resolveYamlTimestamp,
      construct: constructYamlTimestamp,
      instanceOf: Date,
      represent: representYamlTimestamp
    });
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/merge.js
|
||
// YAML `!!merge` type: recognizes the "<<" merge key used to merge
// mappings; actual merging is performed by the loader.
var require_merge = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/merge.js"(exports2, module2) {
    "use strict";
    var Type = require_type();
    // "<<" is the merge indicator; null covers an empty scalar.
    function resolveYamlMerge(data) {
      return data === "<<" || data === null;
    }
    module2.exports = new Type("tag:yaml.org,2002:merge", {
      kind: "scalar",
      resolve: resolveYamlMerge
    });
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/binary.js
|
||
// YAML `!!binary` type: hand-rolled base64 codec that decodes scalars to
// Node Buffers (or plain byte arrays outside Node) and encodes Buffers back.
var require_binary = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/binary.js"(exports2, module2) {
    "use strict";
    var NodeBuffer;
    try {
      // Indirect require so bundlers don't statically include "buffer";
      // NodeBuffer stays undefined in environments without it.
      _require = require;
      NodeBuffer = _require("buffer").Buffer;
    } catch (__) {
    }
    var _require;
    var Type = require_type();
    // Index 0-63: base64 alphabet; index 64: '='; trailing "\n\r" make
    // line breaks map to indexes > 64 so they can be skipped on resolve.
    var BASE64_MAP = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r";
    // Validate that the scalar is well-formed base64 (a whole number of bytes).
    function resolveYamlBinary(data) {
      if (data === null) return false;
      var code2, idx, bitlen = 0, max = data.length, map4 = BASE64_MAP;
      for (idx = 0; idx < max; idx++) {
        code2 = map4.indexOf(data.charAt(idx));
        if (code2 > 64) continue;      // CR/LF: ignored
        if (code2 < 0) return false;   // not in the alphabet
        bitlen += 6;
      }
      // Total payload bits must divide into whole bytes.
      return bitlen % 8 === 0;
    }
    // Decode the base64 scalar into bytes.
    function constructYamlBinary(data) {
      var idx, tailbits, input = data.replace(/[\r\n=]/g, ""), max = input.length, map4 = BASE64_MAP, bits = 0, result = [];
      for (idx = 0; idx < max; idx++) {
        // Every 4 input chars (24 bits) flush 3 output bytes.
        if (idx % 4 === 0 && idx) {
          result.push(bits >> 16 & 255);
          result.push(bits >> 8 & 255);
          result.push(bits & 255);
        }
        bits = bits << 6 | map4.indexOf(input.charAt(idx));
      }
      // Flush the final partial group: 18/12 leftover bits -> 2/1 bytes.
      tailbits = max % 4 * 6;
      if (tailbits === 0) {
        result.push(bits >> 16 & 255);
        result.push(bits >> 8 & 255);
        result.push(bits & 255);
      } else if (tailbits === 18) {
        result.push(bits >> 10 & 255);
        result.push(bits >> 2 & 255);
      } else if (tailbits === 12) {
        result.push(bits >> 4 & 255);
      }
      if (NodeBuffer) {
        // Buffer.from exists on modern Node; fall back to the deprecated ctor.
        return NodeBuffer.from ? NodeBuffer.from(result) : new NodeBuffer(result);
      }
      return result;
    }
    // Encode a byte sequence back into base64 (with '=' padding).
    function representYamlBinary(object) {
      var result = "", bits = 0, idx, tail, max = object.length, map4 = BASE64_MAP;
      for (idx = 0; idx < max; idx++) {
        // Every 3 input bytes (24 bits) emit 4 base64 chars.
        if (idx % 3 === 0 && idx) {
          result += map4[bits >> 18 & 63];
          result += map4[bits >> 12 & 63];
          result += map4[bits >> 6 & 63];
          result += map4[bits & 63];
        }
        bits = (bits << 8) + object[idx];
      }
      // Emit the final group, padding with '=' (map4[64]) as needed.
      tail = max % 3;
      if (tail === 0) {
        result += map4[bits >> 18 & 63];
        result += map4[bits >> 12 & 63];
        result += map4[bits >> 6 & 63];
        result += map4[bits & 63];
      } else if (tail === 2) {
        result += map4[bits >> 10 & 63];
        result += map4[bits >> 4 & 63];
        result += map4[bits << 2 & 63];
        result += map4[64];
      } else if (tail === 1) {
        result += map4[bits >> 2 & 63];
        result += map4[bits << 4 & 63];
        result += map4[64];
        result += map4[64];
      }
      return result;
    }
    // Dump-time predicate: only Buffers are represented as !!binary.
    function isBinary(object) {
      return NodeBuffer && NodeBuffer.isBuffer(object);
    }
    module2.exports = new Type("tag:yaml.org,2002:binary", {
      kind: "scalar",
      resolve: resolveYamlBinary,
      construct: constructYamlBinary,
      predicate: isBinary,
      represent: representYamlBinary
    });
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/omap.js
|
||
// YAML `!!omap` type: an ordered map encoded as a sequence of
// single-key mappings with unique keys.
var require_omap = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/omap.js"(exports2, module2) {
    "use strict";
    var Type = require_type();
    var _hasOwnProperty = Object.prototype.hasOwnProperty;
    var _toString = Object.prototype.toString;
    // Valid omap: every element is a plain object with exactly one own key,
    // and no key repeats across elements.
    function resolveYamlOmap(data) {
      if (data === null) return true; // empty node
      var objectKeys = [], index2, length, pair, pairKey, pairHasKey, object = data;
      for (index2 = 0, length = object.length; index2 < length; index2 += 1) {
        pair = object[index2];
        pairHasKey = false;
        if (_toString.call(pair) !== "[object Object]") return false;
        // Reject pairs with more than one own key.
        for (pairKey in pair) {
          if (_hasOwnProperty.call(pair, pairKey)) {
            if (!pairHasKey) pairHasKey = true;
            else return false;
          }
        }
        if (!pairHasKey) return false;
        // NOTE: pairKey deliberately leaks from the for-in loop above —
        // it holds the single key of this pair at this point.
        if (objectKeys.indexOf(pairKey) === -1) objectKeys.push(pairKey);
        else return false; // duplicate key
      }
      return true;
    }
    // The sequence itself is the constructed value; null becomes [].
    function constructYamlOmap(data) {
      return data !== null ? data : [];
    }
    module2.exports = new Type("tag:yaml.org,2002:omap", {
      kind: "sequence",
      resolve: resolveYamlOmap,
      construct: constructYamlOmap
    });
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/pairs.js
|
||
// YAML `!!pairs` type: a sequence of single-key mappings constructed as
// an array of [key, value] tuples (duplicate keys are allowed).
var require_pairs = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/pairs.js"(exports2, module2) {
    "use strict";
    var Type = require_type();
    var _toString = Object.prototype.toString;
    // Valid pairs: every element is a plain object with exactly one key.
    // (The tuple array built here is discarded; only validity is returned.)
    function resolveYamlPairs(data) {
      if (data === null) return true; // empty node
      var index2, length, pair, keys2, result, object = data;
      result = new Array(object.length);
      for (index2 = 0, length = object.length; index2 < length; index2 += 1) {
        pair = object[index2];
        if (_toString.call(pair) !== "[object Object]") return false;
        keys2 = Object.keys(pair);
        if (keys2.length !== 1) return false;
        result[index2] = [keys2[0], pair[keys2[0]]];
      }
      return true;
    }
    // Convert each {key: value} element into a [key, value] tuple.
    function constructYamlPairs(data) {
      if (data === null) return [];
      var index2, length, pair, keys2, result, object = data;
      result = new Array(object.length);
      for (index2 = 0, length = object.length; index2 < length; index2 += 1) {
        pair = object[index2];
        keys2 = Object.keys(pair);
        result[index2] = [keys2[0], pair[keys2[0]]];
      }
      return result;
    }
    module2.exports = new Type("tag:yaml.org,2002:pairs", {
      kind: "sequence",
      resolve: resolveYamlPairs,
      construct: constructYamlPairs
    });
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/set.js
|
||
// YAML `!!set` type: a mapping whose values are all null; the mapping
// object itself (keys only meaningful) is the constructed value.
var require_set = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/set.js"(exports2, module2) {
    "use strict";
    var Type = require_type();
    var _hasOwnProperty = Object.prototype.hasOwnProperty;
    // Valid set: every own value in the mapping is null.
    function resolveYamlSet(data) {
      if (data === null) return true; // empty node
      var key, object = data;
      for (key in object) {
        if (_hasOwnProperty.call(object, key)) {
          if (object[key] !== null) return false;
        }
      }
      return true;
    }
    // The mapping is used as-is; null becomes an empty object.
    function constructYamlSet(data) {
      return data !== null ? data : {};
    }
    module2.exports = new Type("tag:yaml.org,2002:set", {
      kind: "mapping",
      resolve: resolveYamlSet,
      construct: constructYamlSet
    });
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/schema/default_safe.js
|
||
// Safe schema: core (JSON) types plus implicit timestamp/merge and the
// explicit (tagged-only) binary/omap/pairs/set types. No JS-specific types.
var require_default_safe = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/schema/default_safe.js"(exports2, module2) {
    "use strict";
    var Schema = require_schema();
    module2.exports = new Schema({
      include: [
        require_core()
      ],
      implicit: [
        require_timestamp(),
        require_merge()
      ],
      explicit: [
        require_binary(),
        require_omap(),
        require_pairs(),
        require_set()
      ]
    });
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/js/undefined.js
|
||
// `!!js/undefined` type: round-trips JavaScript `undefined` (any scalar
// resolves; constructs undefined; dumps as an empty scalar).
var require_undefined = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/js/undefined.js"(exports2, module2) {
    "use strict";
    var Type = require_type();
    // Explicit tag only, so any payload is acceptable.
    function resolveJavascriptUndefined() {
      return true;
    }
    function constructJavascriptUndefined() {
      return void 0;
    }
    // undefined serializes as an empty scalar.
    function representJavascriptUndefined() {
      return "";
    }
    function isUndefined(object) {
      return typeof object === "undefined";
    }
    module2.exports = new Type("tag:yaml.org,2002:js/undefined", {
      kind: "scalar",
      resolve: resolveJavascriptUndefined,
      construct: constructJavascriptUndefined,
      predicate: isUndefined,
      represent: representJavascriptUndefined
    });
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/js/regexp.js
|
||
// `!!js/regexp` type: round-trips JavaScript RegExp objects written either
// bare ("abc") or in literal form ("/abc/gim").
var require_regexp = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/js/regexp.js"(exports2, module2) {
    "use strict";
    var Type = require_type();
    // Accept a non-empty scalar; if it uses /source/flags form, the flags
    // must be a subset of g/i/m (at most 3) and the closing '/' must exist.
    function resolveJavascriptRegExp(data) {
      if (data === null) return false;
      if (data.length === 0) return false;
      var regexp = data, tail = /\/([gim]*)$/.exec(data), modifiers = "";
      if (regexp[0] === "/") {
        if (tail) modifiers = tail[1];
        if (modifiers.length > 3) return false;
        // The character just before the flags must be the closing slash.
        if (regexp[regexp.length - modifiers.length - 1] !== "/") return false;
      }
      return true;
    }
    // Build the RegExp, stripping the surrounding slashes when present.
    function constructJavascriptRegExp(data) {
      var regexp = data, tail = /\/([gim]*)$/.exec(data), modifiers = "";
      if (regexp[0] === "/") {
        if (tail) modifiers = tail[1];
        regexp = regexp.slice(1, regexp.length - modifiers.length - 1);
      }
      return new RegExp(regexp, modifiers);
    }
    // Dump as /source/flags (flag order: g, m, i).
    function representJavascriptRegExp(object) {
      var result = "/" + object.source + "/";
      if (object.global) result += "g";
      if (object.multiline) result += "m";
      if (object.ignoreCase) result += "i";
      return result;
    }
    function isRegExp(object) {
      return Object.prototype.toString.call(object) === "[object RegExp]";
    }
    module2.exports = new Type("tag:yaml.org,2002:js/regexp", {
      kind: "scalar",
      resolve: resolveJavascriptRegExp,
      construct: constructJavascriptRegExp,
      predicate: isRegExp,
      represent: representJavascriptRegExp
    });
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/js/function.js
|
||
// `!!js/function` type: parses a function expression out of a YAML scalar
// with esprima and rebuilds it with `new Function`.
// NOTE(security): constructing a Function from document content means
// loading untrusted YAML with the full schema can execute arbitrary code —
// this type is only part of the (unsafe) default_full schema.
var require_function = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/type/js/function.js"(exports2, module2) {
    "use strict";
    var esprima;
    try {
      // Indirect require so bundlers don't force the dependency;
      // in browsers, fall back to a global esprima if one exists.
      _require = require;
      esprima = _require("esprima");
    } catch (_) {
      if (typeof window !== "undefined") esprima = window.esprima;
    }
    var _require;
    var Type = require_type();
    // Valid only if the scalar parses as a single function or arrow
    // function expression.
    function resolveJavascriptFunction(data) {
      if (data === null) return false;
      try {
        // Wrap in parens so `function () {}` parses as an expression.
        var source = "(" + data + ")", ast = esprima.parse(source, { range: true });
        if (ast.type !== "Program" || ast.body.length !== 1 || ast.body[0].type !== "ExpressionStatement" || ast.body[0].expression.type !== "ArrowFunctionExpression" && ast.body[0].expression.type !== "FunctionExpression") {
          return false;
        }
        return true;
      } catch (err) {
        return false;
      }
    }
    // Rebuild the function from its parsed parameter names and body text.
    function constructJavascriptFunction(data) {
      var source = "(" + data + ")", ast = esprima.parse(source, { range: true }), params = [], body;
      if (ast.type !== "Program" || ast.body.length !== 1 || ast.body[0].type !== "ExpressionStatement" || ast.body[0].expression.type !== "ArrowFunctionExpression" && ast.body[0].expression.type !== "FunctionExpression") {
        throw new Error("Failed to resolve function");
      }
      ast.body[0].expression.params.forEach(function(param) {
        params.push(param.name);
      });
      body = ast.body[0].expression.body.range;
      if (ast.body[0].expression.body.type === "BlockStatement") {
        // Block body: strip the surrounding braces.
        return new Function(params, source.slice(body[0] + 1, body[1] - 1));
      }
      // Expression-bodied arrow: synthesize an explicit return.
      return new Function(params, "return " + source.slice(body[0], body[1]));
    }
    // Dump a function via its own source text.
    function representJavascriptFunction(object) {
      return object.toString();
    }
    function isFunction(object) {
      return Object.prototype.toString.call(object) === "[object Function]";
    }
    module2.exports = new Type("tag:yaml.org,2002:js/function", {
      kind: "scalar",
      resolve: resolveJavascriptFunction,
      construct: constructJavascriptFunction,
      predicate: isFunction,
      represent: representJavascriptFunction
    });
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/schema/default_full.js
|
||
// Full schema: safe schema plus the JS-specific explicit types
// (undefined, regexp, function). Also installed as Schema.DEFAULT,
// the schema used when the caller specifies none.
var require_default_full = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/schema/default_full.js"(exports2, module2) {
    "use strict";
    var Schema = require_schema();
    module2.exports = Schema.DEFAULT = new Schema({
      include: [
        require_default_safe()
      ],
      explicit: [
        require_undefined(),
        require_regexp(),
        require_function()
      ]
    });
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/loader.js
|
||
var require_loader = __commonJS({
|
||
"node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/loader.js"(exports2, module2) {
|
||
"use strict";
|
||
var common = require_common();
|
||
var YAMLException = require_exception();
|
||
var Mark = require_mark();
|
||
var DEFAULT_SAFE_SCHEMA = require_default_safe();
|
||
var DEFAULT_FULL_SCHEMA = require_default_full();
|
||
var _hasOwnProperty = Object.prototype.hasOwnProperty;
|
||
var CONTEXT_FLOW_IN = 1;
|
||
var CONTEXT_FLOW_OUT = 2;
|
||
var CONTEXT_BLOCK_IN = 3;
|
||
var CONTEXT_BLOCK_OUT = 4;
|
||
var CHOMPING_CLIP = 1;
|
||
var CHOMPING_STRIP = 2;
|
||
var CHOMPING_KEEP = 3;
|
||
var PATTERN_NON_PRINTABLE = /[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\x84\x86-\x9F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/;
|
||
var PATTERN_NON_ASCII_LINE_BREAKS = /[\x85\u2028\u2029]/;
|
||
var PATTERN_FLOW_INDICATORS = /[,\[\]\{\}]/;
|
||
var PATTERN_TAG_HANDLE = /^(?:!|!!|![a-z\-]+!)$/i;
|
||
var PATTERN_TAG_URI = /^(?:!|[^,\[\]\{\}])(?:%[0-9a-f]{2}|[0-9a-z\-#;\/\?:@&=\+\$,_\.!~\*'\(\)\[\]])*$/i;
|
||
// Internal [[Class]] tag of a value, e.g. "[object Object]".
function _class(obj) {
  return Object.prototype.toString.call(obj);
}

// True for a line-break code: LF (10) or CR (13).
function is_EOL(c) {
  return c === 0x0A || c === 0x0D;
}

// True for inline whitespace: TAB (9) or SPACE (32).
function is_WHITE_SPACE(c) {
  return c === 0x09 || c === 0x20;
}

// True for inline whitespace or a line break.
function is_WS_OR_EOL(c) {
  return c === 0x09 || c === 0x20 || c === 0x0A || c === 0x0D;
}

// True for a flow-collection indicator: , [ ] { }
function is_FLOW_INDICATOR(c) {
  switch (c) {
    case 0x2C: /* , */
    case 0x5B: /* [ */
    case 0x5D: /* ] */
    case 0x7B: /* { */
    case 0x7D: /* } */
      return true;
    default:
      return false;
  }
}
|
||
// Value of a hex digit char code (0-15), or -1 if not a hex digit.
function fromHexCode(c) {
  if (c >= 0x30 && c <= 0x39) {
    return c - 0x30; // '0'-'9'
  }
  var lower = c | 0x20; // fold ASCII letters to lowercase
  if (lower >= 0x61 && lower <= 0x66) {
    return lower - 0x61 + 10; // 'a'-'f'
  }
  return -1;
}

// Number of hex digits expected after an \x, \u or \U escape;
// 0 if the char code introduces no hex escape.
function escapedHexLen(c) {
  switch (c) {
    case 0x78: // 'x'
      return 2;
    case 0x75: // 'u'
      return 4;
    case 0x55: // 'U'
      return 8;
    default:
      return 0;
  }
}

// Value of a decimal digit char code (0-9), or -1 if not a digit.
function fromDecimalCode(c) {
  return c >= 0x30 && c <= 0x39 ? c - 0x30 : -1;
}
|
||
// Map a char code following '\' in a double-quoted scalar to its escaped
// character, or "" when it is not a simple escape. Covers \0 \a \b \t
// (also a literal tab after backslash), \n \v \f \r \e, space, \" \/ \\,
// \N (NEL), \_ (NBSP), \L (LS), \P (PS).
// NOTE(review): the literals returned for codes 116 ('t') and 9 contain a
// real TAB character — preserved verbatim; do not "fix" the whitespace.
function simpleEscapeSequence(c) {
  return c === 48 ? "\0" : c === 97 ? "\x07" : c === 98 ? "\b" : c === 116 ? "	" : c === 9 ? "	" : c === 110 ? "\n" : c === 118 ? "\v" : c === 102 ? "\f" : c === 114 ? "\r" : c === 101 ? "\x1B" : c === 32 ? " " : c === 34 ? '"' : c === 47 ? "/" : c === 92 ? "\\" : c === 78 ? "\x85" : c === 95 ? "\xA0" : c === 76 ? "\u2028" : c === 80 ? "\u2029" : "";
}
|
||
// Convert a Unicode code point to a JS string.
// BMP code points map directly to one UTF-16 unit; astral code points
// (> 0xFFFF) are encoded as a high/low surrogate pair.
function charFromCodepoint(c) {
  if (c <= 0xFFFF) {
    return String.fromCharCode(c);
  }
  var offset = c - 0x10000;
  return String.fromCharCode(
    0xD800 + (offset >> 10),   // high surrogate
    0xDC00 + (offset & 0x3FF)  // low surrogate
  );
}
|
||
// Precomputed lookup tables for the 256 single-byte escape codes:
// simpleEscapeCheck[i] is 1 when i is a simple escape, and
// simpleEscapeMap[i] is the character it expands to.
var simpleEscapeCheck = new Array(256);
var simpleEscapeMap = new Array(256);
// NOTE: `i` is declared by the hoisted `var i` below this loop;
// kept as-is (function-scoped var, no TDZ issue).
for (i = 0; i < 256; i++) {
  simpleEscapeCheck[i] = simpleEscapeSequence(i) ? 1 : 0;
  simpleEscapeMap[i] = simpleEscapeSequence(i);
}
var i;
|
||
// Mutable loader state threaded through every read*/compose* function.
// `input` is the full YAML text; `options2` carries caller options.
function State(input, options2) {
  this.input = input;
  this.filename = options2["filename"] || null;
  this.schema = options2["schema"] || DEFAULT_FULL_SCHEMA; // full schema by default
  this.onWarning = options2["onWarning"] || null;
  this.legacy = options2["legacy"] || false;
  this.json = options2["json"] || false; // JSON mode: duplicate keys override
  this.listener = options2["listener"] || null;
  this.implicitTypes = this.schema.compiledImplicit;
  this.typeMap = this.schema.compiledTypeMap;
  this.length = input.length;
  this.position = 0;   // absolute offset into input
  this.line = 0;       // current line number (0-based)
  this.lineStart = 0;  // offset at which the current line begins
  this.lineIndent = 0; // indentation of the current line
  this.documents = []; // parsed documents accumulate here
}
// Build a YAMLException carrying a Mark that pinpoints the current
// parse position (filename, line, column).
function generateError(state, message) {
  return new YAMLException(
    message,
    new Mark(state.filename, state.input, state.position, state.line, state.position - state.lineStart)
  );
}
// Abort parsing with a positioned error.
function throwError(state, message) {
  throw generateError(state, message);
}
// Report a recoverable problem via the caller's onWarning hook, if any.
function throwWarning(state, message) {
  if (state.onWarning) {
    state.onWarning.call(null, generateError(state, message));
  }
}
|
||
// Handlers for %YAML and %TAG document directives, keyed by directive name.
var directiveHandlers = {
  // %YAML <major>.<minor> — validate the version and remember it.
  YAML: function handleYamlDirective(state, name, args) {
    var match, major, minor;
    if (state.version !== null) {
      throwError(state, "duplication of %YAML directive");
    }
    if (args.length !== 1) {
      throwError(state, "YAML directive accepts exactly one argument");
    }
    match = /^([0-9]+)\.([0-9]+)$/.exec(args[0]);
    if (match === null) {
      throwError(state, "ill-formed argument of the YAML directive");
    }
    major = parseInt(match[1], 10);
    minor = parseInt(match[2], 10);
    if (major !== 1) {
      throwError(state, "unacceptable YAML version of the document");
    }
    state.version = args[0];
    // YAML < 1.2 treats NEL/LS/PS as line breaks; flag for later checks.
    state.checkLineBreaks = minor < 2;
    if (minor !== 1 && minor !== 2) {
      throwWarning(state, "unsupported YAML version of the document");
    }
  },
  // %TAG <handle> <prefix> — register a tag-handle shorthand.
  TAG: function handleTagDirective(state, name, args) {
    var handle2, prefix;
    if (args.length !== 2) {
      throwError(state, "TAG directive accepts exactly two arguments");
    }
    handle2 = args[0];
    prefix = args[1];
    if (!PATTERN_TAG_HANDLE.test(handle2)) {
      throwError(state, "ill-formed tag handle (first argument) of the TAG directive");
    }
    if (_hasOwnProperty.call(state.tagMap, handle2)) {
      throwError(state, 'there is a previously declared suffix for "' + handle2 + '" tag handle');
    }
    if (!PATTERN_TAG_URI.test(prefix)) {
      throwError(state, "ill-formed tag prefix (second argument) of the TAG directive");
    }
    state.tagMap[handle2] = prefix;
  }
};
|
||
// Append input[start, end) to state.result after validating its characters:
// in JSON mode every char must be TAB or >= SPACE; otherwise the slice must
// contain no non-printable characters.
function captureSegment(state, start, end, checkJson) {
  var _position, _length, _character, _result;
  if (start < end) {
    _result = state.input.slice(start, end);
    if (checkJson) {
      for (_position = 0, _length = _result.length; _position < _length; _position += 1) {
        _character = _result.charCodeAt(_position);
        if (!(_character === 9 || 32 <= _character && _character <= 1114111)) {
          throwError(state, "expected valid JSON character");
        }
      }
    } else if (PATTERN_NON_PRINTABLE.test(_result)) {
      throwError(state, "the stream contains non-printable characters");
    }
    state.result += _result;
  }
}
// Implement the "<<" merge key: copy each key of `source` into
// `destination` unless destination already has it; merged-in keys are
// recorded in overridableKeys so later explicit keys may override them.
function mergeMappings(state, destination, source, overridableKeys) {
  var sourceKeys, key, index2, quantity;
  if (!common.isObject(source)) {
    throwError(state, "cannot merge mappings; the provided source object is unacceptable");
  }
  sourceKeys = Object.keys(source);
  for (index2 = 0, quantity = sourceKeys.length; index2 < quantity; index2 += 1) {
    key = sourceKeys[index2];
    if (!_hasOwnProperty.call(destination, key)) {
      destination[key] = source[key];
      overridableKeys[key] = true;
    }
  }
}
|
||
// Store one key/value pair into the mapping `_result` (created lazily when
// null). Non-scalar keys are stringified ("[object Object]" for objects);
// the merge key ("tag:yaml.org,2002:merge") triggers mergeMappings instead
// of a plain assignment. Duplicate keys throw unless JSON mode is on or the
// key was only merged in (overridable). Returns the mapping.
function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, startLine, startPos) {
  var index2, quantity;
  if (Array.isArray(keyNode)) {
    // Copy before mutating: the array may be aliased via an anchor.
    keyNode = Array.prototype.slice.call(keyNode);
    for (index2 = 0, quantity = keyNode.length; index2 < quantity; index2 += 1) {
      if (Array.isArray(keyNode[index2])) {
        throwError(state, "nested arrays are not supported inside keys");
      }
      // Collapse object elements to their default string form.
      if (typeof keyNode === "object" && _class(keyNode[index2]) === "[object Object]") {
        keyNode[index2] = "[object Object]";
      }
    }
  }
  if (typeof keyNode === "object" && _class(keyNode) === "[object Object]") {
    keyNode = "[object Object]";
  }
  // Mapping keys are always plain-object string properties.
  keyNode = String(keyNode);
  if (_result === null) {
    _result = {};
  }
  if (keyTag === "tag:yaml.org,2002:merge") {
    // "<<": merge one mapping or a sequence of mappings into the result.
    if (Array.isArray(valueNode)) {
      for (index2 = 0, quantity = valueNode.length; index2 < quantity; index2 += 1) {
        mergeMappings(state, _result, valueNode[index2], overridableKeys);
      }
    } else {
      mergeMappings(state, _result, valueNode, overridableKeys);
    }
  } else {
    if (!state.json && !_hasOwnProperty.call(overridableKeys, keyNode) && _hasOwnProperty.call(_result, keyNode)) {
      // Rewind reported position to where the duplicate key started.
      state.line = startLine || state.line;
      state.position = startPos || state.position;
      throwError(state, "duplicated mapping key");
    }
    _result[keyNode] = valueNode;
    // An explicit assignment is no longer overridable by later merges.
    delete overridableKeys[keyNode];
  }
  return _result;
}
|
||
// Consume exactly one line break at the current position (LF, CR, or CRLF)
// and update line/lineStart bookkeeping; throws if none is present.
function readLineBreak(state) {
  var ch;
  ch = state.input.charCodeAt(state.position);
  if (ch === 10) {
    state.position++;
  } else if (ch === 13) {
    state.position++;
    // CRLF counts as a single break.
    if (state.input.charCodeAt(state.position) === 10) {
      state.position++;
    }
  } else {
    throwError(state, "a line break is expected");
  }
  state.line += 1;
  state.lineStart = state.position;
}
// Skip whitespace, optional comments, and line breaks between tokens.
// Tracks the indentation of the last line reached and returns the number
// of line breaks consumed. Warns when indentation drops below checkIndent
// (pass -1 to disable that check).
function skipSeparationSpace(state, allowComments, checkIndent) {
  var lineBreaks = 0, ch = state.input.charCodeAt(state.position);
  while (ch !== 0) {
    while (is_WHITE_SPACE(ch)) {
      ch = state.input.charCodeAt(++state.position);
    }
    if (allowComments && ch === 35) {
      // '#' comment: skip to end of line.
      do {
        ch = state.input.charCodeAt(++state.position);
      } while (ch !== 10 && ch !== 13 && ch !== 0);
    }
    if (is_EOL(ch)) {
      readLineBreak(state);
      ch = state.input.charCodeAt(state.position);
      lineBreaks++;
      // Measure the new line's leading spaces.
      state.lineIndent = 0;
      while (ch === 32) {
        state.lineIndent++;
        ch = state.input.charCodeAt(++state.position);
      }
    } else {
      break;
    }
  }
  if (checkIndent !== -1 && lineBreaks !== 0 && state.lineIndent < checkIndent) {
    throwWarning(state, "deficient indentation");
  }
  return lineBreaks;
}
|
||
// True when the current position starts a document separator: "---" or
// "..." followed by whitespace, a line break, or end of input.
// Pure lookahead — does not advance state.position.
function testDocumentSeparator(state) {
  var _position = state.position, ch;
  ch = state.input.charCodeAt(_position);
  // Three identical '-' (45) or '.' (46) characters.
  if ((ch === 45 || ch === 46) && ch === state.input.charCodeAt(_position + 1) && ch === state.input.charCodeAt(_position + 2)) {
    _position += 3;
    ch = state.input.charCodeAt(_position);
    if (ch === 0 || is_WS_OR_EOL(ch)) {
      return true;
    }
  }
  return false;
}
// YAML line folding: a single break folds to one space; N > 1 breaks
// become N-1 newlines in the result.
function writeFoldedLines(state, count) {
  if (count === 1) {
    state.result += " ";
  } else if (count > 1) {
    state.result += common.repeat("\n", count - 1);
  }
}
|
||
// Try to read a plain (unquoted) scalar at the current position.
// Returns true and sets state.kind/state.result on success; returns false
// (restoring kind/result) when no plain scalar starts here. Handles line
// folding for continuation lines indented at least `nodeIndent`, and in
// flow context stops at flow indicators.
function readPlainScalar(state, nodeIndent, withinFlowCollection) {
  var preceding, following, captureStart, captureEnd, hasPendingContent, _line, _lineStart, _lineIndent, _kind = state.kind, _result = state.result, ch;
  ch = state.input.charCodeAt(state.position);
  // A plain scalar cannot start with whitespace, a flow indicator, or any
  // of: # & * ! | > ' " % @ `
  if (is_WS_OR_EOL(ch) || is_FLOW_INDICATOR(ch) || ch === 35 || ch === 38 || ch === 42 || ch === 33 || ch === 124 || ch === 62 || ch === 39 || ch === 34 || ch === 37 || ch === 64 || ch === 96) {
    return false;
  }
  // '?' or '-' only start a plain scalar when not followed by whitespace
  // (or, in flow context, a flow indicator).
  if (ch === 63 || ch === 45) {
    following = state.input.charCodeAt(state.position + 1);
    if (is_WS_OR_EOL(following) || withinFlowCollection && is_FLOW_INDICATOR(following)) {
      return false;
    }
  }
  state.kind = "scalar";
  state.result = "";
  captureStart = captureEnd = state.position;
  hasPendingContent = false;
  while (ch !== 0) {
    if (ch === 58) {
      // ':' followed by whitespace (or flow indicator in flow context)
      // terminates the scalar — it introduces a mapping value.
      following = state.input.charCodeAt(state.position + 1);
      if (is_WS_OR_EOL(following) || withinFlowCollection && is_FLOW_INDICATOR(following)) {
        break;
      }
    } else if (ch === 35) {
      // '#' starts a comment only after whitespace.
      preceding = state.input.charCodeAt(state.position - 1);
      if (is_WS_OR_EOL(preceding)) {
        break;
      }
    } else if (state.position === state.lineStart && testDocumentSeparator(state) || withinFlowCollection && is_FLOW_INDICATOR(ch)) {
      break;
    } else if (is_EOL(ch)) {
      // Possible folded continuation: remember position to backtrack.
      _line = state.line;
      _lineStart = state.lineStart;
      _lineIndent = state.lineIndent;
      skipSeparationSpace(state, false, -1);
      if (state.lineIndent >= nodeIndent) {
        // Continuation line: fold it in on the next iteration.
        hasPendingContent = true;
        ch = state.input.charCodeAt(state.position);
        continue;
      } else {
        // Not a continuation: restore position and stop.
        state.position = captureEnd;
        state.line = _line;
        state.lineStart = _lineStart;
        state.lineIndent = _lineIndent;
        break;
      }
    }
    if (hasPendingContent) {
      // Flush the previous line's content, then fold the line breaks.
      captureSegment(state, captureStart, captureEnd, false);
      writeFoldedLines(state, state.line - _line);
      captureStart = captureEnd = state.position;
      hasPendingContent = false;
    }
    if (!is_WHITE_SPACE(ch)) {
      // Trailing whitespace is excluded by only advancing captureEnd
      // past non-space characters.
      captureEnd = state.position + 1;
    }
    ch = state.input.charCodeAt(++state.position);
  }
  captureSegment(state, captureStart, captureEnd, false);
  if (state.result) {
    return true;
  }
  // Nothing captured: restore the caller's kind/result and report failure.
  state.kind = _kind;
  state.result = _result;
  return false;
}
|
||
// Try to read a single-quoted scalar ('...') at the current position.
// Returns false when no opening quote is present; throws on an unterminated
// scalar. '' inside the scalar is the escaped form of a single quote.
function readSingleQuotedScalar(state, nodeIndent) {
  var ch, captureStart, captureEnd;
  ch = state.input.charCodeAt(state.position);
  if (ch !== 39) { // opening '
    return false;
  }
  state.kind = "scalar";
  state.result = "";
  state.position++;
  captureStart = captureEnd = state.position;
  while ((ch = state.input.charCodeAt(state.position)) !== 0) {
    if (ch === 39) {
      captureSegment(state, captureStart, state.position, true);
      ch = state.input.charCodeAt(++state.position);
      if (ch === 39) {
        // '' escape: capture the second quote literally and continue.
        captureStart = state.position;
        state.position++;
        captureEnd = state.position;
      } else {
        // Closing quote: done.
        return true;
      }
    } else if (is_EOL(ch)) {
      // Fold line breaks inside the scalar.
      captureSegment(state, captureStart, captureEnd, true);
      writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent));
      captureStart = captureEnd = state.position;
    } else if (state.position === state.lineStart && testDocumentSeparator(state)) {
      throwError(state, "unexpected end of the document within a single quoted scalar");
    } else {
      state.position++;
      captureEnd = state.position;
    }
  }
  throwError(state, "unexpected end of the stream within a single quoted scalar");
}
|
||
// Try to read a double-quoted scalar ("...") at the current position.
// Returns false when no opening quote is present; throws on an unterminated
// scalar or a bad escape. Handles simple escapes (via the precomputed
// tables), \x/\u/\U hex escapes, and escaped line breaks.
function readDoubleQuotedScalar(state, nodeIndent) {
  var captureStart, captureEnd, hexLength, hexResult, tmp, ch;
  ch = state.input.charCodeAt(state.position);
  if (ch !== 34) { // opening "
    return false;
  }
  state.kind = "scalar";
  state.result = "";
  state.position++;
  captureStart = captureEnd = state.position;
  while ((ch = state.input.charCodeAt(state.position)) !== 0) {
    if (ch === 34) {
      // Closing quote: flush and finish.
      captureSegment(state, captureStart, state.position, true);
      state.position++;
      return true;
    } else if (ch === 92) {
      // Backslash escape.
      captureSegment(state, captureStart, state.position, true);
      ch = state.input.charCodeAt(++state.position);
      if (is_EOL(ch)) {
        // Escaped line break: continuation without folding a space in.
        skipSeparationSpace(state, false, nodeIndent);
      } else if (ch < 256 && simpleEscapeCheck[ch]) {
        state.result += simpleEscapeMap[ch];
        state.position++;
      } else if ((tmp = escapedHexLen(ch)) > 0) {
        // \xNN, \uNNNN or \UNNNNNNNN: read the fixed number of hex digits.
        hexLength = tmp;
        hexResult = 0;
        for (; hexLength > 0; hexLength--) {
          ch = state.input.charCodeAt(++state.position);
          if ((tmp = fromHexCode(ch)) >= 0) {
            hexResult = (hexResult << 4) + tmp;
          } else {
            throwError(state, "expected hexadecimal character");
          }
        }
        state.result += charFromCodepoint(hexResult);
        state.position++;
      } else {
        throwError(state, "unknown escape sequence");
      }
      captureStart = captureEnd = state.position;
    } else if (is_EOL(ch)) {
      // Unescaped line break: fold.
      captureSegment(state, captureStart, captureEnd, true);
      writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent));
      captureStart = captureEnd = state.position;
    } else if (state.position === state.lineStart && testDocumentSeparator(state)) {
      throwError(state, "unexpected end of the document within a double quoted scalar");
    } else {
      state.position++;
      captureEnd = state.position;
    }
  }
  throwError(state, "unexpected end of the stream within a double quoted scalar");
}
|
||
// Attempts to read a flow collection — a sequence "[a, b]" or mapping
// "{a: b}" — at state.position. Returns false when the cursor is not on
// '[' or '{'. On success state.result holds the array/object, state.kind
// is set accordingly, and true is returned. Entries are parsed recursively
// via composeNode.
function readFlowCollection(state, nodeIndent) {
  var readNext = true, _line, _tag = state.tag, _result, _anchor = state.anchor, following, terminator, isPair, isExplicitPair, isMapping, overridableKeys = {}, keyNode, keyTag, valueNode, ch;
  ch = state.input.charCodeAt(state.position);
  if (ch === 91) { // '['
    terminator = 93; // ']'
    isMapping = false;
    _result = [];
  } else if (ch === 123) { // '{'
    terminator = 125; // '}'
    isMapping = true;
    _result = {};
  } else {
    return false;
  }
  // Register the (still empty) collection so aliases can reference it,
  // even recursively from inside itself.
  if (state.anchor !== null) {
    state.anchorMap[state.anchor] = _result;
  }
  ch = state.input.charCodeAt(++state.position);
  while (ch !== 0) {
    skipSeparationSpace(state, true, nodeIndent);
    ch = state.input.charCodeAt(state.position);
    if (ch === terminator) {
      state.position++;
      state.tag = _tag;
      state.anchor = _anchor;
      state.kind = isMapping ? "mapping" : "sequence";
      state.result = _result;
      return true;
    } else if (!readNext) {
      // Previous entry was not followed by ','.
      throwError(state, "missed comma between flow collection entries");
    }
    keyTag = keyNode = valueNode = null;
    isPair = isExplicitPair = false;
    if (ch === 63) { // '?' — possible explicit key marker
      following = state.input.charCodeAt(state.position + 1);
      if (is_WS_OR_EOL(following)) {
        isPair = isExplicitPair = true;
        state.position++;
        skipSeparationSpace(state, true, nodeIndent);
      }
    }
    _line = state.line;
    composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true);
    keyTag = state.tag;
    keyNode = state.result;
    skipSeparationSpace(state, true, nodeIndent);
    ch = state.input.charCodeAt(state.position);
    // "key: value" — an implicit key must have its ':' on the same line.
    if ((isExplicitPair || state.line === _line) && ch === 58) { // ':'
      isPair = true;
      ch = state.input.charCodeAt(++state.position);
      skipSeparationSpace(state, true, nodeIndent);
      composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true);
      valueNode = state.result;
    }
    if (isMapping) {
      storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode);
    } else if (isPair) {
      // A pair inside a flow sequence becomes a single-pair mapping entry.
      _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode));
    } else {
      _result.push(keyNode);
    }
    skipSeparationSpace(state, true, nodeIndent);
    ch = state.input.charCodeAt(state.position);
    if (ch === 44) { // ','
      readNext = true;
      ch = state.input.charCodeAt(++state.position);
    } else {
      readNext = false;
    }
  }
  throwError(state, "unexpected end of the stream within a flow collection");
}
|
||
// Attempts to read a block scalar — literal "|" or folded ">" — at
// state.position. Returns false when the cursor is on neither indicator.
// Parses the optional header (chomping '+'/'-' and explicit indentation
// indicator 1-9), then consumes content lines until indentation drops
// below the scalar's indent, building state.result.
function readBlockScalar(state, nodeIndent) {
  var captureStart, folding, chomping = CHOMPING_CLIP, didReadContent = false, detectedIndent = false, textIndent = nodeIndent, emptyLines = 0, atMoreIndented = false, tmp, ch;
  ch = state.input.charCodeAt(state.position);
  if (ch === 124) { // '|' — literal style (breaks kept)
    folding = false;
  } else if (ch === 62) { // '>' — folded style (single breaks become spaces)
    folding = true;
  } else {
    return false;
  }
  state.kind = "scalar";
  state.result = "";
  // Header: chomping and indentation indicators may appear in either order,
  // each at most once.
  while (ch !== 0) {
    ch = state.input.charCodeAt(++state.position);
    if (ch === 43 || ch === 45) { // '+' keep / '-' strip
      if (CHOMPING_CLIP === chomping) {
        chomping = ch === 43 ? CHOMPING_KEEP : CHOMPING_STRIP;
      } else {
        throwError(state, "repeat of a chomping mode identifier");
      }
    } else if ((tmp = fromDecimalCode(ch)) >= 0) {
      if (tmp === 0) {
        throwError(state, "bad explicit indentation width of a block scalar; it cannot be less than one");
      } else if (!detectedIndent) {
        // Indicator is relative to the parent node's indent.
        textIndent = nodeIndent + tmp - 1;
        detectedIndent = true;
      } else {
        throwError(state, "repeat of an indentation width identifier");
      }
    } else {
      break;
    }
  }
  // Skip trailing whitespace and an optional '#' comment on the header line.
  if (is_WHITE_SPACE(ch)) {
    do {
      ch = state.input.charCodeAt(++state.position);
    } while (is_WHITE_SPACE(ch));
    if (ch === 35) { // '#'
      do {
        ch = state.input.charCodeAt(++state.position);
      } while (!is_EOL(ch) && ch !== 0);
    }
  }
  // Content lines.
  while (ch !== 0) {
    readLineBreak(state);
    state.lineIndent = 0;
    ch = state.input.charCodeAt(state.position);
    while ((!detectedIndent || state.lineIndent < textIndent) && ch === 32) { // ' '
      state.lineIndent++;
      ch = state.input.charCodeAt(++state.position);
    }
    // Auto-detect indentation from the first non-empty content line.
    if (!detectedIndent && state.lineIndent > textIndent) {
      textIndent = state.lineIndent;
    }
    if (is_EOL(ch)) {
      // Blank line: count it, emit breaks later per folding rules.
      emptyLines++;
      continue;
    }
    if (state.lineIndent < textIndent) {
      // End of the scalar: apply the chomping mode to trailing breaks.
      if (chomping === CHOMPING_KEEP) {
        state.result += common.repeat("\n", didReadContent ? 1 + emptyLines : emptyLines);
      } else if (chomping === CHOMPING_CLIP) {
        if (didReadContent) {
          state.result += "\n"; // clip keeps exactly one trailing newline
        }
      }
      // CHOMPING_STRIP: drop all trailing breaks.
      break;
    }
    if (folding) {
      // Folded style: single breaks fold to spaces, except around
      // more-indented lines, whose breaks are kept literally.
      if (is_WHITE_SPACE(ch)) {
        atMoreIndented = true;
        state.result += common.repeat("\n", didReadContent ? 1 + emptyLines : emptyLines);
      } else if (atMoreIndented) {
        // First normal line after a more-indented block.
        atMoreIndented = false;
        state.result += common.repeat("\n", emptyLines + 1);
      } else if (emptyLines === 0) {
        if (didReadContent) {
          state.result += " "; // fold a single break into a space
        }
      } else {
        // N blank lines fold into N newlines.
        state.result += common.repeat("\n", emptyLines);
      }
    } else {
      // Literal style: every break is preserved.
      state.result += common.repeat("\n", didReadContent ? 1 + emptyLines : emptyLines);
    }
    didReadContent = true;
    detectedIndent = true;
    emptyLines = 0;
    captureStart = state.position;
    while (!is_EOL(ch) && ch !== 0) {
      ch = state.input.charCodeAt(++state.position);
    }
    captureSegment(state, captureStart, state.position, false);
  }
  return true;
}
|
||
// Attempts to read a block sequence ("- item" lines) at the current indent.
// Returns true (with state.result set to the array) once at least one "- "
// entry marker was seen; false otherwise.
function readBlockSequence(state, nodeIndent) {
  var _line, _tag = state.tag, _anchor = state.anchor, _result = [], following, detected = false, ch;
  // Register early so aliases may reference the sequence recursively.
  if (state.anchor !== null) {
    state.anchorMap[state.anchor] = _result;
  }
  ch = state.input.charCodeAt(state.position);
  while (ch !== 0) {
    if (ch !== 45) { // '-'
      break;
    }
    following = state.input.charCodeAt(state.position + 1);
    if (!is_WS_OR_EOL(following)) {
      // e.g. "-foo" is a plain scalar, not an entry marker.
      break;
    }
    detected = true;
    state.position++;
    if (skipSeparationSpace(state, true, -1)) {
      if (state.lineIndent <= nodeIndent) {
        // "-" followed by a line at or below our indent => empty entry.
        _result.push(null);
        ch = state.input.charCodeAt(state.position);
        continue;
      }
    }
    _line = state.line;
    composeNode(state, nodeIndent, CONTEXT_BLOCK_IN, false, true);
    _result.push(state.result);
    skipSeparationSpace(state, true, -1);
    ch = state.input.charCodeAt(state.position);
    if ((state.line === _line || state.lineIndent > nodeIndent) && ch !== 0) {
      throwError(state, "bad indentation of a sequence entry");
    } else if (state.lineIndent < nodeIndent) {
      break;
    }
  }
  if (detected) {
    // Restore the tag/anchor captured before recursing into entries.
    state.tag = _tag;
    state.anchor = _anchor;
    state.kind = "sequence";
    state.result = _result;
    return true;
  }
  return false;
}
|
||
// Attempts to read a block mapping ("key: value" lines, optionally with
// explicit "? key" / ": value" entries) at the current indent. Returns
// true (with state.result set to the object) once at least one pair was
// detected; false otherwise.
function readBlockMapping(state, nodeIndent, flowIndent) {
  var following, allowCompact, _line, _pos, _tag = state.tag, _anchor = state.anchor, _result = {}, overridableKeys = {}, keyTag = null, keyNode = null, valueNode = null, atExplicitKey = false, detected = false, ch;
  // Register early so aliases may reference the mapping recursively.
  if (state.anchor !== null) {
    state.anchorMap[state.anchor] = _result;
  }
  ch = state.input.charCodeAt(state.position);
  while (ch !== 0) {
    following = state.input.charCodeAt(state.position + 1);
    _line = state.line; // for detecting multiline implicit keys
    _pos = state.position;
    // "? " starts an explicit key, ": " starts an explicit value.
    if ((ch === 63 || ch === 58) && is_WS_OR_EOL(following)) {
      if (ch === 63) { // '?'
        if (atExplicitKey) {
          // Previous explicit key had no value — store it with null.
          storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null);
          keyTag = keyNode = valueNode = null;
        }
        detected = true;
        atExplicitKey = true;
        allowCompact = true;
      } else if (atExplicitKey) { // ':' following a "? key"
        atExplicitKey = false;
        allowCompact = true;
      } else {
        throwError(state, "incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line");
      }
      state.position += 1;
      ch = following;
    } else if (composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) {
      // Parsed a node that may serve as an implicit key.
      if (state.line === _line) {
        ch = state.input.charCodeAt(state.position);
        while (is_WHITE_SPACE(ch)) {
          ch = state.input.charCodeAt(++state.position);
        }
        if (ch === 58) { // ':'
          ch = state.input.charCodeAt(++state.position);
          if (!is_WS_OR_EOL(ch)) {
            throwError(state, "a whitespace character is expected after the key-value separator within a block mapping");
          }
          if (atExplicitKey) {
            // Flush a pending explicit key before starting this pair.
            storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null);
            keyTag = keyNode = valueNode = null;
          }
          detected = true;
          atExplicitKey = false;
          allowCompact = false;
          keyTag = state.tag;
          keyNode = state.result;
        } else if (detected) {
          throwError(state, "can not read an implicit mapping pair; a colon is missed");
        } else {
          // Not a mapping after all — restore state and report success
          // so the caller treats the parsed node as its own result.
          state.tag = _tag;
          state.anchor = _anchor;
          return true;
        }
      } else if (detected) {
        throwError(state, "can not read a block mapping entry; a multiline key may not be an implicit key");
      } else {
        state.tag = _tag;
        state.anchor = _anchor;
        return true;
      }
    } else {
      break;
    }
    // Read the value part (or the body of an explicit key).
    if (state.line === _line || state.lineIndent > nodeIndent) {
      if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) {
        if (atExplicitKey) {
          keyNode = state.result;
        } else {
          valueNode = state.result;
        }
      }
      if (!atExplicitKey) {
        storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _pos);
        keyTag = keyNode = valueNode = null;
      }
      skipSeparationSpace(state, true, -1);
      ch = state.input.charCodeAt(state.position);
    }
    if (state.lineIndent > nodeIndent && ch !== 0) {
      throwError(state, "bad indentation of a mapping entry");
    } else if (state.lineIndent < nodeIndent) {
      break;
    }
  }
  // A trailing explicit key without a value maps to null.
  if (atExplicitKey) {
    storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null);
  }
  if (detected) {
    state.tag = _tag;
    state.anchor = _anchor;
    state.kind = "mapping";
    state.result = _result;
  }
  return detected;
}
|
||
// Attempts to read a tag property: "!local", "!!shorthand",
// "!named!suffix" or verbatim "!<tag:uri>". Returns false when the cursor
// is not on '!'; otherwise resolves the handle against state.tagMap and
// stores the full tag in state.tag.
function readTagProperty(state) {
  var _position, isVerbatim = false, isNamed = false, tagHandle, tagName, ch;
  ch = state.input.charCodeAt(state.position);
  if (ch !== 33) return false; // '!'
  if (state.tag !== null) {
    throwError(state, "duplication of a tag property");
  }
  ch = state.input.charCodeAt(++state.position);
  if (ch === 60) { // '<' — verbatim form !<...>
    isVerbatim = true;
    ch = state.input.charCodeAt(++state.position);
  } else if (ch === 33) { // second '!' — the "!!" secondary handle
    isNamed = true;
    tagHandle = "!!";
    ch = state.input.charCodeAt(++state.position);
  } else {
    tagHandle = "!";
  }
  _position = state.position;
  if (isVerbatim) {
    // Consume up to the closing '>'.
    do {
      ch = state.input.charCodeAt(++state.position);
    } while (ch !== 0 && ch !== 62); // '>'
    if (state.position < state.length) {
      tagName = state.input.slice(_position, state.position);
      ch = state.input.charCodeAt(++state.position);
    } else {
      throwError(state, "unexpected end of the stream within a verbatim tag");
    }
  } else {
    while (ch !== 0 && !is_WS_OR_EOL(ch)) {
      if (ch === 33) { // '!' inside the tag: terminates a named handle
        if (!isNamed) {
          // Handle spans from the leading '!' through this one.
          tagHandle = state.input.slice(_position - 1, state.position + 1);
          if (!PATTERN_TAG_HANDLE.test(tagHandle)) {
            throwError(state, "named tag handle cannot contain such characters");
          }
          isNamed = true;
          _position = state.position + 1;
        } else {
          throwError(state, "tag suffix cannot contain exclamation marks");
        }
      }
      ch = state.input.charCodeAt(++state.position);
    }
    tagName = state.input.slice(_position, state.position);
    if (PATTERN_FLOW_INDICATORS.test(tagName)) {
      throwError(state, "tag suffix cannot contain flow indicator characters");
    }
  }
  if (tagName && !PATTERN_TAG_URI.test(tagName)) {
    throwError(state, "tag name cannot contain such characters: " + tagName);
  }
  // Resolve the handle to a full tag URI.
  if (isVerbatim) {
    state.tag = tagName;
  } else if (_hasOwnProperty.call(state.tagMap, tagHandle)) {
    // Handle declared via a %TAG directive.
    state.tag = state.tagMap[tagHandle] + tagName;
  } else if (tagHandle === "!") {
    state.tag = "!" + tagName;
  } else if (tagHandle === "!!") {
    state.tag = "tag:yaml.org,2002:" + tagName;
  } else {
    throwError(state, 'undeclared tag handle "' + tagHandle + '"');
  }
  return true;
}
|
||
// Attempts to read an "&anchor" property at state.position. Returns false
// when the cursor is not on '&'; otherwise records the anchor name in
// state.anchor and returns true.
function readAnchorProperty(state) {
  var code = state.input.charCodeAt(state.position);
  if (code !== 38) return false; // '&'
  if (state.anchor !== null) {
    throwError(state, "duplication of an anchor property");
  }
  code = state.input.charCodeAt(++state.position);
  var nameStart = state.position;
  // The anchor name runs until whitespace, EOL, or a flow indicator.
  while (code !== 0 && !is_WS_OR_EOL(code) && !is_FLOW_INDICATOR(code)) {
    code = state.input.charCodeAt(++state.position);
  }
  if (state.position === nameStart) {
    throwError(state, "name of an anchor node must contain at least one character");
  }
  state.anchor = state.input.slice(nameStart, state.position);
  return true;
}
|
||
// Attempts to read an "*alias" node at state.position. Returns false when
// the cursor is not on '*'; otherwise resolves the alias against
// state.anchorMap, stores the referenced value in state.result, and
// returns true.
function readAlias(state) {
  var code = state.input.charCodeAt(state.position);
  if (code !== 42) return false; // '*'
  code = state.input.charCodeAt(++state.position);
  var nameStart = state.position;
  // The alias name runs until whitespace, EOL, or a flow indicator.
  while (code !== 0 && !is_WS_OR_EOL(code) && !is_FLOW_INDICATOR(code)) {
    code = state.input.charCodeAt(++state.position);
  }
  if (state.position === nameStart) {
    throwError(state, "name of an alias node must contain at least one character");
  }
  var alias = state.input.slice(nameStart, state.position);
  if (!_hasOwnProperty.call(state.anchorMap, alias)) {
    throwError(state, 'unidentified alias "' + alias + '"');
  }
  state.result = state.anchorMap[alias];
  skipSeparationSpace(state, true, -1);
  return true;
}
|
||
// Central recursive-descent entry point: parses a single node at the given
// parent indent and context, trying tag/anchor properties and then each
// node kind in precedence order (block/flow collections, block scalar,
// quoted scalars, alias, plain scalar). Finally resolves the node's tag
// against the schema. Returns true when a node or a property was consumed;
// the parsed value is left in state.result.
function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact) {
  var allowBlockStyles, allowBlockScalars, allowBlockCollections, indentStatus = 1, atNewLine = false, hasContent = false, typeIndex, typeQuantity, type, flowIndent, blockIndent;
  if (state.listener !== null) {
    state.listener("open", state);
  }
  state.tag = null;
  state.anchor = null;
  state.kind = null;
  state.result = null;
  // Block styles are only available in block contexts.
  allowBlockStyles = allowBlockScalars = allowBlockCollections = CONTEXT_BLOCK_OUT === nodeContext || CONTEXT_BLOCK_IN === nodeContext;
  if (allowToSeek) {
    if (skipSeparationSpace(state, true, -1)) {
      atNewLine = true;
      // indentStatus: 1 = deeper than parent, 0 = same, -1 = shallower.
      if (state.lineIndent > parentIndent) {
        indentStatus = 1;
      } else if (state.lineIndent === parentIndent) {
        indentStatus = 0;
      } else if (state.lineIndent < parentIndent) {
        indentStatus = -1;
      }
    }
  }
  // Consume tag/anchor properties, re-evaluating the indent after each.
  if (indentStatus === 1) {
    while (readTagProperty(state) || readAnchorProperty(state)) {
      if (skipSeparationSpace(state, true, -1)) {
        atNewLine = true;
        allowBlockCollections = allowBlockStyles;
        if (state.lineIndent > parentIndent) {
          indentStatus = 1;
        } else if (state.lineIndent === parentIndent) {
          indentStatus = 0;
        } else if (state.lineIndent < parentIndent) {
          indentStatus = -1;
        }
      } else {
        allowBlockCollections = false;
      }
    }
  }
  if (allowBlockCollections) {
    allowBlockCollections = atNewLine || allowCompact;
  }
  if (indentStatus === 1 || CONTEXT_BLOCK_OUT === nodeContext) {
    if (CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext) {
      flowIndent = parentIndent;
    } else {
      flowIndent = parentIndent + 1;
    }
    blockIndent = state.position - state.lineStart;
    if (indentStatus === 1) {
      // Collections first, then scalars, then aliases / plain scalars.
      if (allowBlockCollections && (readBlockSequence(state, blockIndent) || readBlockMapping(state, blockIndent, flowIndent)) || readFlowCollection(state, flowIndent)) {
        hasContent = true;
      } else {
        if (allowBlockScalars && readBlockScalar(state, flowIndent) || readSingleQuotedScalar(state, flowIndent) || readDoubleQuotedScalar(state, flowIndent)) {
          hasContent = true;
        } else if (readAlias(state)) {
          hasContent = true;
          if (state.tag !== null || state.anchor !== null) {
            throwError(state, "alias node should not have any properties");
          }
        } else if (readPlainScalar(state, flowIndent, CONTEXT_FLOW_IN === nodeContext)) {
          hasContent = true;
          if (state.tag === null) {
            state.tag = "?"; // '?' means "resolve the tag implicitly below"
          }
        }
        if (state.anchor !== null) {
          state.anchorMap[state.anchor] = state.result;
        }
      }
    } else if (indentStatus === 0) {
      // Special case: a block sequence may occur at the parent's own indent.
      hasContent = allowBlockCollections && readBlockSequence(state, blockIndent);
    }
  }
  // Tag resolution: "!" means "leave as-is", "?" means resolve implicitly,
  // anything else is looked up as an explicit tag in the schema's type map.
  if (state.tag !== null && state.tag !== "!") {
    if (state.tag === "?") {
      if (state.result !== null && state.kind !== "scalar") {
        throwError(state, 'unacceptable node kind for !<?> tag; it should be "scalar", not "' + state.kind + '"');
      }
      // First implicit type whose resolve() accepts the raw value wins.
      for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) {
        type = state.implicitTypes[typeIndex];
        if (type.resolve(state.result)) {
          state.result = type.construct(state.result);
          state.tag = type.tag;
          if (state.anchor !== null) {
            state.anchorMap[state.anchor] = state.result;
          }
          break;
        }
      }
    } else if (_hasOwnProperty.call(state.typeMap[state.kind || "fallback"], state.tag)) {
      type = state.typeMap[state.kind || "fallback"][state.tag];
      if (state.result !== null && type.kind !== state.kind) {
        throwError(state, "unacceptable node kind for !<" + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"');
      }
      if (!type.resolve(state.result)) {
        throwError(state, "cannot resolve a node with !<" + state.tag + "> explicit tag");
      } else {
        state.result = type.construct(state.result);
        if (state.anchor !== null) {
          state.anchorMap[state.anchor] = state.result;
        }
      }
    } else {
      throwError(state, "unknown tag !<" + state.tag + ">");
    }
  }
  if (state.listener !== null) {
    state.listener("close", state);
  }
  // A lone tag or anchor (with no content) still counts as having read
  // something.
  return state.tag !== null || state.anchor !== null || hasContent;
}
|
||
// Reads a single document: optional %directives, an optional "---" marker,
// the root node, and an optional "..." end marker. Pushes the parsed root
// onto state.documents.
function readDocument(state) {
  var documentStart = state.position, _position, directiveName, directiveArgs, hasDirectives = false, ch;
  state.version = null;
  state.checkLineBreaks = state.legacy;
  state.tagMap = {};    // %TAG handle declarations, per document
  state.anchorMap = {}; // &anchor -> value, per document
  while ((ch = state.input.charCodeAt(state.position)) !== 0) {
    skipSeparationSpace(state, true, -1);
    ch = state.input.charCodeAt(state.position);
    // Directives must start with '%' (37) at column 0.
    if (state.lineIndent > 0 || ch !== 37) {
      break;
    }
    hasDirectives = true;
    ch = state.input.charCodeAt(++state.position);
    _position = state.position;
    while (ch !== 0 && !is_WS_OR_EOL(ch)) {
      ch = state.input.charCodeAt(++state.position);
    }
    directiveName = state.input.slice(_position, state.position);
    directiveArgs = [];
    if (directiveName.length < 1) {
      throwError(state, "directive name must not be less than one character in length");
    }
    // Collect whitespace-separated arguments up to EOL or a '#' comment.
    while (ch !== 0) {
      while (is_WHITE_SPACE(ch)) {
        ch = state.input.charCodeAt(++state.position);
      }
      if (ch === 35) { // '#'
        do {
          ch = state.input.charCodeAt(++state.position);
        } while (ch !== 0 && !is_EOL(ch));
        break;
      }
      if (is_EOL(ch)) break;
      _position = state.position;
      while (ch !== 0 && !is_WS_OR_EOL(ch)) {
        ch = state.input.charCodeAt(++state.position);
      }
      directiveArgs.push(state.input.slice(_position, state.position));
    }
    if (ch !== 0) readLineBreak(state);
    if (_hasOwnProperty.call(directiveHandlers, directiveName)) {
      directiveHandlers[directiveName](state, directiveName, directiveArgs);
    } else {
      // Unknown directives warn rather than fail.
      throwWarning(state, 'unknown document directive "' + directiveName + '"');
    }
  }
  skipSeparationSpace(state, true, -1);
  // "---" (three 45s) at column 0 ends the directives section.
  if (state.lineIndent === 0 && state.input.charCodeAt(state.position) === 45 && state.input.charCodeAt(state.position + 1) === 45 && state.input.charCodeAt(state.position + 2) === 45) {
    state.position += 3;
    skipSeparationSpace(state, true, -1);
  } else if (hasDirectives) {
    throwError(state, "directives end mark is expected");
  }
  composeNode(state, state.lineIndent - 1, CONTEXT_BLOCK_OUT, false, true);
  skipSeparationSpace(state, true, -1);
  if (state.checkLineBreaks && PATTERN_NON_ASCII_LINE_BREAKS.test(state.input.slice(documentStart, state.position))) {
    throwWarning(state, "non-ASCII line breaks are interpreted as content");
  }
  state.documents.push(state.result);
  if (state.position === state.lineStart && testDocumentSeparator(state)) {
    // Optional explicit "..." end marker (46 === '.').
    if (state.input.charCodeAt(state.position) === 46) {
      state.position += 3;
      skipSeparationSpace(state, true, -1);
    }
    return;
  }
  if (state.position < state.length - 1) {
    throwError(state, "end of the stream or a document separator is expected");
  } else {
    return;
  }
}
|
||
// Normalizes the input string and parses every document in it, returning
// the array of parsed roots.
function loadDocuments(input, options2) {
  input = String(input);
  options2 = options2 || {};
  if (input.length !== 0) {
    // Ensure the input ends with a line break (10 === LF, 13 === CR).
    if (input.charCodeAt(input.length - 1) !== 10 && input.charCodeAt(input.length - 1) !== 13) {
      input += "\n";
    }
    // Strip a leading BOM (U+FEFF).
    if (input.charCodeAt(0) === 65279) {
      input = input.slice(1);
    }
  }
  var state = new State(input, options2);
  // Reject embedded NULs up front; position the error at the first one.
  var nullpos = input.indexOf("\0");
  if (nullpos !== -1) {
    state.position = nullpos;
    throwError(state, "null byte is not allowed in input");
  }
  // A NUL sentinel lets the scanners test for end-of-input with a single
  // charCodeAt comparison against 0.
  state.input += "\0";
  // Account for the indentation of the very first line.
  while (state.input.charCodeAt(state.position) === 32) {
    state.lineIndent += 1;
    state.position += 1;
  }
  while (state.position < state.length - 1) {
    readDocument(state);
  }
  return state.documents;
}
|
||
// Parses every document in `input`. With a callback, invokes it once per
// document and returns undefined; without one, returns the document array.
function loadAll(input, iterator2, options2) {
  // Support calling as loadAll(input, options): shift arguments when the
  // second argument is an options object rather than a callback.
  if (typeof options2 === "undefined" && iterator2 !== null && typeof iterator2 === "object") {
    options2 = iterator2;
    iterator2 = null;
  }
  var documents = loadDocuments(input, options2);
  if (typeof iterator2 !== "function") {
    return documents;
  }
  documents.forEach(function (document2) {
    iterator2(document2);
  });
}
|
||
// Parses `input` expecting exactly one document. Returns undefined for an
// empty stream, the single document otherwise; throws YAMLException when
// the stream contains more than one document.
function load(input, options2) {
  var documents = loadDocuments(input, options2);
  switch (documents.length) {
    case 0:
      return void 0;
    case 1:
      return documents[0];
    default:
      throw new YAMLException("expected a single document in the stream, but found more");
  }
}
|
||
// Like loadAll, but forces the safe schema (no JS-specific types).
function safeLoadAll(input, iterator2, options2) {
  // Same argument shifting as loadAll: (input, options) is allowed.
  if (typeof options2 === "undefined" && iterator2 !== null && typeof iterator2 === "object") {
    options2 = iterator2;
    iterator2 = null;
  }
  var safeOptions = common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options2);
  return loadAll(input, iterator2, safeOptions);
}
|
||
// Like load, but forces the safe schema (no JS-specific types).
function safeLoad(input, options2) {
  var safeOptions = common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options2);
  return load(input, safeOptions);
}
|
||
// Public API of the loader module.
module2.exports.loadAll = loadAll;
module2.exports.load = load;
module2.exports.safeLoadAll = safeLoadAll;
module2.exports.safeLoad = safeLoad;
|
||
}
|
||
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/dumper.js
|
||
var require_dumper = __commonJS({
|
||
"node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml/dumper.js"(exports2, module2) {
|
||
"use strict";
|
||
var common = require_common();
var YAMLException = require_exception();
var DEFAULT_FULL_SCHEMA = require_default_full();
var DEFAULT_SAFE_SCHEMA = require_default_safe();
var _toString = Object.prototype.toString;
var _hasOwnProperty = Object.prototype.hasOwnProperty;
// Character codes referenced throughout the dumper.
var CHAR_TAB = 9;
var CHAR_LINE_FEED = 10;
var CHAR_CARRIAGE_RETURN = 13;
var CHAR_SPACE = 32;
var CHAR_EXCLAMATION = 33;
var CHAR_DOUBLE_QUOTE = 34;
var CHAR_SHARP = 35;
var CHAR_PERCENT = 37;
var CHAR_AMPERSAND = 38;
var CHAR_SINGLE_QUOTE = 39;
var CHAR_ASTERISK = 42;
var CHAR_COMMA = 44;
var CHAR_MINUS = 45;
var CHAR_COLON = 58;
var CHAR_EQUALS = 61;
var CHAR_GREATER_THAN = 62;
var CHAR_QUESTION = 63;
var CHAR_COMMERCIAL_AT = 64;
var CHAR_LEFT_SQUARE_BRACKET = 91;
var CHAR_RIGHT_SQUARE_BRACKET = 93;
var CHAR_GRAVE_ACCENT = 96;
var CHAR_LEFT_CURLY_BRACKET = 123;
var CHAR_VERTICAL_LINE = 124;
var CHAR_RIGHT_CURLY_BRACKET = 125;
// Code point -> escape sequence emitted inside double-quoted output.
var ESCAPE_SEQUENCES = {};
ESCAPE_SEQUENCES[0] = "\\0";
ESCAPE_SEQUENCES[7] = "\\a";
ESCAPE_SEQUENCES[8] = "\\b";
ESCAPE_SEQUENCES[9] = "\\t";
ESCAPE_SEQUENCES[10] = "\\n";
ESCAPE_SEQUENCES[11] = "\\v";
ESCAPE_SEQUENCES[12] = "\\f";
ESCAPE_SEQUENCES[13] = "\\r";
ESCAPE_SEQUENCES[27] = "\\e";
ESCAPE_SEQUENCES[34] = '\\"';
ESCAPE_SEQUENCES[92] = "\\\\";
ESCAPE_SEQUENCES[133] = "\\N"; // next line (NEL)
ESCAPE_SEQUENCES[160] = "\\_"; // non-breaking space
ESCAPE_SEQUENCES[8232] = "\\L"; // line separator
ESCAPE_SEQUENCES[8233] = "\\P"; // paragraph separator
// Plain strings that YAML 1.1 parsers read as booleans; the dumper quotes
// them unless noCompatMode is set.
var DEPRECATED_BOOLEANS_SYNTAX = [
  "y", "Y", "yes", "Yes", "YES", "on", "On", "ON",
  "n", "N", "no", "No", "NO", "off", "Off", "OFF"
];
|
||
// Builds a tag -> style map from the user-supplied "styles" option,
// expanding the "!!" shorthand to its full tag URI and resolving per-type
// style aliases via the schema.
function compileStyleMap(schema, map4) {
  var result, keys2, index2, length, tag, style, type;
  if (map4 === null) return {};
  result = {};
  keys2 = Object.keys(map4);
  for (index2 = 0, length = keys2.length; index2 < length; index2 += 1) {
    tag = keys2[index2];
    style = String(map4[tag]);
    if (tag.slice(0, 2) === "!!") {
      // "!!str" -> "tag:yaml.org,2002:str"
      tag = "tag:yaml.org,2002:" + tag.slice(2);
    }
    type = schema.compiledTypeMap["fallback"][tag];
    if (type && _hasOwnProperty.call(type.styleAliases, style)) {
      style = type.styleAliases[style];
    }
    result[tag] = style;
  }
  return result;
}
|
||
// Encodes a code point as a YAML hex escape: "\xNN", "\uNNNN" or
// "\UNNNNNNNN" depending on magnitude. Throws for values above 0xFFFFFFFF.
function encodeHex(character) {
  var hex = character.toString(16).toUpperCase();
  var handle2;
  var length;
  if (character <= 0xFF) {
    handle2 = "x";
    length = 2;
  } else if (character <= 0xFFFF) {
    handle2 = "u";
    length = 4;
  } else if (character <= 0xFFFFFFFF) {
    handle2 = "U";
    length = 8;
  } else {
    throw new YAMLException("code point within a string may not be greater than 0xFFFFFFFF");
  }
  var padding = common.repeat("0", length - hex.length);
  return "\\" + handle2 + padding + hex;
}
|
||
// Dumper state: normalized user options plus per-dump working data.
function State(options2) {
  this.schema = options2["schema"] || DEFAULT_FULL_SCHEMA;
  this.indent = Math.max(1, options2["indent"] || 2);
  this.noArrayIndent = options2["noArrayIndent"] || false;
  this.skipInvalid = options2["skipInvalid"] || false;
  // flowLevel -1 means "never switch to flow style".
  this.flowLevel = common.isNothing(options2["flowLevel"]) ? -1 : options2["flowLevel"];
  this.styleMap = compileStyleMap(this.schema, options2["styles"] || null);
  this.sortKeys = options2["sortKeys"] || false;
  this.lineWidth = options2["lineWidth"] || 80;
  this.noRefs = options2["noRefs"] || false;
  this.noCompatMode = options2["noCompatMode"] || false;
  this.condenseFlow = options2["condenseFlow"] || false;
  this.implicitTypes = this.schema.compiledImplicit;
  this.explicitTypes = this.schema.compiledExplicit;
  this.tag = null;        // pending tag for the node being dumped
  this.result = "";       // accumulated output
  this.duplicates = [];   // objects referenced more than once (anchor candidates)
  this.usedDuplicates = null;
}
|
||
// Prefixes every non-empty line of `string3` with `spaces` spaces; empty
// lines are left unindented.
function indentString(string3, spaces) {
  var pad = common.repeat(" ", spaces);
  return string3
    .split("\n")
    .map(function (line) {
      return line.length ? pad + line : line;
    })
    .join("\n");
}
|
||
// Returns a newline followed by the indentation for the given nesting level.
function generateNextLine(state, level) {
  var indentWidth = state.indent * level;
  return "\n" + common.repeat(" ", indentWidth);
}
|
||
// True when any of the schema's implicit types would resolve `str2` — i.e.
// dumping it as a plain scalar would be re-read as a non-string value.
function testImplicitResolving(state, str2) {
  var types = state.implicitTypes;
  for (var i = 0; i < types.length; i += 1) {
    if (types[i].resolve(str2)) {
      return true;
    }
  }
  return false;
}
|
||
// True for YAML inline whitespace: space or tab.
function isWhitespace(c) {
  switch (c) {
    case CHAR_SPACE:
    case CHAR_TAB:
      return true;
    default:
      return false;
  }
}
|
||
// True when the code point may appear unescaped in YAML output:
// printable ASCII, most of the BMP excluding surrogates, the line/paragraph
// separators and the BOM, plus all supplementary-plane code points.
function isPrintable(c) {
  if (c >= 32 && c <= 126) return true;                 // printable ASCII
  if (c >= 161 && c <= 55295) {
    return c !== 8232 && c !== 8233;                    // exclude LS / PS
  }
  if (c >= 57344 && c <= 65533) return c !== 65279;     // exclude BOM
  return c >= 65536 && c <= 1114111;                    // supplementary planes
}
|
||
// A YAML "ns" (non-space) character: printable, not inline whitespace, and
// not a BOM (65279) or line break.
function isNsChar(c) {
  return isPrintable(c) && !isWhitespace(c) && c !== 65279 && c !== CHAR_CARRIAGE_RETURN && c !== CHAR_LINE_FEED;
}
|
||
// True when `c` may appear (anywhere but first) in a plain flow scalar:
// printable, not a BOM, not a flow indicator or ':'; '#' is only safe when
// the preceding character (`prev`) is a non-space character.
function isPlainSafe(c, prev) {
  return isPrintable(c) && c !== 65279 && c !== CHAR_COMMA && c !== CHAR_LEFT_SQUARE_BRACKET && c !== CHAR_RIGHT_SQUARE_BRACKET && c !== CHAR_LEFT_CURLY_BRACKET && c !== CHAR_RIGHT_CURLY_BRACKET && c !== CHAR_COLON && (c !== CHAR_SHARP || prev && isNsChar(prev));
}
|
||
// True when `c` may be the FIRST character of a plain scalar — stricter
// than isPlainSafe, since YAML reserves many indicator characters at the
// start of a node.
function isPlainSafeFirst(c) {
  return isPrintable(c) && c !== 65279 && !isWhitespace(c) && c !== CHAR_MINUS && c !== CHAR_QUESTION && c !== CHAR_COLON && c !== CHAR_COMMA && c !== CHAR_LEFT_SQUARE_BRACKET && c !== CHAR_RIGHT_SQUARE_BRACKET && c !== CHAR_LEFT_CURLY_BRACKET && c !== CHAR_RIGHT_CURLY_BRACKET && c !== CHAR_SHARP && c !== CHAR_AMPERSAND && c !== CHAR_ASTERISK && c !== CHAR_EXCLAMATION && c !== CHAR_VERTICAL_LINE && c !== CHAR_EQUALS && c !== CHAR_GREATER_THAN && c !== CHAR_SINGLE_QUOTE && c !== CHAR_DOUBLE_QUOTE && c !== CHAR_PERCENT && c !== CHAR_COMMERCIAL_AT && c !== CHAR_GRAVE_ACCENT;
}
|
||
// A block scalar needs an explicit indentation indicator when its first
// content line starts with a space (possibly after leading blank lines),
// since the parser would otherwise mistake that space for indentation.
function needIndentIndicator(string3) {
  return /^\n* /.test(string3);
}
|
||
// Scalar output styles, in order of preference.
var STYLE_PLAIN = 1;   // bare text
var STYLE_SINGLE = 2;  // '...'
var STYLE_LITERAL = 3; // |
var STYLE_FOLDED = 4;  // >
var STYLE_DOUBLE = 5;  // "..."
|
||
// Picks the output style (plain / single / literal / folded / double) for a
// string scalar. Double quoting is forced whenever a non-printable
// character appears, since only it can escape arbitrary code points.
function chooseScalarStyle(string3, singleLineOnly, indentPerLevel, lineWidth, testAmbiguousType) {
  var i;
  var char, prev_char;
  var hasLineBreak = false;
  var hasFoldableLine = false; // a too-long line that is not more-indented
  var shouldTrackWidth = lineWidth !== -1;
  var previousLineBreak = -1; // -1 so the first line's length is measured
  var plain = isPlainSafeFirst(string3.charCodeAt(0)) && !isWhitespace(string3.charCodeAt(string3.length - 1));
  if (singleLineOnly) {
    // Keys and flow contexts cannot use block styles.
    for (i = 0; i < string3.length; i++) {
      char = string3.charCodeAt(i);
      if (!isPrintable(char)) {
        return STYLE_DOUBLE;
      }
      prev_char = i > 0 ? string3.charCodeAt(i - 1) : null;
      plain = plain && isPlainSafe(char, prev_char);
    }
  } else {
    for (i = 0; i < string3.length; i++) {
      char = string3.charCodeAt(i);
      if (char === CHAR_LINE_FEED) {
        hasLineBreak = true;
        if (shouldTrackWidth) {
          hasFoldableLine = hasFoldableLine || // Foldable line = too long, and not more-indented.
          i - previousLineBreak - 1 > lineWidth && string3[previousLineBreak + 1] !== " ";
          previousLineBreak = i;
        }
      } else if (!isPrintable(char)) {
        return STYLE_DOUBLE;
      }
      prev_char = i > 0 ? string3.charCodeAt(i - 1) : null;
      plain = plain && isPlainSafe(char, prev_char);
    }
    // Also check the final (unterminated) line for foldability.
    hasFoldableLine = hasFoldableLine || shouldTrackWidth && (i - previousLineBreak - 1 > lineWidth && string3[previousLineBreak + 1] !== " ");
  }
  if (!hasLineBreak && !hasFoldableLine) {
    // Plain only when the text would not re-parse as another type
    // (e.g. "123" or "true" would — then single-quote it).
    return plain && !testAmbiguousType(string3) ? STYLE_PLAIN : STYLE_SINGLE;
  }
  if (indentPerLevel > 9 && needIndentIndicator(string3)) {
    // The block-scalar indentation indicator only supports widths 1-9.
    return STYLE_DOUBLE;
  }
  return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL;
}
|
||
// Serialize a string scalar into state.dump, selecting the style via
// chooseScalarStyle. `level` drives indentation; `iskey` forces
// single-line styles because mapping keys cannot be block scalars.
function writeScalar(state, string3, level, iskey) {
  state.dump = function() {
    if (string3.length === 0) {
      return "''"; // the empty string must be quoted
    }
    if (!state.noCompatMode && DEPRECATED_BOOLEANS_SYNTAX.indexOf(string3) !== -1) {
      // Quote YAML 1.1 boolean-like words (yes/no/on/off/...) so older
      // parsers do not read them as booleans.
      return "'" + string3 + "'";
    }
    var indent = state.indent * Math.max(1, level);
    // Effective width: lineWidth minus current indent, but never fold
    // narrower than 40 columns (or lineWidth itself, if smaller).
    var lineWidth = state.lineWidth === -1 ? -1 : Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent);
    // Keys, and anything at or inside flowLevel, must stay on one line.
    var singleLineOnly = iskey || state.flowLevel > -1 && level >= state.flowLevel;
    function testAmbiguity(string4) {
      return testImplicitResolving(state, string4);
    }
    switch (chooseScalarStyle(string3, singleLineOnly, state.indent, lineWidth, testAmbiguity)) {
      case STYLE_PLAIN:
        return string3;
      case STYLE_SINGLE:
        // Single-quoted style escapes only the quote itself (doubled).
        return "'" + string3.replace(/'/g, "''") + "'";
      case STYLE_LITERAL:
        return "|" + blockHeader(string3, state.indent) + dropEndingNewline(indentString(string3, indent));
      case STYLE_FOLDED:
        return ">" + blockHeader(string3, state.indent) + dropEndingNewline(indentString(foldString(string3, lineWidth), indent));
      case STYLE_DOUBLE:
        // NOTE(review): escapeString is declared with a single parameter;
        // the lineWidth argument passed here appears to be ignored.
        return '"' + escapeString(string3, lineWidth) + '"';
      default:
        throw new YAMLException("impossible error: invalid scalar style");
    }
  }();
}
|
||
/**
 * Build the header that follows "|" or ">" for a block scalar: an
 * optional single-digit indentation indicator plus a chomping
 * indicator ("+" keep, "-" strip, "" clip), terminated by a newline.
 */
function blockHeader(string3, indentPerLevel) {
  const indentIndicator = needIndentIndicator(string3) ? String(indentPerLevel) : "";
  // Clip when the value ends with exactly one newline; keep ("+") when it
  // ends with a blank line (or is just "\n"); strip ("-") otherwise.
  const endsWithNewline = string3[string3.length - 1] === "\n";
  const keep = endsWithNewline && (string3[string3.length - 2] === "\n" || string3 === "\n");
  let chomp;
  if (keep) {
    chomp = "+";
  } else if (endsWithNewline) {
    chomp = "";
  } else {
    chomp = "-";
  }
  return indentIndicator + chomp + "\n";
}
|
||
/** Strip a single trailing newline from a block scalar body, if present. */
function dropEndingNewline(string3) {
  if (string3[string3.length - 1] === "\n") {
    return string3.slice(0, -1);
  }
  return string3;
}
|
||
// Fold a multi-line string for ">" (folded) style output. The first line
// is handled specially because it has no preceding newline; subsequent
// lines are matched by the stateful /g regex lineRe, whose lastIndex is
// seeded so matching resumes exactly after the first line. More-indented
// lines (starting with a space) must not be folded, and a blank separator
// line ("\n") is inserted between two adjacent foldable lines so the fold
// round-trips.
function foldString(string3, width) {
  var lineRe = /(\n+)([^\n]*)/g;
  var result = function() {
    var nextLF = string3.indexOf("\n");
    nextLF = nextLF !== -1 ? nextLF : string3.length;
    lineRe.lastIndex = nextLF; // resume the /g regex after the first line
    return foldLine(string3.slice(0, nextLF), width);
  }();
  // Tracks whether the previous line was more-indented (or the string
  // started with a break/space), which suppresses the extra blank line.
  var prevMoreIndented = string3[0] === "\n" || string3[0] === " ";
  var moreIndented;
  var match;
  while (match = lineRe.exec(string3)) {
    var prefix = match[1], line = match[2];
    moreIndented = line[0] === " ";
    result += prefix + (!prevMoreIndented && !moreIndented && line !== "" ? "\n" : "") + foldLine(line, width);
    prevMoreIndented = moreIndented;
  }
  return result;
}
|
||
/**
 * Greedily fold one line at spaces so each resulting chunk is at most
 * `width` characters where possible. Empty and more-indented lines
 * (those starting with a space) are returned untouched. Breaks happen
 * only at a space followed by a non-space; an overlong word is never
 * split, so a chunk may exceed `width` when no break point exists.
 */
function foldLine(line, width) {
  if (line === "" || line[0] === " ") return line;

  // Candidate break points sit at each " X" (space then non-space).
  const breakRe = / [^ ]/g;
  let chunkStart = 0;
  let lastBreak = 0;
  let folded = "";
  let found;

  while ((found = breakRe.exec(line)) !== null) {
    const candidate = found.index;
    if (candidate - chunkStart > width) {
      // Prefer the previous break; if none fits, accept an overlong chunk.
      const cut = lastBreak > chunkStart ? lastBreak : candidate;
      folded += "\n" + line.slice(chunkStart, cut);
      chunkStart = cut + 1; // skip the space we folded at
    }
    lastBreak = candidate;
  }

  // Seed with a newline so every chunk is prefixed uniformly; it is
  // trimmed off at the end.
  folded += "\n";
  if (line.length - chunkStart > width && lastBreak > chunkStart) {
    folded += line.slice(chunkStart, lastBreak) + "\n" + line.slice(lastBreak + 1);
  } else {
    folded += line.slice(chunkStart);
  }
  return folded.slice(1);
}
|
||
// Escape a string for double-quoted style: surrogate pairs are combined
// into a single astral code point and emitted via encodeHex, known escapes
// come from ESCAPE_SEQUENCES, and other non-printables become hex escapes.
// NOTE(review): writeScalar passes a second lineWidth argument that this
// function neither declares nor uses.
function escapeString(string3) {
  var result = "";
  var char, nextChar;
  var escapeSeq;
  for (var i = 0; i < string3.length; i++) {
    char = string3.charCodeAt(i);
    // High surrogate (0xD800-0xDBFF): try to pair with the next unit.
    if (char >= 55296 && char <= 56319) {
      nextChar = string3.charCodeAt(i + 1);
      if (nextChar >= 56320 && nextChar <= 57343) {
        // Combine the surrogate pair into one code point and escape it.
        result += encodeHex((char - 55296) * 1024 + nextChar - 56320 + 65536);
        i++; // consume the low surrogate
        continue;
      }
    }
    escapeSeq = ESCAPE_SEQUENCES[char];
    // Printable chars pass through; everything else uses the table or hex.
    result += !escapeSeq && isPrintable(char) ? string3[i] : escapeSeq || encodeHex(char);
  }
  return result;
}
|
||
// Serialize an array in flow style ("[a, b]") into state.dump.
// Items rejected by writeNode (e.g. under skipInvalid) are omitted.
function writeFlowSequence(state, level, object) {
  var _result = "", _tag = state.tag, index2, length;
  for (index2 = 0, length = object.length; index2 < length; index2 += 1) {
    if (writeNode(state, level, object[index2], false, false)) {
      if (index2 !== 0) _result += "," + (!state.condenseFlow ? " " : "");
      _result += state.dump;
    }
  }
  state.tag = _tag; // writeNode clobbers state.tag; restore the caller's
  state.dump = "[" + _result + "]";
}
|
||
// Serialize an array in block style ("- item" lines) into state.dump.
// `compact` suppresses the newline before the first item so the sequence
// can start on the same line as its parent key.
function writeBlockSequence(state, level, object, compact) {
  var _result = "", _tag = state.tag, index2, length;
  for (index2 = 0, length = object.length; index2 < length; index2 += 1) {
    if (writeNode(state, level + 1, object[index2], true, true)) {
      if (!compact || index2 !== 0) {
        _result += generateNextLine(state, level);
      }
      if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
        // Child output already begins with a newline: no trailing space.
        _result += "-";
      } else {
        _result += "- ";
      }
      _result += state.dump;
    }
  }
  state.tag = _tag; // restore the tag writeNode overwrote
  state.dump = _result || "[]"; // empty sequence falls back to flow form
}
|
||
// Serialize an object in flow style ("{a: 1, b: 2}") into state.dump.
// Pairs whose key or value cannot be dumped are skipped entirely.
function writeFlowMapping(state, level, object) {
  var _result = "", _tag = state.tag, objectKeyList = Object.keys(object), index2, length, objectKey, objectValue, pairBuffer;
  for (index2 = 0, length = objectKeyList.length; index2 < length; index2 += 1) {
    pairBuffer = "";
    if (index2 !== 0) pairBuffer += ", ";
    if (state.condenseFlow) pairBuffer += '"'; // condensed output quotes keys
    objectKey = objectKeyList[index2];
    objectValue = object[objectKey];
    if (!writeNode(state, level, objectKey, false, false)) {
      continue; // key not dumpable: drop the whole pair
    }
    if (state.dump.length > 1024) pairBuffer += "? "; // explicit form for very long keys
    pairBuffer += state.dump + (state.condenseFlow ? '"' : "") + ":" + (state.condenseFlow ? "" : " ");
    if (!writeNode(state, level, objectValue, false, false)) {
      continue; // value not dumpable: drop the whole pair
    }
    pairBuffer += state.dump;
    _result += pairBuffer;
  }
  state.tag = _tag; // restore the tag writeNode overwrote
  state.dump = "{" + _result + "}";
}
|
||
// Serialize an object in block style ("key: value" lines) into state.dump.
// Honors state.sortKeys (true = default sort, function = comparator) and
// uses the explicit "? key" form for tagged keys or keys longer than 1024
// characters. `compact` lets the first pair share its parent's line.
function writeBlockMapping(state, level, object, compact) {
  var _result = "", _tag = state.tag, objectKeyList = Object.keys(object), index2, length, objectKey, objectValue, explicitPair, pairBuffer;
  if (state.sortKeys === true) {
    objectKeyList.sort();
  } else if (typeof state.sortKeys === "function") {
    objectKeyList.sort(state.sortKeys);
  } else if (state.sortKeys) {
    // Any other truthy value is a configuration error.
    throw new YAMLException("sortKeys must be a boolean or a function");
  }
  for (index2 = 0, length = objectKeyList.length; index2 < length; index2 += 1) {
    pairBuffer = "";
    if (!compact || index2 !== 0) {
      pairBuffer += generateNextLine(state, level);
    }
    objectKey = objectKeyList[index2];
    objectValue = object[objectKey];
    if (!writeNode(state, level + 1, objectKey, true, true, true)) {
      continue; // key not dumpable: drop the whole pair
    }
    // Explicit "?" form when the key carries a tag or is very long.
    explicitPair = state.tag !== null && state.tag !== "?" || state.dump && state.dump.length > 1024;
    if (explicitPair) {
      if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
        pairBuffer += "?"; // key output already starts with a newline
      } else {
        pairBuffer += "? ";
      }
    }
    pairBuffer += state.dump;
    if (explicitPair) {
      pairBuffer += generateNextLine(state, level);
    }
    if (!writeNode(state, level + 1, objectValue, true, explicitPair)) {
      continue; // value not dumpable: drop the whole pair
    }
    if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {
      pairBuffer += ":"; // value output already starts with a newline
    } else {
      pairBuffer += ": ";
    }
    pairBuffer += state.dump;
    _result += pairBuffer;
  }
  state.tag = _tag; // restore the tag writeNode overwrote
  state.dump = _result || "{}"; // empty mapping falls back to flow form
}
|
||
// Try to represent `object` with one of the schema's types (explicit or
// implicit list per the `explicit` flag). On a match, sets state.tag and —
// when the type defines represent() — state.dump with the serialized form.
// Returns true when some type matched, false otherwise.
function detectType(state, object, explicit) {
  var _result, typeList, index2, length, type, style;
  typeList = explicit ? state.explicitTypes : state.implicitTypes;
  for (index2 = 0, length = typeList.length; index2 < length; index2 += 1) {
    type = typeList[index2];
    // A type matches when its instanceOf and/or predicate checks pass.
    if ((type.instanceOf || type.predicate) && (!type.instanceOf || typeof object === "object" && object instanceof type.instanceOf) && (!type.predicate || type.predicate(object))) {
      state.tag = explicit ? type.tag : "?";
      if (type.represent) {
        style = state.styleMap[type.tag] || type.defaultStyle;
        if (_toString.call(type.represent) === "[object Function]") {
          // represent is a single serializer function
          _result = type.represent(object, style);
        } else if (_hasOwnProperty.call(type.represent, style)) {
          // represent is a map of style name -> serializer
          _result = type.represent[style](object, style);
        } else {
          throw new YAMLException("!<" + type.tag + '> tag resolver accepts not "' + style + '" style');
        }
        state.dump = _result;
      }
      return true;
    }
  }
  return false;
}
|
||
// Core dispatcher: serialize any value into state.dump. Returns false only
// when the value cannot be represented and state.skipInvalid is set.
// - block: prefer block styles (disabled at/inside state.flowLevel).
// - compact: allow the first child to share its parent's line.
// - iskey: the value is being written as a mapping key.
// Duplicate objects found by getDuplicateReferences get "&ref_N" anchors on
// first emission and "*ref_N" aliases afterwards.
function writeNode(state, level, object, block, compact, iskey) {
  state.tag = null;
  state.dump = object;
  if (!detectType(state, object, false)) {
    detectType(state, object, true); // fall back to explicit types
  }
  var type = _toString.call(state.dump);
  if (block) {
    // Flow context overrides the caller's block preference.
    block = state.flowLevel < 0 || state.flowLevel > level;
  }
  var objectOrArray = type === "[object Object]" || type === "[object Array]", duplicateIndex, duplicate;
  if (objectOrArray) {
    duplicateIndex = state.duplicates.indexOf(object);
    duplicate = duplicateIndex !== -1;
  }
  // Tagged nodes, anchored nodes, and non-default indents cannot be compact.
  if (state.tag !== null && state.tag !== "?" || duplicate || state.indent !== 2 && level > 0) {
    compact = false;
  }
  if (duplicate && state.usedDuplicates[duplicateIndex]) {
    // Already emitted once: reference the anchor by alias.
    state.dump = "*ref_" + duplicateIndex;
  } else {
    if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) {
      state.usedDuplicates[duplicateIndex] = true; // first occurrence carries the anchor
    }
    if (type === "[object Object]") {
      if (block && Object.keys(state.dump).length !== 0) {
        writeBlockMapping(state, level, state.dump, compact);
        if (duplicate) {
          state.dump = "&ref_" + duplicateIndex + state.dump;
        }
      } else {
        writeFlowMapping(state, level, state.dump);
        if (duplicate) {
          state.dump = "&ref_" + duplicateIndex + " " + state.dump;
        }
      }
    } else if (type === "[object Array]") {
      // noArrayIndent renders sequences dedented one level under their key.
      var arrayLevel = state.noArrayIndent && level > 0 ? level - 1 : level;
      if (block && state.dump.length !== 0) {
        writeBlockSequence(state, arrayLevel, state.dump, compact);
        if (duplicate) {
          state.dump = "&ref_" + duplicateIndex + state.dump;
        }
      } else {
        writeFlowSequence(state, arrayLevel, state.dump);
        if (duplicate) {
          state.dump = "&ref_" + duplicateIndex + " " + state.dump;
        }
      }
    } else if (type === "[object String]") {
      if (state.tag !== "?") {
        // "?" means a type's represent() already produced final text.
        writeScalar(state, state.dump, level, iskey);
      }
    } else {
      if (state.skipInvalid) return false;
      throw new YAMLException("unacceptable kind of an object to dump " + type);
    }
    if (state.tag !== null && state.tag !== "?") {
      // Prefix the explicit tag in verbatim !<...> form.
      state.dump = "!<" + state.tag + "> " + state.dump;
    }
  }
  return true;
}
|
||
// Pre-scan `object` and record every node referenced more than once in
// state.duplicates, so writeNode can emit &ref_N anchors / *ref_N aliases.
// state.usedDuplicates is a parallel "anchor already emitted" flag array.
function getDuplicateReferences(object, state) {
  var objects = [], duplicatesIndexes = [], index2, length;
  inspectNode(object, objects, duplicatesIndexes);
  for (index2 = 0, length = duplicatesIndexes.length; index2 < length; index2 += 1) {
    state.duplicates.push(objects[duplicatesIndexes[index2]]);
  }
  state.usedDuplicates = new Array(length);
}
|
||
/**
 * Walk `object` depth-first, recording every object/array encountered in
 * `objects`. When a node is reached a second time, its index in `objects`
 * is appended (at most once) to `duplicatesIndexes` so the dumper can
 * later emit an anchor for it. Scalars and null are ignored.
 */
function inspectNode(object, objects, duplicatesIndexes) {
  if (object === null || typeof object !== "object") return;

  const seenAt = objects.indexOf(object);
  if (seenAt !== -1) {
    // Revisit: mark as duplicate exactly once, do not descend again.
    if (duplicatesIndexes.indexOf(seenAt) === -1) {
      duplicatesIndexes.push(seenAt);
    }
    return;
  }

  objects.push(object);
  if (Array.isArray(object)) {
    for (let i = 0; i < object.length; i += 1) {
      inspectNode(object[i], objects, duplicatesIndexes);
    }
  } else {
    const keys = Object.keys(object);
    for (let i = 0; i < keys.length; i += 1) {
      inspectNode(object[keys[i]], objects, duplicatesIndexes);
    }
  }
}
|
||
// Top-level dumper entry point: serialize `input` into a YAML document
// string terminated by a newline. Returns "" when nothing was dumpable
// (e.g. skipInvalid discarded the root).
function dump(input, options2) {
  options2 = options2 || {};
  var state = new State(options2);
  if (!state.noRefs) getDuplicateReferences(input, state); // collect anchor targets
  if (writeNode(state, 0, input, true, true)) return state.dump + "\n";
  return "";
}
|
||
// Like dump(), but forces DEFAULT_SAFE_SCHEMA (no JS-specific types).
function safeDump(input, options2) {
  return dump(input, common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options2));
}
|
||
// Public API of the dumper module.
module2.exports.dump = dump;
module2.exports.safeDump = safeDump;
|
||
}
|
||
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml.js
|
||
// Vendored js-yaml entry module: re-exports the loader, dumper, schemas,
// and replaces long-removed PyYAML-style APIs with throwing stubs.
var require_js_yaml = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/lib/js-yaml.js"(exports2, module2) {
    "use strict";
    var loader = require_loader();
    var dumper = require_dumper();
    // Stub that fails loudly when a removed API is called.
    function deprecated(name) {
      return function() {
        throw new Error("Function " + name + " is deprecated and cannot be used.");
      };
    }
    module2.exports.Type = require_type();
    module2.exports.Schema = require_schema();
    module2.exports.FAILSAFE_SCHEMA = require_failsafe();
    module2.exports.JSON_SCHEMA = require_json();
    module2.exports.CORE_SCHEMA = require_core();
    module2.exports.DEFAULT_SAFE_SCHEMA = require_default_safe();
    module2.exports.DEFAULT_FULL_SCHEMA = require_default_full();
    module2.exports.load = loader.load;
    module2.exports.loadAll = loader.loadAll;
    module2.exports.safeLoad = loader.safeLoad;
    module2.exports.safeLoadAll = loader.safeLoadAll;
    module2.exports.dump = dumper.dump;
    module2.exports.safeDump = dumper.safeDump;
    module2.exports.YAMLException = require_exception();
    // Legacy aliases kept for backward compatibility.
    module2.exports.MINIMAL_SCHEMA = require_failsafe();
    module2.exports.SAFE_SCHEMA = require_default_safe();
    module2.exports.DEFAULT_SCHEMA = require_default_full();
    // Removed APIs: calling any of these throws.
    module2.exports.scan = deprecated("scan");
    module2.exports.parse = deprecated("parse");
    module2.exports.compose = deprecated("compose");
    module2.exports.addConstructor = deprecated("addConstructor");
  }
});
|
||
|
||
// node_modules/gray-matter/node_modules/js-yaml/index.js
|
||
// Thin package root for the vendored js-yaml: re-exports lib/js-yaml.js.
var require_js_yaml2 = __commonJS({
  "node_modules/gray-matter/node_modules/js-yaml/index.js"(exports2, module2) {
    "use strict";
    var yaml2 = require_js_yaml();
    module2.exports = yaml2;
  }
});
|
||
|
||
// node_modules/gray-matter/lib/engines.js
|
||
// gray-matter's built-in front-matter engines: yaml, json, and a
// trusted-input-only javascript engine.
var require_engines = __commonJS({
  "node_modules/gray-matter/lib/engines.js"(exports, module) {
    "use strict";
    var yaml = require_js_yaml2();
    var engines = exports = module.exports;
    // YAML via js-yaml's safe variants (no arbitrary JS types).
    engines.yaml = {
      parse: yaml.safeLoad.bind(yaml),
      stringify: yaml.safeDump.bind(yaml)
    };
    engines.json = {
      parse: JSON.parse.bind(JSON),
      stringify: function(obj, options2) {
        const opts = Object.assign({ replacer: null, space: 2 }, options2);
        return JSON.stringify(obj, opts.replacer, opts.space);
      }
    };
    engines.javascript = {
      // SECURITY: this engine evals front matter as JavaScript; it must
      // only ever be run on fully trusted input.
      parse: function parse(str, options, wrap) {
        try {
          if (wrap !== false) {
            // Wrap in an IIFE so an object literal parses as an expression.
            str = "(function() {\nreturn " + str.trim() + ";\n}());";
          }
          return eval(str) || {};
        } catch (err) {
          // Retry unwrapped when the wrapper itself likely caused the error.
          if (wrap !== false && /(unexpected|identifier)/i.test(err.message)) {
            return parse(str, options, false);
          }
          throw new SyntaxError(err);
        }
      },
      stringify: function() {
        throw new Error("stringifying JavaScript is not supported");
      }
    };
  }
});
|
||
|
||
// node_modules/strip-bom-string/index.js
|
||
// strip-bom-string: remove a leading UTF-8 byte-order mark from a string.
var require_strip_bom_string = __commonJS({
  "node_modules/strip-bom-string/index.js"(exports2, module2) {
    "use strict";
    // Returns str2 without a leading U+FEFF; non-strings pass through.
    module2.exports = function(str2) {
      if (typeof str2 === "string" && str2.charAt(0) === "\uFEFF") {
        return str2.slice(1);
      }
      return str2;
    };
  }
});
|
||
|
||
// node_modules/gray-matter/lib/utils.js
|
||
// gray-matter internal utilities: property definition, type checks, and
// buffer/string normalization.
var require_utils = __commonJS({
  "node_modules/gray-matter/lib/utils.js"(exports2) {
    "use strict";
    var stripBom = require_strip_bom_string();
    var typeOf2 = require_kind_of();
    // Define a non-enumerable (but writable/configurable) property, so
    // helper fields stay out of enumeration/serialization.
    exports2.define = function(obj, key, val) {
      Reflect.defineProperty(obj, key, {
        enumerable: false,
        configurable: true,
        writable: true,
        value: val
      });
    };
    exports2.isBuffer = function(val) {
      return typeOf2(val) === "buffer";
    };
    exports2.isObject = function(val) {
      return typeOf2(val) === "object";
    };
    // Strings become Buffers; anything else passes through unchanged.
    exports2.toBuffer = function(input) {
      return typeof input === "string" ? Buffer.from(input) : input;
    };
    // Normalize input to a BOM-stripped string; throws for other types.
    exports2.toString = function(input) {
      if (exports2.isBuffer(input)) return stripBom(String(input));
      if (typeof input !== "string") {
        throw new TypeError("expected input to be a string or buffer");
      }
      return stripBom(input);
    };
    // null/undefined -> [], array -> itself, scalar -> [scalar].
    exports2.arrayify = function(val) {
      return val ? Array.isArray(val) ? val : [val] : [];
    };
    // Prefix test with an optional explicit length override.
    exports2.startsWith = function(str2, substr, len) {
      if (typeof len !== "number") len = substr.length;
      return str2.slice(0, len) === substr;
    };
  }
});
|
||
|
||
// node_modules/gray-matter/lib/defaults.js
|
||
// Normalize gray-matter options: delimiters, language, and engine registry.
var require_defaults = __commonJS({
  "node_modules/gray-matter/lib/defaults.js"(exports2, module2) {
    "use strict";
    var engines2 = require_engines();
    var utils = require_utils();
    module2.exports = function(options2) {
      const opts = Object.assign({}, options2);
      // Accept "delims" or "delimiters", string or array; default "---".
      opts.delimiters = utils.arrayify(opts.delims || opts.delimiters || "---");
      if (opts.delimiters.length === 1) {
        // A single delimiter is used for both open and close.
        opts.delimiters.push(opts.delimiters[0]);
      }
      opts.language = (opts.language || opts.lang || "yaml").toLowerCase();
      // User-supplied parsers/engines override the built-ins.
      opts.engines = Object.assign({}, engines2, opts.parsers, opts.engines);
      return opts;
    };
  }
});
|
||
|
||
// node_modules/gray-matter/lib/engine.js
|
||
// Resolve an engine by name (with aliases); bare functions become parsers.
var require_engine = __commonJS({
  "node_modules/gray-matter/lib/engine.js"(exports2, module2) {
    "use strict";
    module2.exports = function(name, options2) {
      // Try the literal name first, then its canonical alias.
      let engine = options2.engines[name] || options2.engines[aliase(name)];
      if (typeof engine === "undefined") {
        throw new Error('gray-matter engine "' + name + '" is not registered');
      }
      if (typeof engine === "function") {
        // A plain function is shorthand for a parse-only engine.
        engine = { parse: engine };
      }
      return engine;
    };
    // Map language aliases to their canonical engine names.
    function aliase(name) {
      switch (name.toLowerCase()) {
        case "js":
        case "javascript":
          return "javascript";
        case "coffee":
        case "coffeescript":
        case "cson":
          return "coffee";
        case "yaml":
        case "yml":
          return "yaml";
        default: {
          return name;
        }
      }
    }
  }
});
|
||
|
||
// node_modules/gray-matter/lib/stringify.js
|
||
// Re-assemble a file string from parsed front matter + content, using the
// engine's stringify. Supports matter(str).stringify()-style calls where
// data/options are omitted.
var require_stringify = __commonJS({
  "node_modules/gray-matter/lib/stringify.js"(exports2, module2) {
    "use strict";
    var typeOf2 = require_kind_of();
    var getEngine = require_engine();
    var defaults = require_defaults();
    module2.exports = function(file, data, options2) {
      if (data == null && options2 == null) {
        // Single-argument form: file object carries its own data.
        switch (typeOf2(file)) {
          case "object":
            data = file.data;
            options2 = {};
            break;
          case "string":
            return file; // already a serialized string
          default: {
            throw new TypeError("expected file to be a string or object");
          }
        }
      }
      const str2 = file.content;
      const opts = defaults(options2);
      if (data == null) {
        if (!opts.data) return file; // nothing to stringify
        data = opts.data;
      }
      const language = file.language || opts.language;
      const engine = getEngine(language, opts);
      if (typeof engine.stringify !== "function") {
        throw new TypeError('expected "' + language + '.stringify" to be a function');
      }
      // Explicit data wins over data already on the file.
      data = Object.assign({}, file.data, data);
      const open = opts.delimiters[0];
      const close = opts.delimiters[1];
      // NOTE(review): the raw options2 (not the normalized opts) is passed
      // to the engine here — confirm this matches upstream intent.
      const matter3 = engine.stringify(data, options2).trim();
      let buf = "";
      if (matter3 !== "{}") {
        // Skip the front-matter block entirely when data is empty.
        buf = newline(open) + newline(matter3) + newline(close);
      }
      if (typeof file.excerpt === "string" && file.excerpt !== "") {
        // Re-emit the excerpt only if the content no longer contains it.
        if (str2.indexOf(file.excerpt.trim()) === -1) {
          buf += newline(file.excerpt) + newline(close);
        }
      }
      return buf + newline(str2);
    };
    // Ensure a string ends with exactly one trailing newline.
    function newline(str2) {
      return str2.slice(-1) !== "\n" ? str2 + "\n" : str2;
    }
  }
});
|
||
|
||
// node_modules/gray-matter/lib/excerpt.js
|
||
// Populate file.excerpt: content up to an excerpt separator/delimiter, or
// delegate to a user-supplied excerpt function.
var require_excerpt = __commonJS({
  "node_modules/gray-matter/lib/excerpt.js"(exports2, module2) {
    "use strict";
    var defaults = require_defaults();
    module2.exports = function(file, options2) {
      const opts = defaults(options2);
      if (file.data == null) {
        file.data = {};
      }
      if (typeof opts.excerpt === "function") {
        // Fully custom excerpt extraction.
        return opts.excerpt(file, opts);
      }
      // Front matter's own excerpt_separator takes precedence.
      const sep = file.data.excerpt_separator || opts.excerpt_separator;
      if (sep == null && (opts.excerpt === false || opts.excerpt == null)) {
        return file; // excerpts disabled
      }
      // A string opts.excerpt is itself the delimiter; otherwise use the
      // separator or the opening front-matter delimiter.
      const delimiter = typeof opts.excerpt === "string" ? opts.excerpt : sep || opts.delimiters[0];
      const idx = file.content.indexOf(delimiter);
      if (idx !== -1) {
        file.excerpt = file.content.slice(0, idx);
      }
      return file;
    };
  }
});
|
||
|
||
// node_modules/gray-matter/lib/to-file.js
|
||
// Normalize arbitrary input (string, buffer, vinyl-like object) into the
// file object shape gray-matter works with, attaching non-enumerable
// helper properties (orig, language, matter, stringify).
var require_to_file = __commonJS({
  "node_modules/gray-matter/lib/to-file.js"(exports2, module2) {
    "use strict";
    var typeOf2 = require_kind_of();
    var stringify = require_stringify();
    var utils = require_utils();
    module2.exports = function(file) {
      if (typeOf2(file) !== "object") {
        file = { content: file }; // raw string/buffer input
      }
      if (typeOf2(file.data) !== "object") {
        file.data = {};
      }
      // Vinyl compatibility: "contents" is an alias for "content".
      if (file.contents && file.content == null) {
        file.content = file.contents;
      }
      // Non-enumerable helpers so they stay out of JSON/enumeration.
      utils.define(file, "orig", utils.toBuffer(file.content));
      utils.define(file, "language", file.language || "");
      utils.define(file, "matter", file.matter || "");
      utils.define(file, "stringify", function(data, options2) {
        if (options2 && options2.language) {
          file.language = options2.language;
        }
        return stringify(file, data, options2);
      });
      file.content = utils.toString(file.content);
      file.isEmpty = false;
      file.excerpt = "";
      return file;
    };
  }
});
|
||
|
||
// node_modules/gray-matter/lib/parse.js
|
||
// Parse a raw front-matter string with the engine registered for `language`.
var require_parse = __commonJS({
  "node_modules/gray-matter/lib/parse.js"(exports2, module2) {
    "use strict";
    var getEngine = require_engine();
    var defaults = require_defaults();
    module2.exports = function(language, str2, options2) {
      const opts = defaults(options2);
      const engine = getEngine(language, opts);
      if (typeof engine.parse !== "function") {
        throw new TypeError('expected "' + language + '.parse" to be a function');
      }
      return engine.parse(str2, opts);
    };
  }
});
|
||
|
||
// node_modules/gray-matter/index.js
|
||
// gray-matter main module: split a string/file into { data, content,
// excerpt, ... } by extracting and parsing its front matter.
var require_gray_matter = __commonJS({
  "node_modules/gray-matter/index.js"(exports2, module2) {
    "use strict";
    var fs = require("fs");
    var sections = require_section_matter();
    var defaults = require_defaults();
    var stringify = require_stringify();
    var excerpt = require_excerpt();
    var engines2 = require_engines();
    var toFile = require_to_file();
    var parse4 = require_parse();
    var utils = require_utils();
    // Entry point: parse `input` (string, buffer, or file-like object).
    // Results for option-less calls are cached by content string.
    function matter3(input, options2) {
      if (input === "") {
        return { data: {}, content: input, excerpt: "", orig: input };
      }
      let file = toFile(input);
      const cached = matter3.cache[file.content];
      if (!options2) {
        // Only default-option parses are cached (options change results).
        if (cached) {
          file = Object.assign({}, cached);
          file.orig = cached.orig; // keep the original buffer reference
          return file;
        }
        matter3.cache[file.content] = file;
      }
      return parseMatter(file, options2);
    }
    // Extract the front-matter block from file.content and parse it into
    // file.data; mutates and returns `file`.
    function parseMatter(file, options2) {
      const opts = defaults(options2);
      const open = opts.delimiters[0];
      const close = "\n" + opts.delimiters[1];
      let str2 = file.content;
      if (opts.language) {
        file.language = opts.language;
      }
      const openLen = open.length;
      if (!utils.startsWith(str2, open, openLen)) {
        // No front matter at all: still compute the excerpt.
        excerpt(file, opts);
        return file;
      }
      if (str2.charAt(openLen) === open.slice(-1)) {
        // An extra delimiter char (e.g. "----") is not front matter.
        return file;
      }
      str2 = str2.slice(openLen);
      const len = str2.length;
      // Optional language tag on the opening delimiter line ("---yaml").
      const language = matter3.language(str2, opts);
      if (language.name) {
        file.language = language.name;
        str2 = str2.slice(language.raw.length);
      }
      let closeIndex = str2.indexOf(close);
      if (closeIndex === -1) {
        closeIndex = len; // unterminated block: everything is front matter
      }
      file.matter = str2.slice(0, closeIndex);
      // Ignore comment-only blocks when deciding emptiness.
      const block = file.matter.replace(/^\s*#[^\n]+/gm, "").trim();
      if (block === "") {
        file.isEmpty = true;
        file.empty = file.content;
        file.data = {};
      } else {
        file.data = parse4(file.language, file.matter, opts);
      }
      if (closeIndex === len) {
        file.content = "";
      } else {
        file.content = str2.slice(closeIndex + close.length);
        // Strip a single CR and/or LF left over from the closing line.
        if (file.content[0] === "\r") {
          file.content = file.content.slice(1);
        }
        if (file.content[0] === "\n") {
          file.content = file.content.slice(1);
        }
      }
      excerpt(file, opts);
      if (opts.sections === true || typeof opts.section === "function") {
        sections(file, opts.section);
      }
      return file;
    }
    matter3.engines = engines2;
    // Stringify a file (or raw string) back to front matter + content.
    matter3.stringify = function(file, data, options2) {
      if (typeof file === "string") file = matter3(file, options2);
      return stringify(file, data, options2);
    };
    // Read and parse a file from disk, recording its path.
    matter3.read = function(filepath, options2) {
      const str2 = fs.readFileSync(filepath, "utf8");
      const file = matter3(str2, options2);
      file.path = filepath;
      return file;
    };
    // True when the string starts with the opening delimiter.
    matter3.test = function(str2, options2) {
      return utils.startsWith(str2, defaults(options2).delimiters[0]);
    };
    // Detect a language tag following the opening delimiter.
    // NOTE(review): the inner matter3.test(str2) call drops options2, so a
    // custom open delimiter is not honored here — confirm against upstream.
    matter3.language = function(str2, options2) {
      const opts = defaults(options2);
      const open = opts.delimiters[0];
      if (matter3.test(str2)) {
        str2 = str2.slice(open.length);
      }
      const language = str2.slice(0, str2.search(/\r?\n/));
      return {
        raw: language,
        name: language ? language.trim() : ""
      };
    };
    // Content-keyed cache for option-less parses.
    matter3.cache = {};
    matter3.clearCache = function() {
      matter3.cache = {};
    };
    module2.exports = matter3;
  }
});
|
||
|
||
// node_modules/extend/index.js
|
||
// extend: jQuery-style shallow/deep object merge with explicit
// "__proto__" handling to avoid prototype pollution.
var require_extend = __commonJS({
  "node_modules/extend/index.js"(exports2, module2) {
    "use strict";
    var hasOwn = Object.prototype.hasOwnProperty;
    var toStr = Object.prototype.toString;
    var defineProperty = Object.defineProperty;
    var gOPD = Object.getOwnPropertyDescriptor;
    // Array.isArray with a pre-ES5 fallback.
    var isArray = function isArray2(arr) {
      if (typeof Array.isArray === "function") {
        return Array.isArray(arr);
      }
      return toStr.call(arr) === "[object Array]";
    };
    // "Plain" object: literal or Object.create-style, not a class instance.
    var isPlainObject4 = function isPlainObject5(obj) {
      if (!obj || toStr.call(obj) !== "[object Object]") {
        return false;
      }
      var hasOwnConstructor = hasOwn.call(obj, "constructor");
      var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, "isPrototypeOf");
      // A constructor that is neither own nor Object-like means a custom class.
      if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) {
        return false;
      }
      // Own properties enumerate before inherited ones, so if the last
      // enumerated key is own, all keys are own.
      var key;
      for (key in obj) {
      }
      return typeof key === "undefined" || hasOwn.call(obj, key);
    };
    // Assign a value; "__proto__" is set as an own data property so the
    // merge cannot pollute the prototype chain.
    var setProperty = function setProperty2(target, options2) {
      if (defineProperty && options2.name === "__proto__") {
        defineProperty(target, options2.name, {
          enumerable: true,
          configurable: true,
          value: options2.newValue,
          writable: true
        });
      } else {
        target[options2.name] = options2.newValue;
      }
    };
    // Read a value; "__proto__" is only returned when it is an own property.
    var getProperty = function getProperty2(obj, name) {
      if (name === "__proto__") {
        if (!hasOwn.call(obj, name)) {
          return void 0;
        } else if (gOPD) {
          return gOPD(obj, name).value;
        }
      }
      return obj[name];
    };
    // extend([deep,] target, ...sources): merge sources into target.
    // A leading boolean `true` enables recursive (deep) merging of plain
    // objects and arrays. Returns the (possibly replaced) target.
    module2.exports = function extend2() {
      var options2, name, src, copy, copyIsArray, clone;
      var target = arguments[0];
      var i = 1;
      var length = arguments.length;
      var deep = false;
      if (typeof target === "boolean") {
        deep = target;
        target = arguments[1] || {};
        i = 2;
      }
      if (target == null || typeof target !== "object" && typeof target !== "function") {
        target = {}; // coerce primitive targets to a fresh object
      }
      for (; i < length; ++i) {
        options2 = arguments[i];
        if (options2 != null) {
          for (name in options2) {
            src = getProperty(target, name);
            copy = getProperty(options2, name);
            if (target !== copy) {
              // avoid never-ending self-merge loops
              if (deep && copy && (isPlainObject4(copy) || (copyIsArray = isArray(copy)))) {
                if (copyIsArray) {
                  copyIsArray = false;
                  clone = src && isArray(src) ? src : [];
                } else {
                  clone = src && isPlainObject4(src) ? src : {};
                }
                // Recurse; original nested objects are never moved, only copied.
                setProperty(target, { name, newValue: extend2(deep, clone, copy) });
              } else if (typeof copy !== "undefined") {
                setProperty(target, { name, newValue: copy });
              }
            }
          }
        }
      }
      return target;
    };
  }
});
|
||
|
||
// node_modules/fast-content-type-parse/index.js
|
||
// fast-content-type-parse: parse an HTTP Content-Type header into
// { type, parameters }. parse() throws on malformed input; safeParse()
// returns a frozen default instead.
var require_fast_content_type_parse = __commonJS({
  "node_modules/fast-content-type-parse/index.js"(exports2, module2) {
    "use strict";
    // Parameter bags use a null prototype so header names like
    // "constructor" cannot collide with Object.prototype.
    var NullObject = function NullObject2() {
    };
    NullObject.prototype = /* @__PURE__ */ Object.create(null);
    // "; key=value" parameters, with optional quoted-string values.
    var paramRE = /; *([!#$%&'*+.^\w`|~-]+)=("(?:[\v\u0020\u0021\u0023-\u005b\u005d-\u007e\u0080-\u00ff]|\\[\v\u0020-\u00ff])*"|[!#$%&'*+.^\w`|~-]+) */gu;
    // Backslash-escaped characters inside a quoted string.
    var quotedPairRE = /\\([\v\u0020-\u00ff])/gu;
    // "type/subtype" media type shape.
    var mediaTypeRE = /^[!#$%&'*+.^\w|~-]+\/[!#$%&'*+.^\w|~-]+$/u;
    // Shared frozen fallback returned by safeParse on any failure.
    var defaultContentType = { type: "", parameters: new NullObject() };
    Object.freeze(defaultContentType.parameters);
    Object.freeze(defaultContentType);
    // Strict parser: throws TypeError on any malformed header.
    function parse4(header) {
      if (typeof header !== "string") {
        throw new TypeError("argument header is required and must be a string");
      }
      let index2 = header.indexOf(";");
      const type = index2 !== -1 ? header.slice(0, index2).trim() : header.trim();
      if (mediaTypeRE.test(type) === false) {
        throw new TypeError("invalid media type");
      }
      const result = {
        type: type.toLowerCase(),
        parameters: new NullObject()
      };
      if (index2 === -1) {
        return result; // no parameters at all
      }
      let key;
      let match;
      let value;
      // paramRE is /g: seed lastIndex at the ";" and require each match to
      // start exactly where the previous one ended (no gaps allowed).
      paramRE.lastIndex = index2;
      while (match = paramRE.exec(header)) {
        if (match.index !== index2) {
          throw new TypeError("invalid parameter format");
        }
        index2 += match[0].length;
        key = match[1].toLowerCase();
        value = match[2];
        if (value[0] === '"') {
          // Unquote and unescape a quoted-string value.
          value = value.slice(1, value.length - 1);
          quotedPairRE.test(value) && (value = value.replace(quotedPairRE, "$1"));
        }
        result.parameters[key] = value;
      }
      if (index2 !== header.length) {
        throw new TypeError("invalid parameter format"); // trailing junk
      }
      return result;
    }
    // Lenient parser: same algorithm as parse4, but every failure path
    // returns the frozen defaultContentType instead of throwing.
    // (Duplicated rather than shared, presumably for performance — verify
    // against upstream before consolidating.)
    function safeParse2(header) {
      if (typeof header !== "string") {
        return defaultContentType;
      }
      let index2 = header.indexOf(";");
      const type = index2 !== -1 ? header.slice(0, index2).trim() : header.trim();
      if (mediaTypeRE.test(type) === false) {
        return defaultContentType;
      }
      const result = {
        type: type.toLowerCase(),
        parameters: new NullObject()
      };
      if (index2 === -1) {
        return result;
      }
      let key;
      let match;
      let value;
      paramRE.lastIndex = index2;
      while (match = paramRE.exec(header)) {
        if (match.index !== index2) {
          return defaultContentType;
        }
        index2 += match[0].length;
        key = match[1].toLowerCase();
        value = match[2];
        if (value[0] === '"') {
          value = value.slice(1, value.length - 1);
          quotedPairRE.test(value) && (value = value.replace(quotedPairRE, "$1"));
        }
        result.parameters[key] = value;
      }
      if (index2 !== header.length) {
        return defaultContentType;
      }
      return result;
    }
    module2.exports.default = { parse: parse4, safeParse: safeParse2 };
    module2.exports.parse = parse4;
    module2.exports.safeParse = safeParse2;
    module2.exports.defaultContentType = defaultContentType;
  }
});
|
||
|
||
// main.ts
|
||
// main.ts — plugin entry point: export the plugin class in CommonJS form
// (Obsidian loads plugins via require) and pull in runtime dependencies.
var main_exports = {};
__export(main_exports, {
  default: () => ShareAsGistPlugin
});
module.exports = __toCommonJS(main_exports);
var import_obsidian = require("obsidian");
var import_gray_matter2 = __toESM(require_gray_matter());
|
||
|
||
// node_modules/mdast-util-to-string/lib/index.js
|
||
// Shared default so that omitting `options2` allocates nothing.
var emptyOptions = {};
// Get the plain-text content of an mdast node, node list, or value.
// `includeImageAlt` and `includeHtml` both default to true and are only
// honored when passed as actual booleans.
function toString(value, options2) {
  const settings = options2 || emptyOptions;
  let includeImageAlt = true;
  if (typeof settings.includeImageAlt === "boolean") {
    includeImageAlt = settings.includeImageAlt;
  }
  let includeHtml = true;
  if (typeof settings.includeHtml === "boolean") {
    includeHtml = settings.includeHtml;
  }
  return one(value, includeImageAlt, includeHtml);
}
|
||
// Serialize a single value: literal node values, image alt text, children,
// or (for arrays) every entry; anything else becomes the empty string.
function one(value, includeImageAlt, includeHtml) {
  if (node(value)) {
    if ("value" in value) {
      // HTML nodes are dropped when includeHtml is false.
      return value.type === "html" && !includeHtml ? "" : value.value;
    }
    if (includeImageAlt && "alt" in value && value.alt) {
      return value.alt;
    }
    if ("children" in value) {
      return all(value.children, includeImageAlt, includeHtml);
    }
  }
  // Arrays are objects too, so node-shaped checks above fall through here.
  return Array.isArray(value) ? all(value, includeImageAlt, includeHtml) : "";
}
|
||
// Serialize every entry of `values` and concatenate the results.
function all(values, includeImageAlt, includeHtml) {
  const parts = [];
  for (let i = 0; i < values.length; i++) {
    parts.push(one(values[i], includeImageAlt, includeHtml));
  }
  return parts.join("");
}
|
||
// Whether `value` is a truthy object (mdast nodes are plain objects;
// null and primitives are rejected).
function node(value) {
  if (!value) {
    return false;
  }
  return typeof value === "object";
}
|
||
|
||
// node_modules/decode-named-character-reference/index.dom.js
|
||
// Scratch element used to let the DOM decode entity names for us.
var element = document.createElement("i");
// Decode a named HTML character reference (e.g. "amp" -> "&").
// Returns the decoded string, or `false` when the name is unknown.
function decodeNamedCharacterReference(value) {
  const raw = "&" + value + ";";
  element.innerHTML = raw;
  const decoded = element.textContent;
  // A result still ending in ";" (char code 59) means the reference was
  // not consumed — except "semi", which legitimately decodes to ";".
  if (decoded.charCodeAt(decoded.length - 1) === 59 && value !== "semi") {
    return false;
  }
  // Unchanged output means nothing was decoded.
  return decoded === raw ? false : decoded;
}
|
||
|
||
// node_modules/micromark-util-chunked/index.js
|
||
// Like Array.prototype.splice, but inserts `items` in chunks of 10 000 so
// that very large insertions never exceed the engine's argument limit.
// Mutates `list4` in place; returns nothing.
function splice(list4, start, remove, items) {
  const end = list4.length;
  // Normalize `start` the way Array#splice does: negative counts from the
  // end, and out-of-range values are clamped.
  if (start < 0) {
    start = -start > end ? 0 : end + start;
  } else if (start > end) {
    start = end;
  }
  remove = remove > 0 ? remove : 0;
  if (items.length < 1e4) {
    // Small enough to pass directly as arguments.
    list4.splice(start, remove, ...Array.from(items));
    return;
  }
  // Large insertion: remove first, then insert chunk by chunk.
  if (remove) list4.splice(start, remove);
  let offset = 0;
  while (offset < items.length) {
    const chunk = items.slice(offset, offset + 1e4);
    list4.splice(start, 0, ...chunk);
    offset += 1e4;
    start += 1e4;
  }
}
|
||
// Append `items` to `list4` (chunk-safe via splice) and return the list
// that now holds everything. When `list4` is empty, `items` itself is
// returned unmodified instead of copying.
function push(list4, items) {
  if (list4.length === 0) {
    return items;
  }
  splice(list4, list4.length, 0, items);
  return list4;
}
|
||
|
||
// node_modules/micromark-util-combine-extensions/index.js
|
||
var hasOwnProperty = {}.hasOwnProperty;
// Merge several micromark syntax extensions into one combined extension,
// folding each into a fresh accumulator in order.
function combineExtensions(extensions) {
  const all3 = {};
  for (let i = 0; i < extensions.length; i++) {
    syntaxExtension(all3, extensions[i]);
  }
  return all3;
}
|
||
// Fold one syntax extension into the accumulator `all3`: for every hook
// and character code, merge the extension's construct list into the
// accumulator's list (creating maps/lists on first sight).
function syntaxExtension(all3, extension2) {
  for (const hook2 in extension2) {
    const maybe = hasOwnProperty.call(all3, hook2) ? all3[hook2] : void 0;
    const left = maybe || (all3[hook2] = {});
    const right = extension2[hook2];
    if (!right) {
      continue;
    }
    for (const code2 in right) {
      if (!hasOwnProperty.call(left, code2)) {
        left[code2] = [];
      }
      const value = right[code2];
      // Normalize to a list: arrays as-is, single constructs wrapped,
      // nullish values become empty.
      const list = Array.isArray(value) ? value : value ? [value] : [];
      constructs(
        // @ts-expect-error Looks like a list.
        left[code2],
        list
      );
    }
  }
}
|
||
// Merge new constructs into `existing`: entries marked `add: "after"` are
// appended, all others are collected and inserted at the front in order.
function constructs(existing, list4) {
  const before = [];
  for (const construct of list4) {
    if (construct.add === "after") {
      existing.push(construct);
    } else {
      before.push(construct);
    }
  }
  splice(existing, 0, 0, before);
}
|
||
|
||
// node_modules/micromark-util-decode-numeric-character-reference/index.js
|
||
// Decode a numeric character reference (digits in the given base) into a
// one-character string. Disallowed code points — C0/C1 controls (except
// HT, LF, FF, CR, space), lone surrogates, noncharacters, and anything
// beyond U+10FFFF — yield U+FFFD REPLACEMENT CHARACTER instead.
function decodeNumericCharacterReference(value, base) {
  const code2 = Number.parseInt(value, base);
  // C0 controls except HT/LF/FF/CR/space, plus DEL and C1 controls.
  const control = code2 < 9 || code2 === 11 || code2 > 13 && code2 < 32 || code2 > 126 && code2 < 160;
  // Lone high/low surrogates.
  const surrogate = code2 > 55295 && code2 < 57344;
  // Noncharacters: U+FDD0..U+FDEF and any U+xFFFE / U+xFFFF.
  /* eslint-disable no-bitwise */
  const noncharacter = code2 > 64975 && code2 < 65008 || (code2 & 65535) === 65535 || (code2 & 65535) === 65534;
  /* eslint-enable no-bitwise */
  if (control || surrogate || noncharacter || code2 > 1114111) {
    return "\uFFFD";
  }
  return String.fromCodePoint(code2);
}
|
||
|
||
// node_modules/micromark-util-normalize-identifier/index.js
|
||
// Normalize a link/definition identifier: collapse runs of whitespace to
// single spaces, trim one leading/trailing space, then case-fold.
// NOTE: the lower-then-upper round trip is intentional — it folds
// characters whose lowercase and uppercase mappings disagree, so matching
// identifiers normalize to the same string. Do not "simplify" it away.
function normalizeIdentifier(value) {
  const collapsed = value.replace(/[\t\n\r ]+/g, " ");
  const trimmed = collapsed.replace(/^ | $/g, "");
  return trimmed.toLowerCase().toUpperCase();
}
|
||
|
||
// node_modules/micromark-util-character/index.js
|
||
// Character-class predicates over micromark character codes. Codes are
// char codes, `null` for EOF, or negative values for micromark's special
// whitespace codes (virtual tabs and line endings).
var asciiAlpha = regexCheck(/[A-Za-z]/);
var asciiAlphanumeric = regexCheck(/[\dA-Za-z]/);
var asciiAtext = regexCheck(/[#-'*+\--9=?A-Z^-~]/);
// True for the special whitespace codes (negative), C0 controls, and DEL.
function asciiControl(code2) {
  if (code2 === null) {
    return false;
  }
  return code2 < 32 || code2 === 127;
}
var asciiDigit = regexCheck(/\d/);
var asciiHexDigit = regexCheck(/[\dA-Fa-f]/);
var asciiPunctuation = regexCheck(/[!-/:-@[-`{-~]/);
// True for virtual line-ending codes (below -2).
function markdownLineEnding(code2) {
  return code2 !== null && code2 < -2;
}
// True for any virtual whitespace code or a literal space.
function markdownLineEndingOrSpace(code2) {
  return code2 !== null && (code2 < 0 || code2 === 32);
}
// True for virtual tab codes (-2, -1) or a literal space.
function markdownSpace(code2) {
  return code2 === -2 || code2 === -1 || code2 === 32;
}
var unicodePunctuation = regexCheck(new RegExp("\\p{P}|\\p{S}", "u"));
var unicodeWhitespace = regexCheck(/\s/);
// Build a predicate that is true when the code is a real character
// (non-null, non-negative) matching `regex2`.
function regexCheck(regex2) {
  return function check(code2) {
    if (code2 === null || code2 < 0) {
      return false;
    }
    return regex2.test(String.fromCharCode(code2));
  };
}
|
||
|
||
// node_modules/micromark-factory-space/index.js
|
||
// State factory: tokenize a run of markdown spaces/tabs as one token of
// `type` (at most `max` characters when given), then continue with `ok3`.
function factorySpace(effects, ok3, type, max) {
  const limit = max ? max - 1 : Number.POSITIVE_INFINITY;
  let size = 0;
  return start;
  function start(code2) {
    if (!markdownSpace(code2)) {
      // No whitespace at all: no token is opened.
      return ok3(code2);
    }
    effects.enter(type);
    return prefix(code2);
  }
  function prefix(code2) {
    const withinLimit = markdownSpace(code2) && size++ < limit;
    if (withinLimit) {
      effects.consume(code2);
      return prefix;
    }
    effects.exit(type);
    return ok3(code2);
  }
}
|
||
|
||
// node_modules/micromark/lib/initialize/content.js
|
||
// Content-level initial tokenizer: tries the "content initial" constructs
// (e.g. definitions); anything else becomes a paragraph of chained text
// chunks, one per line.
var content = {
  tokenize: initializeContent
};
function initializeContent(effects) {
  const contentStart = effects.attempt(this.parser.constructs.contentInitial, afterContentStartConstruct, paragraphInitial);
  // Most recent chunkText token, so consecutive chunks can be linked.
  let previous2;
  return contentStart;
  // A construct matched: consume the line ending (or EOF) and try again.
  function afterContentStartConstruct(code2) {
    if (code2 === null) {
      effects.consume(code2);
      return;
    }
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    return factorySpace(effects, contentStart, "linePrefix");
  }
  // Nothing matched: everything from here on is one paragraph.
  function paragraphInitial(code2) {
    effects.enter("paragraph");
    return lineStart(code2);
  }
  // Open a new text chunk and thread it onto the previous one.
  function lineStart(code2) {
    const token = effects.enter("chunkText", {
      contentType: "text",
      previous: previous2
    });
    if (previous2) {
      previous2.next = token;
    }
    previous2 = token;
    return data(code2);
  }
  // Consume paragraph data; each line ending closes the current chunk.
  function data(code2) {
    if (code2 === null) {
      effects.exit("chunkText");
      effects.exit("paragraph");
      effects.consume(code2);
      return;
    }
    if (markdownLineEnding(code2)) {
      effects.consume(code2);
      effects.exit("chunkText");
      return lineStart;
    }
    effects.consume(code2);
    return data;
  }
}
|
||
|
||
// node_modules/micromark/lib/initialize/document.js
|
||
// Document-level tokenizer: maintains the stack of open containers (block
// quotes, list items), continues them at the start of each line, opens new
// ones, and pipes the rest of every line into a child "flow" tokenizer.
var document2 = {
  tokenize: initializeDocument
};
var containerConstruct = {
  tokenize: tokenizeContainer
};
function initializeDocument(effects) {
  const self2 = this;
  // Stack of [construct, containerState] pairs, one per open container.
  const stack = [];
  // Number of containers successfully continued on the current line.
  let continued = 0;
  // Child flow tokenizer and the last chunkFlow token written to it.
  let childFlow;
  let childToken;
  let lineStartOffset;
  return start;
  function start(code2) {
    // First, try to continue each already-open container in order.
    if (continued < stack.length) {
      const item = stack[continued];
      self2.containerState = item[1];
      return effects.attempt(item[0].continuation, documentContinue, checkNewContainers)(code2);
    }
    // All containers continued (or none open): look for new ones.
    return checkNewContainers(code2);
  }
  function documentContinue(code2) {
    continued++;
    // The continued container asked for the current flow to be closed
    // (e.g. a new list item): flush the child tokenizer, then move the
    // container-exit events to just after the last flow chunk so event
    // order and positions stay consistent.
    if (self2.containerState._closeFlow) {
      self2.containerState._closeFlow = void 0;
      if (childFlow) {
        closeFlow();
      }
      const indexBeforeExits = self2.events.length;
      let indexBeforeFlow = indexBeforeExits;
      let point3;
      // Find the point where the last flow chunk ended.
      while (indexBeforeFlow--) {
        if (self2.events[indexBeforeFlow][0] === "exit" && self2.events[indexBeforeFlow][1].type === "chunkFlow") {
          point3 = self2.events[indexBeforeFlow][1].end;
          break;
        }
      }
      exitContainers(continued);
      // Rewrite the end point of the freshly added exit events.
      let index2 = indexBeforeExits;
      while (index2 < self2.events.length) {
        self2.events[index2][1].end = __spreadValues({}, point3);
        index2++;
      }
      // Reinsert the exits right after the flow chunk and truncate.
      splice(self2.events, indexBeforeFlow + 1, 0, self2.events.slice(indexBeforeExits));
      self2.events.length = index2;
      return checkNewContainers(code2);
    }
    return start(code2);
  }
  function checkNewContainers(code2) {
    if (continued === stack.length) {
      if (!childFlow) {
        return documentContinued(code2);
      }
      // Concrete constructs (such as fenced code) cannot be interrupted
      // by new containers.
      if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) {
        return flowStart(code2);
      }
      self2.interrupt = Boolean(childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack);
    }
    // Probe (without consuming) whether a new container starts here.
    self2.containerState = {};
    return effects.check(containerConstruct, thereIsANewContainer, thereIsNoNewContainer)(code2);
  }
  function thereIsANewContainer(code2) {
    if (childFlow) closeFlow();
    exitContainers(continued);
    return documentContinued(code2);
  }
  function thereIsNoNewContainer(code2) {
    // Mark this line as "lazy" when fewer containers matched than are open.
    self2.parser.lazy[self2.now().line] = continued !== stack.length;
    lineStartOffset = self2.now().offset;
    return flowStart(code2);
  }
  function documentContinued(code2) {
    self2.containerState = {};
    return effects.attempt(containerConstruct, containerContinue, flowStart)(code2);
  }
  function containerContinue(code2) {
    // A new container opened: push it and keep looking for more.
    continued++;
    stack.push([self2.currentConstruct, self2.containerState]);
    return documentContinued(code2);
  }
  function flowStart(code2) {
    if (code2 === null) {
      if (childFlow) closeFlow();
      exitContainers(0);
      effects.consume(code2);
      return;
    }
    // Lazily create the child flow tokenizer on first use.
    childFlow = childFlow || self2.parser.flow(self2.now());
    effects.enter("chunkFlow", {
      _tokenizer: childFlow,
      contentType: "flow",
      previous: childToken
    });
    return flowContinue(code2);
  }
  function flowContinue(code2) {
    if (code2 === null) {
      writeToChild(effects.exit("chunkFlow"), true);
      exitContainers(0);
      effects.consume(code2);
      return;
    }
    if (markdownLineEnding(code2)) {
      effects.consume(code2);
      writeToChild(effects.exit("chunkFlow"));
      // Reset per-line state for the next line.
      continued = 0;
      self2.interrupt = void 0;
      return start;
    }
    effects.consume(code2);
    return flowContinue;
  }
  // Feed a chunk into the child flow tokenizer. When the line was lazy
  // but turned out not to belong to the flow, relocate container exits.
  function writeToChild(token, endOfFile) {
    const stream = self2.sliceStream(token);
    if (endOfFile) stream.push(null);
    token.previous = childToken;
    if (childToken) childToken.next = token;
    childToken = token;
    childFlow.defineSkip(token.start);
    childFlow.write(stream);
    if (self2.parser.lazy[token.start.line]) {
      let index2 = childFlow.events.length;
      while (index2--) {
        if (
          // The token starts before the line ending…
          childFlow.events[index2][1].start.offset < lineStartOffset && // …and either is not ended yet…
          (!childFlow.events[index2][1].end || // …or ends after it.
          childFlow.events[index2][1].end.offset > lineStartOffset)
        ) {
          // The line is part of an ongoing flow construct: nothing to fix.
          return;
        }
      }
      // Otherwise move the container exits after the second-to-last flow
      // chunk (same event surgery as in documentContinue above).
      const indexBeforeExits = self2.events.length;
      let indexBeforeFlow = indexBeforeExits;
      let seen;
      let point3;
      while (indexBeforeFlow--) {
        if (self2.events[indexBeforeFlow][0] === "exit" && self2.events[indexBeforeFlow][1].type === "chunkFlow") {
          if (seen) {
            point3 = self2.events[indexBeforeFlow][1].end;
            break;
          }
          seen = true;
        }
      }
      exitContainers(continued);
      index2 = indexBeforeExits;
      while (index2 < self2.events.length) {
        self2.events[index2][1].end = __spreadValues({}, point3);
        index2++;
      }
      splice(self2.events, indexBeforeFlow + 1, 0, self2.events.slice(indexBeforeExits));
      self2.events.length = index2;
    }
  }
  // Close every container deeper than `size`, invoking its exit hook.
  function exitContainers(size) {
    let index2 = stack.length;
    while (index2-- > size) {
      const entry = stack[index2];
      self2.containerState = entry[1];
      entry[0].exit.call(self2, effects);
    }
    stack.length = size;
  }
  // Flush and discard the child flow tokenizer.
  function closeFlow() {
    childFlow.write([null]);
    childToken = void 0;
    childFlow = void 0;
    self2.containerState._closeFlow = void 0;
  }
}
|
||
// Try to start any known container construct, allowing up to 3 columns of
// leading indentation (unlimited when indented code is disabled).
function tokenizeContainer(effects, ok3, nok) {
  return factorySpace(effects, effects.attempt(this.parser.constructs.document, ok3, nok), "linePrefix", this.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4);
}
|
||
|
||
// node_modules/micromark-util-classify-character/index.js
|
||
// Classify a character for attention (emphasis/strong) boundaries:
// 1 -> whitespace or EOF, 2 -> punctuation, undefined -> anything else.
function classifyCharacter(code2) {
  const isWhitespaceLike = code2 === null || markdownLineEndingOrSpace(code2) || unicodeWhitespace(code2);
  if (isWhitespaceLike) {
    return 1;
  }
  return unicodePunctuation(code2) ? 2 : void 0;
}
|
||
|
||
// node_modules/micromark-util-resolve-all/index.js
|
||
// Run each construct's `resolveAll` over the event list, threading the
// result through. Each distinct resolver (by function identity) runs at
// most once, even when shared by several constructs.
function resolveAll(constructs2, events, context) {
  const called = [];
  for (const construct of constructs2) {
    const resolve = construct.resolveAll;
    if (!resolve || called.includes(resolve)) {
      continue;
    }
    events = resolve(events, context);
    called.push(resolve);
  }
  return events;
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/attention.js
|
||
// Attention (emphasis / strong). Sequences of markers are tokenized
// greedily; `resolveAll` afterwards pairs openers with closers and
// rewrites the events, per the CommonMark delimiter algorithm.
var attention = {
  name: "attention",
  resolveAll: resolveAllAttention,
  tokenize: tokenizeAttention
};
// Walk the events; for every closing attention sequence, search backwards
// for a compatible opener and splice in strong/emphasis events.
function resolveAllAttention(events, context) {
  let index2 = -1;
  let open;
  let group;
  let text4;
  let openingSequence;
  let closingSequence;
  let use;
  let nextEvents;
  let offset;
  while (++index2 < events.length) {
    if (events[index2][0] === "enter" && events[index2][1].type === "attentionSequence" && events[index2][1]._close) {
      open = index2;
      while (open--) {
        if (events[open][0] === "exit" && events[open][1].type === "attentionSequence" && events[open][1]._open && // If the markers are the same:
        context.sliceSerialize(events[open][1]).charCodeAt(0) === context.sliceSerialize(events[index2][1]).charCodeAt(0)) {
          // CommonMark "multiple of 3" rule for sequences that can both
          // open and close: skip incompatible pairings.
          if ((events[open][1]._close || events[index2][1]._open) && (events[index2][1].end.offset - events[index2][1].start.offset) % 3 && !((events[open][1].end.offset - events[open][1].start.offset + events[index2][1].end.offset - events[index2][1].start.offset) % 3)) {
            continue;
          }
          // Use 2 markers from each side for strong, otherwise 1 for emphasis.
          use = events[open][1].end.offset - events[open][1].start.offset > 1 && events[index2][1].end.offset - events[index2][1].start.offset > 1 ? 2 : 1;
          const start = __spreadValues({}, events[open][1].end);
          const end = __spreadValues({}, events[index2][1].start);
          movePoint(start, -use);
          movePoint(end, use);
          openingSequence = {
            type: use > 1 ? "strongSequence" : "emphasisSequence",
            start,
            end: __spreadValues({}, events[open][1].end)
          };
          closingSequence = {
            type: use > 1 ? "strongSequence" : "emphasisSequence",
            start: __spreadValues({}, events[index2][1].start),
            end
          };
          text4 = {
            type: use > 1 ? "strongText" : "emphasisText",
            start: __spreadValues({}, events[open][1].end),
            end: __spreadValues({}, events[index2][1].start)
          };
          group = {
            type: use > 1 ? "strong" : "emphasis",
            start: __spreadValues({}, openingSequence.start),
            end: __spreadValues({}, closingSequence.end)
          };
          // Shrink the original sequences by the markers just consumed.
          events[open][1].end = __spreadValues({}, openingSequence.start);
          events[index2][1].start = __spreadValues({}, closingSequence.end);
          nextEvents = [];
          // Leftover opening markers stay as their own (shrunk) sequence.
          if (events[open][1].end.offset - events[open][1].start.offset) {
            nextEvents = push(nextEvents, [["enter", events[open][1], context], ["exit", events[open][1], context]]);
          }
          nextEvents = push(nextEvents, [["enter", group, context], ["enter", openingSequence, context], ["exit", openingSequence, context], ["enter", text4, context]]);
          // Re-resolve everything between the two sequences.
          nextEvents = push(nextEvents, resolveAll(context.parser.constructs.insideSpan.null, events.slice(open + 1, index2), context));
          nextEvents = push(nextEvents, [["exit", text4, context], ["enter", closingSequence, context], ["exit", closingSequence, context], ["exit", group, context]]);
          // Leftover closing markers likewise stay behind.
          if (events[index2][1].end.offset - events[index2][1].start.offset) {
            offset = 2;
            nextEvents = push(nextEvents, [["enter", events[index2][1], context], ["exit", events[index2][1], context]]);
          } else {
            offset = 0;
          }
          splice(events, open - 1, index2 - open + 3, nextEvents);
          // Continue scanning just after the spliced-in events.
          index2 = open + nextEvents.length - offset - 2;
          break;
        }
      }
    }
  }
  // Demote any sequences that never found a partner to plain data.
  index2 = -1;
  while (++index2 < events.length) {
    if (events[index2][1].type === "attentionSequence") {
      events[index2][1].type = "data";
    }
  }
  return events;
}
|
||
// Tokenize a run of one attention marker ("*" or "_", code 42/95) and
// record on the token whether it may open and/or close emphasis, using
// the CommonMark left-/right-flanking rules.
function tokenizeAttention(effects, ok3) {
  const attentionMarkers2 = this.parser.constructs.attentionMarkers.null;
  const previous2 = this.previous;
  const before = classifyCharacter(previous2);
  let marker;
  return start;
  function start(code2) {
    marker = code2;
    effects.enter("attentionSequence");
    return inside(code2);
  }
  function inside(code2) {
    // Consume every repeat of the same marker.
    if (code2 === marker) {
      effects.consume(code2);
      return inside;
    }
    const token = effects.exit("attentionSequence");
    const after = classifyCharacter(code2);
    // 1 = whitespace, 2 = punctuation, undefined = other (see
    // classifyCharacter). Extension markers always allow open/close.
    const open = !after || after === 2 && before || attentionMarkers2.includes(code2);
    const close = !before || before === 2 && after || attentionMarkers2.includes(previous2);
    // "*" (42) uses the flanking result directly; "_" additionally
    // requires a punctuation/whitespace boundary on the relevant side.
    token._open = Boolean(marker === 42 ? open : open && (before || !close));
    token._close = Boolean(marker === 42 ? close : close && (after || !open));
    return ok3(code2);
  }
}
|
||
// Shift a point horizontally by `offset` (possibly negative), updating
// column, absolute offset, and buffer index in place; the line is untouched.
function movePoint(point3, offset) {
  for (const field of ["column", "offset", "_bufferIndex"]) {
    point3[field] += offset;
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/autolink.js
|
||
// Autolinks: "<scheme:destination>" or "<user@example.com>".
var autolink = {
  name: "autolink",
  tokenize: tokenizeAutolink
};
function tokenizeAutolink(effects, ok3, nok) {
  // Counts scheme length (max 32) or email domain-label length (max 63).
  let size = 0;
  return start;
  // At "<": open the autolink and assume a protocol until proven otherwise.
  function start(code2) {
    effects.enter("autolink");
    effects.enter("autolinkMarker");
    effects.consume(code2);
    effects.exit("autolinkMarker");
    effects.enter("autolinkProtocol");
    return open;
  }
  function open(code2) {
    // A scheme must start with a letter; "@" immediately is invalid.
    if (asciiAlpha(code2)) {
      effects.consume(code2);
      return schemeOrEmailAtext;
    }
    if (code2 === 64) {
      return nok(code2);
    }
    return emailAtext(code2);
  }
  function schemeOrEmailAtext(code2) {
    // Characters valid in both a scheme and an email local part.
    if (code2 === 43 || code2 === 45 || code2 === 46 || asciiAlphanumeric(code2)) {
      size = 1;
      return schemeInsideOrEmailAtext(code2);
    }
    return emailAtext(code2);
  }
  function schemeInsideOrEmailAtext(code2) {
    // ":" resolves the ambiguity: this is a URL autolink.
    if (code2 === 58) {
      effects.consume(code2);
      size = 0;
      return urlInside;
    }
    if ((code2 === 43 || code2 === 45 || code2 === 46 || asciiAlphanumeric(code2)) && size++ < 32) {
      effects.consume(code2);
      return schemeInsideOrEmailAtext;
    }
    // Scheme too long or invalid character: fall back to the email path.
    size = 0;
    return emailAtext(code2);
  }
  function urlInside(code2) {
    if (code2 === 62) {
      effects.exit("autolinkProtocol");
      effects.enter("autolinkMarker");
      effects.consume(code2);
      effects.exit("autolinkMarker");
      effects.exit("autolink");
      return ok3;
    }
    // No EOF, spaces, "<", or control characters inside a URL autolink.
    if (code2 === null || code2 === 32 || code2 === 60 || asciiControl(code2)) {
      return nok(code2);
    }
    effects.consume(code2);
    return urlInside;
  }
  function emailAtext(code2) {
    if (code2 === 64) {
      effects.consume(code2);
      return emailAtSignOrDot;
    }
    if (asciiAtext(code2)) {
      effects.consume(code2);
      return emailAtext;
    }
    return nok(code2);
  }
  // Right after "@" or ".", a domain label must start alphanumeric.
  function emailAtSignOrDot(code2) {
    return asciiAlphanumeric(code2) ? emailLabel(code2) : nok(code2);
  }
  function emailLabel(code2) {
    if (code2 === 46) {
      effects.consume(code2);
      size = 0;
      return emailAtSignOrDot;
    }
    if (code2 === 62) {
      // Retroactively retype the protocol token as an email autolink.
      effects.exit("autolinkProtocol").type = "autolinkEmail";
      effects.enter("autolinkMarker");
      effects.consume(code2);
      effects.exit("autolinkMarker");
      effects.exit("autolink");
      return ok3;
    }
    return emailValue(code2);
  }
  function emailValue(code2) {
    // Labels allow "-" and alphanumerics, up to 63 characters, but may
    // not end with "-" (only emailLabel accepts "." or ">").
    if ((code2 === 45 || asciiAlphanumeric(code2)) && size++ < 63) {
      const next = code2 === 45 ? emailValue : emailLabel;
      effects.consume(code2);
      return next;
    }
    return nok(code2);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/blank-line.js
|
||
// A blank line: optional spaces/tabs, then a line ending or EOF.
var blankLine = {
  partial: true,
  tokenize: tokenizeBlankLine
};
function tokenizeBlankLine(effects, ok3, nok) {
  return start;
  function start(code2) {
    if (markdownSpace(code2)) {
      return factorySpace(effects, after, "linePrefix")(code2);
    }
    return after(code2);
  }
  function after(code2) {
    const blank = code2 === null || markdownLineEnding(code2);
    return blank ? ok3(code2) : nok(code2);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/block-quote.js
|
||
// Block quotes: a ">" prefix per line, optionally followed by one space.
var blockQuote = {
  continuation: {
    tokenize: tokenizeBlockQuoteContinuation
  },
  exit,
  name: "blockQuote",
  tokenize: tokenizeBlockQuoteStart
};
function tokenizeBlockQuoteStart(effects, ok3, nok) {
  const self2 = this;
  return start;
  function start(code2) {
    if (code2 === 62) {
      const state = self2.containerState;
      // Only open the container token once; later lines just add prefixes.
      if (!state.open) {
        effects.enter("blockQuote", {
          _container: true
        });
        state.open = true;
      }
      effects.enter("blockQuotePrefix");
      effects.enter("blockQuoteMarker");
      effects.consume(code2);
      effects.exit("blockQuoteMarker");
      return after;
    }
    return nok(code2);
  }
  function after(code2) {
    // One space/tab after ">" belongs to the prefix.
    if (markdownSpace(code2)) {
      effects.enter("blockQuotePrefixWhitespace");
      effects.consume(code2);
      effects.exit("blockQuotePrefixWhitespace");
      effects.exit("blockQuotePrefix");
      return ok3;
    }
    effects.exit("blockQuotePrefix");
    return ok3(code2);
  }
}
|
||
// Continue a block quote on a new line: optional indentation (up to 3
// columns unless indented code is disabled), then another ">" prefix.
function tokenizeBlockQuoteContinuation(effects, ok3, nok) {
  const self2 = this;
  return contStart;
  function contStart(code2) {
    if (!markdownSpace(code2)) {
      return contBefore(code2);
    }
    const max = self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4;
    return factorySpace(effects, contBefore, "linePrefix", max)(code2);
  }
  function contBefore(code2) {
    return effects.attempt(blockQuote, ok3, nok)(code2);
  }
}
|
||
// Container exit hook: close the open block quote token.
function exit(effects) {
  const tokenType = "blockQuote";
  effects.exit(tokenType);
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/character-escape.js
|
||
// Character escapes: a backslash followed by one ASCII punctuation
// character (e.g. "\*").
var characterEscape = {
  name: "characterEscape",
  tokenize: tokenizeCharacterEscape
};
function tokenizeCharacterEscape(effects, ok3, nok) {
  return start;
  // At "\": open the escape and consume the marker.
  function start(code2) {
    effects.enter("characterEscape");
    effects.enter("escapeMarker");
    effects.consume(code2);
    effects.exit("escapeMarker");
    return inside;
  }
  // Only ASCII punctuation may be escaped; anything else fails.
  function inside(code2) {
    if (!asciiPunctuation(code2)) {
      return nok(code2);
    }
    effects.enter("characterEscapeValue");
    effects.consume(code2);
    effects.exit("characterEscapeValue");
    effects.exit("characterEscape");
    return ok3;
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/character-reference.js
|
||
// Character references: "&name;", "&#123;", or "&#x1f;".
var characterReference = {
  name: "characterReference",
  tokenize: tokenizeCharacterReference
};
function tokenizeCharacterReference(effects, ok3, nok) {
  const self2 = this;
  // Digits/letters consumed so far, the per-kind maximum, and the
  // per-kind character test (named / decimal / hexadecimal).
  let size = 0;
  let max;
  let test;
  return start;
  // At "&": open the reference and consume the marker.
  function start(code2) {
    effects.enter("characterReference");
    effects.enter("characterReferenceMarker");
    effects.consume(code2);
    effects.exit("characterReferenceMarker");
    return open;
  }
  function open(code2) {
    // "#" switches to a numeric reference.
    if (code2 === 35) {
      effects.enter("characterReferenceMarkerNumeric");
      effects.consume(code2);
      effects.exit("characterReferenceMarkerNumeric");
      return numeric;
    }
    // Named reference: up to 31 alphanumerics.
    effects.enter("characterReferenceValue");
    max = 31;
    test = asciiAlphanumeric;
    return value(code2);
  }
  function numeric(code2) {
    // "x"/"X" switches to hexadecimal (up to 6 digits), else decimal (7).
    if (code2 === 88 || code2 === 120) {
      effects.enter("characterReferenceMarkerHexadecimal");
      effects.consume(code2);
      effects.exit("characterReferenceMarkerHexadecimal");
      effects.enter("characterReferenceValue");
      max = 6;
      test = asciiHexDigit;
      return value;
    }
    effects.enter("characterReferenceValue");
    max = 7;
    test = asciiDigit;
    return value(code2);
  }
  function value(code2) {
    // ";" (59) terminates; named references must additionally be known.
    if (code2 === 59 && size) {
      const token = effects.exit("characterReferenceValue");
      if (test === asciiAlphanumeric && !decodeNamedCharacterReference(self2.sliceSerialize(token))) {
        return nok(code2);
      }
      effects.enter("characterReferenceMarker");
      effects.consume(code2);
      effects.exit("characterReferenceMarker");
      effects.exit("characterReference");
      return ok3;
    }
    if (test(code2) && size++ < max) {
      effects.consume(code2);
      return value;
    }
    return nok(code2);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/code-fenced.js
|
||
// Partial used to require that the next line actually continues the
// current container chain (is not "lazy").
var nonLazyContinuation = {
  partial: true,
  tokenize: tokenizeNonLazyContinuation
};
// Fenced code blocks ("```" or "~~~"). Marked `concrete`: once open,
// containers may not interrupt it.
var codeFenced = {
  concrete: true,
  name: "codeFenced",
  tokenize: tokenizeCodeFenced
};
function tokenizeCodeFenced(effects, ok3, nok) {
  const self2 = this;
  // Partial that recognizes the closing fence of this block.
  const closeStart = {
    partial: true,
    tokenize: tokenizeCloseStart
  };
  // Indentation of the opening fence (stripped from content lines),
  // length of the opening marker run, and the marker itself (` or ~).
  let initialPrefix = 0;
  let sizeOpen = 0;
  let marker;
  return start;
  function start(code2) {
    return beforeSequenceOpen(code2);
  }
  function beforeSequenceOpen(code2) {
    // Remember how far the opening fence was indented.
    const tail = self2.events[self2.events.length - 1];
    initialPrefix = tail && tail[1].type === "linePrefix" ? tail[2].sliceSerialize(tail[1], true).length : 0;
    marker = code2;
    effects.enter("codeFenced");
    effects.enter("codeFencedFence");
    effects.enter("codeFencedFenceSequence");
    return sequenceOpen(code2);
  }
  function sequenceOpen(code2) {
    if (code2 === marker) {
      sizeOpen++;
      effects.consume(code2);
      return sequenceOpen;
    }
    // An opening fence needs at least three markers.
    if (sizeOpen < 3) {
      return nok(code2);
    }
    effects.exit("codeFencedFenceSequence");
    return markdownSpace(code2) ? factorySpace(effects, infoBefore, "whitespace")(code2) : infoBefore(code2);
  }
  function infoBefore(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      effects.exit("codeFencedFence");
      return self2.interrupt ? ok3(code2) : effects.check(nonLazyContinuation, atNonLazyBreak, after)(code2);
    }
    effects.enter("codeFencedFenceInfo");
    effects.enter("chunkString", {
      contentType: "string"
    });
    return info(code2);
  }
  function info(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      effects.exit("chunkString");
      effects.exit("codeFencedFenceInfo");
      return infoBefore(code2);
    }
    if (markdownSpace(code2)) {
      effects.exit("chunkString");
      effects.exit("codeFencedFenceInfo");
      return factorySpace(effects, metaBefore, "whitespace")(code2);
    }
    // Backticks may not appear in the info string of a backtick fence.
    if (code2 === 96 && code2 === marker) {
      return nok(code2);
    }
    effects.consume(code2);
    return info;
  }
  function metaBefore(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      return infoBefore(code2);
    }
    effects.enter("codeFencedFenceMeta");
    effects.enter("chunkString", {
      contentType: "string"
    });
    return meta(code2);
  }
  function meta(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      effects.exit("chunkString");
      effects.exit("codeFencedFenceMeta");
      return infoBefore(code2);
    }
    // Same restriction as in the info string.
    if (code2 === 96 && code2 === marker) {
      return nok(code2);
    }
    effects.consume(code2);
    return meta;
  }
  // At a non-lazy line break: is the next line the closing fence?
  function atNonLazyBreak(code2) {
    return effects.attempt(closeStart, after, contentBefore)(code2);
  }
  function contentBefore(code2) {
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    return contentStart;
  }
  function contentStart(code2) {
    // Strip up to the opening fence's indentation from content lines.
    return initialPrefix > 0 && markdownSpace(code2) ? factorySpace(effects, beforeContentChunk, "linePrefix", initialPrefix + 1)(code2) : beforeContentChunk(code2);
  }
  function beforeContentChunk(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      return effects.check(nonLazyContinuation, atNonLazyBreak, after)(code2);
    }
    effects.enter("codeFlowValue");
    return contentChunk(code2);
  }
  function contentChunk(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      effects.exit("codeFlowValue");
      return beforeContentChunk(code2);
    }
    effects.consume(code2);
    return contentChunk;
  }
  function after(code2) {
    effects.exit("codeFenced");
    return ok3(code2);
  }
  // Recognize a closing fence: a line ending, optional indentation, then
  // at least `sizeOpen` of the same marker, then only whitespace.
  function tokenizeCloseStart(effects2, ok4, nok2) {
    let size = 0;
    return startBefore;
    function startBefore(code2) {
      effects2.enter("lineEnding");
      effects2.consume(code2);
      effects2.exit("lineEnding");
      return start2;
    }
    function start2(code2) {
      effects2.enter("codeFencedFence");
      return markdownSpace(code2) ? factorySpace(effects2, beforeSequenceClose, "linePrefix", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4)(code2) : beforeSequenceClose(code2);
    }
    function beforeSequenceClose(code2) {
      if (code2 === marker) {
        effects2.enter("codeFencedFenceSequence");
        return sequenceClose(code2);
      }
      return nok2(code2);
    }
    function sequenceClose(code2) {
      if (code2 === marker) {
        size++;
        effects2.consume(code2);
        return sequenceClose;
      }
      // The closing run must be at least as long as the opening run.
      if (size >= sizeOpen) {
        effects2.exit("codeFencedFenceSequence");
        return markdownSpace(code2) ? factorySpace(effects2, sequenceCloseAfter, "whitespace")(code2) : sequenceCloseAfter(code2);
      }
      return nok2(code2);
    }
    function sequenceCloseAfter(code2) {
      if (code2 === null || markdownLineEnding(code2)) {
        effects2.exit("codeFencedFence");
        return ok4(code2);
      }
      return nok2(code2);
    }
  }
}
|
||
function tokenizeNonLazyContinuation(effects, ok3, nok) {
|
||
const self2 = this;
|
||
return start;
|
||
function start(code2) {
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return lineStart;
|
||
}
|
||
function lineStart(code2) {
|
||
return self2.parser.lazy[self2.now().line] ? nok(code2) : ok3(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/code-indented.js
|
||
var codeIndented = {
|
||
name: "codeIndented",
|
||
tokenize: tokenizeCodeIndented
|
||
};
|
||
var furtherStart = {
|
||
partial: true,
|
||
tokenize: tokenizeFurtherStart
|
||
};
|
||
function tokenizeCodeIndented(effects, ok3, nok) {
|
||
const self2 = this;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("codeIndented");
|
||
return factorySpace(effects, afterPrefix, "linePrefix", 4 + 1)(code2);
|
||
}
|
||
function afterPrefix(code2) {
|
||
const tail = self2.events[self2.events.length - 1];
|
||
return tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4 ? atBreak(code2) : nok(code2);
|
||
}
|
||
function atBreak(code2) {
|
||
if (code2 === null) {
|
||
return after(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
return effects.attempt(furtherStart, atBreak, after)(code2);
|
||
}
|
||
effects.enter("codeFlowValue");
|
||
return inside(code2);
|
||
}
|
||
function inside(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("codeFlowValue");
|
||
return atBreak(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return inside;
|
||
}
|
||
function after(code2) {
|
||
effects.exit("codeIndented");
|
||
return ok3(code2);
|
||
}
|
||
}
|
||
function tokenizeFurtherStart(effects, ok3, nok) {
|
||
const self2 = this;
|
||
return furtherStart2;
|
||
function furtherStart2(code2) {
|
||
if (self2.parser.lazy[self2.now().line]) {
|
||
return nok(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return furtherStart2;
|
||
}
|
||
return factorySpace(effects, afterPrefix, "linePrefix", 4 + 1)(code2);
|
||
}
|
||
function afterPrefix(code2) {
|
||
const tail = self2.events[self2.events.length - 1];
|
||
return tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4 ? ok3(code2) : markdownLineEnding(code2) ? furtherStart2(code2) : nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/code-text.js
|
||
var codeText = {
|
||
name: "codeText",
|
||
previous,
|
||
resolve: resolveCodeText,
|
||
tokenize: tokenizeCodeText
|
||
};
|
||
function resolveCodeText(events) {
|
||
let tailExitIndex = events.length - 4;
|
||
let headEnterIndex = 3;
|
||
let index2;
|
||
let enter;
|
||
if ((events[headEnterIndex][1].type === "lineEnding" || events[headEnterIndex][1].type === "space") && (events[tailExitIndex][1].type === "lineEnding" || events[tailExitIndex][1].type === "space")) {
|
||
index2 = headEnterIndex;
|
||
while (++index2 < tailExitIndex) {
|
||
if (events[index2][1].type === "codeTextData") {
|
||
events[headEnterIndex][1].type = "codeTextPadding";
|
||
events[tailExitIndex][1].type = "codeTextPadding";
|
||
headEnterIndex += 2;
|
||
tailExitIndex -= 2;
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
index2 = headEnterIndex - 1;
|
||
tailExitIndex++;
|
||
while (++index2 <= tailExitIndex) {
|
||
if (enter === void 0) {
|
||
if (index2 !== tailExitIndex && events[index2][1].type !== "lineEnding") {
|
||
enter = index2;
|
||
}
|
||
} else if (index2 === tailExitIndex || events[index2][1].type === "lineEnding") {
|
||
events[enter][1].type = "codeTextData";
|
||
if (index2 !== enter + 2) {
|
||
events[enter][1].end = events[index2 - 1][1].end;
|
||
events.splice(enter + 2, index2 - enter - 2);
|
||
tailExitIndex -= index2 - enter - 2;
|
||
index2 = enter + 2;
|
||
}
|
||
enter = void 0;
|
||
}
|
||
}
|
||
return events;
|
||
}
|
||
function previous(code2) {
|
||
return code2 !== 96 || this.events[this.events.length - 1][1].type === "characterEscape";
|
||
}
|
||
function tokenizeCodeText(effects, ok3, nok) {
|
||
const self2 = this;
|
||
let sizeOpen = 0;
|
||
let size;
|
||
let token;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("codeText");
|
||
effects.enter("codeTextSequence");
|
||
return sequenceOpen(code2);
|
||
}
|
||
function sequenceOpen(code2) {
|
||
if (code2 === 96) {
|
||
effects.consume(code2);
|
||
sizeOpen++;
|
||
return sequenceOpen;
|
||
}
|
||
effects.exit("codeTextSequence");
|
||
return between2(code2);
|
||
}
|
||
function between2(code2) {
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 32) {
|
||
effects.enter("space");
|
||
effects.consume(code2);
|
||
effects.exit("space");
|
||
return between2;
|
||
}
|
||
if (code2 === 96) {
|
||
token = effects.enter("codeTextSequence");
|
||
size = 0;
|
||
return sequenceClose(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return between2;
|
||
}
|
||
effects.enter("codeTextData");
|
||
return data(code2);
|
||
}
|
||
function data(code2) {
|
||
if (code2 === null || code2 === 32 || code2 === 96 || markdownLineEnding(code2)) {
|
||
effects.exit("codeTextData");
|
||
return between2(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return data;
|
||
}
|
||
function sequenceClose(code2) {
|
||
if (code2 === 96) {
|
||
effects.consume(code2);
|
||
size++;
|
||
return sequenceClose;
|
||
}
|
||
if (size === sizeOpen) {
|
||
effects.exit("codeTextSequence");
|
||
effects.exit("codeText");
|
||
return ok3(code2);
|
||
}
|
||
token.type = "codeTextData";
|
||
return data(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-util-subtokenize/lib/splice-buffer.js
|
||
var SpliceBuffer = class {
|
||
/**
|
||
* @param {ReadonlyArray<T> | null | undefined} [initial]
|
||
* Initial items (optional).
|
||
* @returns
|
||
* Splice buffer.
|
||
*/
|
||
constructor(initial) {
|
||
this.left = initial ? [...initial] : [];
|
||
this.right = [];
|
||
}
|
||
/**
|
||
* Array access;
|
||
* does not move the cursor.
|
||
*
|
||
* @param {number} index
|
||
* Index.
|
||
* @return {T}
|
||
* Item.
|
||
*/
|
||
get(index2) {
|
||
if (index2 < 0 || index2 >= this.left.length + this.right.length) {
|
||
throw new RangeError("Cannot access index `" + index2 + "` in a splice buffer of size `" + (this.left.length + this.right.length) + "`");
|
||
}
|
||
if (index2 < this.left.length) return this.left[index2];
|
||
return this.right[this.right.length - index2 + this.left.length - 1];
|
||
}
|
||
/**
|
||
* The length of the splice buffer, one greater than the largest index in the
|
||
* array.
|
||
*/
|
||
get length() {
|
||
return this.left.length + this.right.length;
|
||
}
|
||
/**
|
||
* Remove and return `list[0]`;
|
||
* moves the cursor to `0`.
|
||
*
|
||
* @returns {T | undefined}
|
||
* Item, optional.
|
||
*/
|
||
shift() {
|
||
this.setCursor(0);
|
||
return this.right.pop();
|
||
}
|
||
/**
|
||
* Slice the buffer to get an array;
|
||
* does not move the cursor.
|
||
*
|
||
* @param {number} start
|
||
* Start.
|
||
* @param {number | null | undefined} [end]
|
||
* End (optional).
|
||
* @returns {Array<T>}
|
||
* Array of items.
|
||
*/
|
||
slice(start, end) {
|
||
const stop = end === null || end === void 0 ? Number.POSITIVE_INFINITY : end;
|
||
if (stop < this.left.length) {
|
||
return this.left.slice(start, stop);
|
||
}
|
||
if (start > this.left.length) {
|
||
return this.right.slice(this.right.length - stop + this.left.length, this.right.length - start + this.left.length).reverse();
|
||
}
|
||
return this.left.slice(start).concat(this.right.slice(this.right.length - stop + this.left.length).reverse());
|
||
}
|
||
/**
|
||
* Mimics the behavior of Array.prototype.splice() except for the change of
|
||
* interface necessary to avoid segfaults when patching in very large arrays.
|
||
*
|
||
* This operation moves cursor is moved to `start` and results in the cursor
|
||
* placed after any inserted items.
|
||
*
|
||
* @param {number} start
|
||
* Start;
|
||
* zero-based index at which to start changing the array;
|
||
* negative numbers count backwards from the end of the array and values
|
||
* that are out-of bounds are clamped to the appropriate end of the array.
|
||
* @param {number | null | undefined} [deleteCount=0]
|
||
* Delete count (default: `0`);
|
||
* maximum number of elements to delete, starting from start.
|
||
* @param {Array<T> | null | undefined} [items=[]]
|
||
* Items to include in place of the deleted items (default: `[]`).
|
||
* @return {Array<T>}
|
||
* Any removed items.
|
||
*/
|
||
splice(start, deleteCount, items) {
|
||
const count = deleteCount || 0;
|
||
this.setCursor(Math.trunc(start));
|
||
const removed = this.right.splice(this.right.length - count, Number.POSITIVE_INFINITY);
|
||
if (items) chunkedPush(this.left, items);
|
||
return removed.reverse();
|
||
}
|
||
/**
|
||
* Remove and return the highest-numbered item in the array, so
|
||
* `list[list.length - 1]`;
|
||
* Moves the cursor to `length`.
|
||
*
|
||
* @returns {T | undefined}
|
||
* Item, optional.
|
||
*/
|
||
pop() {
|
||
this.setCursor(Number.POSITIVE_INFINITY);
|
||
return this.left.pop();
|
||
}
|
||
/**
|
||
* Inserts a single item to the high-numbered side of the array;
|
||
* moves the cursor to `length`.
|
||
*
|
||
* @param {T} item
|
||
* Item.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
push(item) {
|
||
this.setCursor(Number.POSITIVE_INFINITY);
|
||
this.left.push(item);
|
||
}
|
||
/**
|
||
* Inserts many items to the high-numbered side of the array.
|
||
* Moves the cursor to `length`.
|
||
*
|
||
* @param {Array<T>} items
|
||
* Items.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
pushMany(items) {
|
||
this.setCursor(Number.POSITIVE_INFINITY);
|
||
chunkedPush(this.left, items);
|
||
}
|
||
/**
|
||
* Inserts a single item to the low-numbered side of the array;
|
||
* Moves the cursor to `0`.
|
||
*
|
||
* @param {T} item
|
||
* Item.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
unshift(item) {
|
||
this.setCursor(0);
|
||
this.right.push(item);
|
||
}
|
||
/**
|
||
* Inserts many items to the low-numbered side of the array;
|
||
* moves the cursor to `0`.
|
||
*
|
||
* @param {Array<T>} items
|
||
* Items.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
unshiftMany(items) {
|
||
this.setCursor(0);
|
||
chunkedPush(this.right, items.reverse());
|
||
}
|
||
/**
|
||
* Move the cursor to a specific position in the array. Requires
|
||
* time proportional to the distance moved.
|
||
*
|
||
* If `n < 0`, the cursor will end up at the beginning.
|
||
* If `n > length`, the cursor will end up at the end.
|
||
*
|
||
* @param {number} n
|
||
* Position.
|
||
* @return {undefined}
|
||
* Nothing.
|
||
*/
|
||
setCursor(n) {
|
||
if (n === this.left.length || n > this.left.length && this.right.length === 0 || n < 0 && this.left.length === 0) return;
|
||
if (n < this.left.length) {
|
||
const removed = this.left.splice(n, Number.POSITIVE_INFINITY);
|
||
chunkedPush(this.right, removed.reverse());
|
||
} else {
|
||
const removed = this.right.splice(this.left.length + this.right.length - n, Number.POSITIVE_INFINITY);
|
||
chunkedPush(this.left, removed.reverse());
|
||
}
|
||
}
|
||
};
|
||
function chunkedPush(list4, right) {
|
||
let chunkStart = 0;
|
||
if (right.length < 1e4) {
|
||
list4.push(...right);
|
||
} else {
|
||
while (chunkStart < right.length) {
|
||
list4.push(...right.slice(chunkStart, chunkStart + 1e4));
|
||
chunkStart += 1e4;
|
||
}
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-util-subtokenize/index.js
|
||
function subtokenize(eventsArray) {
|
||
const jumps = {};
|
||
let index2 = -1;
|
||
let event;
|
||
let lineIndex;
|
||
let otherIndex;
|
||
let otherEvent;
|
||
let parameters;
|
||
let subevents;
|
||
let more;
|
||
const events = new SpliceBuffer(eventsArray);
|
||
while (++index2 < events.length) {
|
||
while (index2 in jumps) {
|
||
index2 = jumps[index2];
|
||
}
|
||
event = events.get(index2);
|
||
if (index2 && event[1].type === "chunkFlow" && events.get(index2 - 1)[1].type === "listItemPrefix") {
|
||
subevents = event[1]._tokenizer.events;
|
||
otherIndex = 0;
|
||
if (otherIndex < subevents.length && subevents[otherIndex][1].type === "lineEndingBlank") {
|
||
otherIndex += 2;
|
||
}
|
||
if (otherIndex < subevents.length && subevents[otherIndex][1].type === "content") {
|
||
while (++otherIndex < subevents.length) {
|
||
if (subevents[otherIndex][1].type === "content") {
|
||
break;
|
||
}
|
||
if (subevents[otherIndex][1].type === "chunkText") {
|
||
subevents[otherIndex][1]._isInFirstContentOfListItem = true;
|
||
otherIndex++;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
if (event[0] === "enter") {
|
||
if (event[1].contentType) {
|
||
Object.assign(jumps, subcontent(events, index2));
|
||
index2 = jumps[index2];
|
||
more = true;
|
||
}
|
||
} else if (event[1]._container) {
|
||
otherIndex = index2;
|
||
lineIndex = void 0;
|
||
while (otherIndex--) {
|
||
otherEvent = events.get(otherIndex);
|
||
if (otherEvent[1].type === "lineEnding" || otherEvent[1].type === "lineEndingBlank") {
|
||
if (otherEvent[0] === "enter") {
|
||
if (lineIndex) {
|
||
events.get(lineIndex)[1].type = "lineEndingBlank";
|
||
}
|
||
otherEvent[1].type = "lineEnding";
|
||
lineIndex = otherIndex;
|
||
}
|
||
} else if (otherEvent[1].type === "linePrefix") {
|
||
} else {
|
||
break;
|
||
}
|
||
}
|
||
if (lineIndex) {
|
||
event[1].end = __spreadValues({}, events.get(lineIndex)[1].start);
|
||
parameters = events.slice(lineIndex, index2);
|
||
parameters.unshift(event);
|
||
events.splice(lineIndex, index2 - lineIndex + 1, parameters);
|
||
}
|
||
}
|
||
}
|
||
splice(eventsArray, 0, Number.POSITIVE_INFINITY, events.slice(0));
|
||
return !more;
|
||
}
|
||
function subcontent(events, eventIndex) {
|
||
const token = events.get(eventIndex)[1];
|
||
const context = events.get(eventIndex)[2];
|
||
let startPosition = eventIndex - 1;
|
||
const startPositions = [];
|
||
const tokenizer = token._tokenizer || context.parser[token.contentType](token.start);
|
||
const childEvents = tokenizer.events;
|
||
const jumps = [];
|
||
const gaps = {};
|
||
let stream;
|
||
let previous2;
|
||
let index2 = -1;
|
||
let current = token;
|
||
let adjust = 0;
|
||
let start = 0;
|
||
const breaks = [start];
|
||
while (current) {
|
||
while (events.get(++startPosition)[1] !== current) {
|
||
}
|
||
startPositions.push(startPosition);
|
||
if (!current._tokenizer) {
|
||
stream = context.sliceStream(current);
|
||
if (!current.next) {
|
||
stream.push(null);
|
||
}
|
||
if (previous2) {
|
||
tokenizer.defineSkip(current.start);
|
||
}
|
||
if (current._isInFirstContentOfListItem) {
|
||
tokenizer._gfmTasklistFirstContentOfListItem = true;
|
||
}
|
||
tokenizer.write(stream);
|
||
if (current._isInFirstContentOfListItem) {
|
||
tokenizer._gfmTasklistFirstContentOfListItem = void 0;
|
||
}
|
||
}
|
||
previous2 = current;
|
||
current = current.next;
|
||
}
|
||
current = token;
|
||
while (++index2 < childEvents.length) {
|
||
if (
|
||
// Find a void token that includes a break.
|
||
childEvents[index2][0] === "exit" && childEvents[index2 - 1][0] === "enter" && childEvents[index2][1].type === childEvents[index2 - 1][1].type && childEvents[index2][1].start.line !== childEvents[index2][1].end.line
|
||
) {
|
||
start = index2 + 1;
|
||
breaks.push(start);
|
||
current._tokenizer = void 0;
|
||
current.previous = void 0;
|
||
current = current.next;
|
||
}
|
||
}
|
||
tokenizer.events = [];
|
||
if (current) {
|
||
current._tokenizer = void 0;
|
||
current.previous = void 0;
|
||
} else {
|
||
breaks.pop();
|
||
}
|
||
index2 = breaks.length;
|
||
while (index2--) {
|
||
const slice = childEvents.slice(breaks[index2], breaks[index2 + 1]);
|
||
const start2 = startPositions.pop();
|
||
jumps.push([start2, start2 + slice.length - 1]);
|
||
events.splice(start2, 2, slice);
|
||
}
|
||
jumps.reverse();
|
||
index2 = -1;
|
||
while (++index2 < jumps.length) {
|
||
gaps[adjust + jumps[index2][0]] = adjust + jumps[index2][1];
|
||
adjust += jumps[index2][1] - jumps[index2][0] - 1;
|
||
}
|
||
return gaps;
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/content.js
|
||
var content2 = {
|
||
resolve: resolveContent,
|
||
tokenize: tokenizeContent
|
||
};
|
||
var continuationConstruct = {
|
||
partial: true,
|
||
tokenize: tokenizeContinuation
|
||
};
|
||
function resolveContent(events) {
|
||
subtokenize(events);
|
||
return events;
|
||
}
|
||
function tokenizeContent(effects, ok3) {
|
||
let previous2;
|
||
return chunkStart;
|
||
function chunkStart(code2) {
|
||
effects.enter("content");
|
||
previous2 = effects.enter("chunkContent", {
|
||
contentType: "content"
|
||
});
|
||
return chunkInside(code2);
|
||
}
|
||
function chunkInside(code2) {
|
||
if (code2 === null) {
|
||
return contentEnd(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
return effects.check(continuationConstruct, contentContinue, contentEnd)(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return chunkInside;
|
||
}
|
||
function contentEnd(code2) {
|
||
effects.exit("chunkContent");
|
||
effects.exit("content");
|
||
return ok3(code2);
|
||
}
|
||
function contentContinue(code2) {
|
||
effects.consume(code2);
|
||
effects.exit("chunkContent");
|
||
previous2.next = effects.enter("chunkContent", {
|
||
contentType: "content",
|
||
previous: previous2
|
||
});
|
||
previous2 = previous2.next;
|
||
return chunkInside;
|
||
}
|
||
}
|
||
function tokenizeContinuation(effects, ok3, nok) {
|
||
const self2 = this;
|
||
return startLookahead;
|
||
function startLookahead(code2) {
|
||
effects.exit("chunkContent");
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return factorySpace(effects, prefixed, "linePrefix");
|
||
}
|
||
function prefixed(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
return nok(code2);
|
||
}
|
||
const tail = self2.events[self2.events.length - 1];
|
||
if (!self2.parser.constructs.disable.null.includes("codeIndented") && tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4) {
|
||
return ok3(code2);
|
||
}
|
||
return effects.interrupt(self2.parser.constructs.flow, nok, ok3)(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-factory-destination/index.js
|
||
function factoryDestination(effects, ok3, nok, type, literalType, literalMarkerType, rawType, stringType, max) {
|
||
const limit = max || Number.POSITIVE_INFINITY;
|
||
let balance = 0;
|
||
return start;
|
||
function start(code2) {
|
||
if (code2 === 60) {
|
||
effects.enter(type);
|
||
effects.enter(literalType);
|
||
effects.enter(literalMarkerType);
|
||
effects.consume(code2);
|
||
effects.exit(literalMarkerType);
|
||
return enclosedBefore;
|
||
}
|
||
if (code2 === null || code2 === 32 || code2 === 41 || asciiControl(code2)) {
|
||
return nok(code2);
|
||
}
|
||
effects.enter(type);
|
||
effects.enter(rawType);
|
||
effects.enter(stringType);
|
||
effects.enter("chunkString", {
|
||
contentType: "string"
|
||
});
|
||
return raw(code2);
|
||
}
|
||
function enclosedBefore(code2) {
|
||
if (code2 === 62) {
|
||
effects.enter(literalMarkerType);
|
||
effects.consume(code2);
|
||
effects.exit(literalMarkerType);
|
||
effects.exit(literalType);
|
||
effects.exit(type);
|
||
return ok3;
|
||
}
|
||
effects.enter(stringType);
|
||
effects.enter("chunkString", {
|
||
contentType: "string"
|
||
});
|
||
return enclosed(code2);
|
||
}
|
||
function enclosed(code2) {
|
||
if (code2 === 62) {
|
||
effects.exit("chunkString");
|
||
effects.exit(stringType);
|
||
return enclosedBefore(code2);
|
||
}
|
||
if (code2 === null || code2 === 60 || markdownLineEnding(code2)) {
|
||
return nok(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return code2 === 92 ? enclosedEscape : enclosed;
|
||
}
|
||
function enclosedEscape(code2) {
|
||
if (code2 === 60 || code2 === 62 || code2 === 92) {
|
||
effects.consume(code2);
|
||
return enclosed;
|
||
}
|
||
return enclosed(code2);
|
||
}
|
||
function raw(code2) {
|
||
if (!balance && (code2 === null || code2 === 41 || markdownLineEndingOrSpace(code2))) {
|
||
effects.exit("chunkString");
|
||
effects.exit(stringType);
|
||
effects.exit(rawType);
|
||
effects.exit(type);
|
||
return ok3(code2);
|
||
}
|
||
if (balance < limit && code2 === 40) {
|
||
effects.consume(code2);
|
||
balance++;
|
||
return raw;
|
||
}
|
||
if (code2 === 41) {
|
||
effects.consume(code2);
|
||
balance--;
|
||
return raw;
|
||
}
|
||
if (code2 === null || code2 === 32 || code2 === 40 || asciiControl(code2)) {
|
||
return nok(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return code2 === 92 ? rawEscape : raw;
|
||
}
|
||
function rawEscape(code2) {
|
||
if (code2 === 40 || code2 === 41 || code2 === 92) {
|
||
effects.consume(code2);
|
||
return raw;
|
||
}
|
||
return raw(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-factory-label/index.js
|
||
function factoryLabel(effects, ok3, nok, type, markerType, stringType) {
|
||
const self2 = this;
|
||
let size = 0;
|
||
let seen;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter(type);
|
||
effects.enter(markerType);
|
||
effects.consume(code2);
|
||
effects.exit(markerType);
|
||
effects.enter(stringType);
|
||
return atBreak;
|
||
}
|
||
function atBreak(code2) {
|
||
if (size > 999 || code2 === null || code2 === 91 || code2 === 93 && !seen || // To do: remove in the future once we’ve switched from
|
||
// `micromark-extension-footnote` to `micromark-extension-gfm-footnote`,
|
||
// which doesn’t need this.
|
||
// Hidden footnotes hook.
|
||
/* c8 ignore next 3 */
|
||
code2 === 94 && !size && "_hiddenFootnoteSupport" in self2.parser.constructs) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 93) {
|
||
effects.exit(stringType);
|
||
effects.enter(markerType);
|
||
effects.consume(code2);
|
||
effects.exit(markerType);
|
||
effects.exit(type);
|
||
return ok3;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return atBreak;
|
||
}
|
||
effects.enter("chunkString", {
|
||
contentType: "string"
|
||
});
|
||
return labelInside(code2);
|
||
}
|
||
function labelInside(code2) {
|
||
if (code2 === null || code2 === 91 || code2 === 93 || markdownLineEnding(code2) || size++ > 999) {
|
||
effects.exit("chunkString");
|
||
return atBreak(code2);
|
||
}
|
||
effects.consume(code2);
|
||
if (!seen) seen = !markdownSpace(code2);
|
||
return code2 === 92 ? labelEscape : labelInside;
|
||
}
|
||
function labelEscape(code2) {
|
||
if (code2 === 91 || code2 === 92 || code2 === 93) {
|
||
effects.consume(code2);
|
||
size++;
|
||
return labelInside;
|
||
}
|
||
return labelInside(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-factory-title/index.js
|
||
function factoryTitle(effects, ok3, nok, type, markerType, stringType) {
|
||
let marker;
|
||
return start;
|
||
function start(code2) {
|
||
if (code2 === 34 || code2 === 39 || code2 === 40) {
|
||
effects.enter(type);
|
||
effects.enter(markerType);
|
||
effects.consume(code2);
|
||
effects.exit(markerType);
|
||
marker = code2 === 40 ? 41 : code2;
|
||
return begin;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function begin(code2) {
|
||
if (code2 === marker) {
|
||
effects.enter(markerType);
|
||
effects.consume(code2);
|
||
effects.exit(markerType);
|
||
effects.exit(type);
|
||
return ok3;
|
||
}
|
||
effects.enter(stringType);
|
||
return atBreak(code2);
|
||
}
|
||
function atBreak(code2) {
|
||
if (code2 === marker) {
|
||
effects.exit(stringType);
|
||
return begin(marker);
|
||
}
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return factorySpace(effects, atBreak, "linePrefix");
|
||
}
|
||
effects.enter("chunkString", {
|
||
contentType: "string"
|
||
});
|
||
return inside(code2);
|
||
}
|
||
function inside(code2) {
|
||
if (code2 === marker || code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("chunkString");
|
||
return atBreak(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return code2 === 92 ? escape : inside;
|
||
}
|
||
function escape(code2) {
|
||
if (code2 === marker || code2 === 92) {
|
||
effects.consume(code2);
|
||
return inside;
|
||
}
|
||
return inside(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-factory-whitespace/index.js
|
||
function factoryWhitespace(effects, ok3) {
|
||
let seen;
|
||
return start;
|
||
function start(code2) {
|
||
if (markdownLineEnding(code2)) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
seen = true;
|
||
return start;
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
return factorySpace(effects, start, seen ? "linePrefix" : "lineSuffix")(code2);
|
||
}
|
||
return ok3(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/definition.js
|
||
var definition = {
|
||
name: "definition",
|
||
tokenize: tokenizeDefinition
|
||
};
|
||
var titleBefore = {
|
||
partial: true,
|
||
tokenize: tokenizeTitleBefore
|
||
};
|
||
function tokenizeDefinition(effects, ok3, nok) {
|
||
const self2 = this;
|
||
let identifier;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("definition");
|
||
return before(code2);
|
||
}
|
||
function before(code2) {
|
||
return factoryLabel.call(
|
||
self2,
|
||
effects,
|
||
labelAfter,
|
||
// Note: we don’t need to reset the way `markdown-rs` does.
|
||
nok,
|
||
"definitionLabel",
|
||
"definitionLabelMarker",
|
||
"definitionLabelString"
|
||
)(code2);
|
||
}
|
||
function labelAfter(code2) {
|
||
identifier = normalizeIdentifier(self2.sliceSerialize(self2.events[self2.events.length - 1][1]).slice(1, -1));
|
||
if (code2 === 58) {
|
||
effects.enter("definitionMarker");
|
||
effects.consume(code2);
|
||
effects.exit("definitionMarker");
|
||
return markerAfter;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function markerAfter(code2) {
|
||
return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, destinationBefore)(code2) : destinationBefore(code2);
|
||
}
|
||
function destinationBefore(code2) {
|
||
return factoryDestination(
|
||
effects,
|
||
destinationAfter,
|
||
// Note: we don’t need to reset the way `markdown-rs` does.
|
||
nok,
|
||
"definitionDestination",
|
||
"definitionDestinationLiteral",
|
||
"definitionDestinationLiteralMarker",
|
||
"definitionDestinationRaw",
|
||
"definitionDestinationString"
|
||
)(code2);
|
||
}
|
||
function destinationAfter(code2) {
|
||
return effects.attempt(titleBefore, after, after)(code2);
|
||
}
|
||
function after(code2) {
|
||
return markdownSpace(code2) ? factorySpace(effects, afterWhitespace, "whitespace")(code2) : afterWhitespace(code2);
|
||
}
|
||
function afterWhitespace(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("definition");
|
||
self2.parser.defined.push(identifier);
|
||
return ok3(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
function tokenizeTitleBefore(effects, ok3, nok) {
|
||
return titleBefore2;
|
||
function titleBefore2(code2) {
|
||
return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, beforeMarker)(code2) : nok(code2);
|
||
}
|
||
function beforeMarker(code2) {
|
||
return factoryTitle(effects, titleAfter, nok, "definitionTitle", "definitionTitleMarker", "definitionTitleString")(code2);
|
||
}
|
||
function titleAfter(code2) {
|
||
return markdownSpace(code2) ? factorySpace(effects, titleAfterOptionalWhitespace, "whitespace")(code2) : titleAfterOptionalWhitespace(code2);
|
||
}
|
||
function titleAfterOptionalWhitespace(code2) {
|
||
return code2 === null || markdownLineEnding(code2) ? ok3(code2) : nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/hard-break-escape.js
|
||
var hardBreakEscape = {
|
||
name: "hardBreakEscape",
|
||
tokenize: tokenizeHardBreakEscape
|
||
};
|
||
function tokenizeHardBreakEscape(effects, ok3, nok) {
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("hardBreakEscape");
|
||
effects.consume(code2);
|
||
return after;
|
||
}
|
||
function after(code2) {
|
||
if (markdownLineEnding(code2)) {
|
||
effects.exit("hardBreakEscape");
|
||
return ok3(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/heading-atx.js
|
||
var headingAtx = {
|
||
name: "headingAtx",
|
||
resolve: resolveHeadingAtx,
|
||
tokenize: tokenizeHeadingAtx
|
||
};
|
||
function resolveHeadingAtx(events, context) {
|
||
let contentEnd = events.length - 2;
|
||
let contentStart = 3;
|
||
let content3;
|
||
let text4;
|
||
if (events[contentStart][1].type === "whitespace") {
|
||
contentStart += 2;
|
||
}
|
||
if (contentEnd - 2 > contentStart && events[contentEnd][1].type === "whitespace") {
|
||
contentEnd -= 2;
|
||
}
|
||
if (events[contentEnd][1].type === "atxHeadingSequence" && (contentStart === contentEnd - 1 || contentEnd - 4 > contentStart && events[contentEnd - 2][1].type === "whitespace")) {
|
||
contentEnd -= contentStart + 1 === contentEnd ? 2 : 4;
|
||
}
|
||
if (contentEnd > contentStart) {
|
||
content3 = {
|
||
type: "atxHeadingText",
|
||
start: events[contentStart][1].start,
|
||
end: events[contentEnd][1].end
|
||
};
|
||
text4 = {
|
||
type: "chunkText",
|
||
start: events[contentStart][1].start,
|
||
end: events[contentEnd][1].end,
|
||
contentType: "text"
|
||
};
|
||
splice(events, contentStart, contentEnd - contentStart + 1, [["enter", content3, context], ["enter", text4, context], ["exit", text4, context], ["exit", content3, context]]);
|
||
}
|
||
return events;
|
||
}
|
||
function tokenizeHeadingAtx(effects, ok3, nok) {
|
||
let size = 0;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("atxHeading");
|
||
return before(code2);
|
||
}
|
||
function before(code2) {
|
||
effects.enter("atxHeadingSequence");
|
||
return sequenceOpen(code2);
|
||
}
|
||
function sequenceOpen(code2) {
|
||
if (code2 === 35 && size++ < 6) {
|
||
effects.consume(code2);
|
||
return sequenceOpen;
|
||
}
|
||
if (code2 === null || markdownLineEndingOrSpace(code2)) {
|
||
effects.exit("atxHeadingSequence");
|
||
return atBreak(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function atBreak(code2) {
|
||
if (code2 === 35) {
|
||
effects.enter("atxHeadingSequence");
|
||
return sequenceFurther(code2);
|
||
}
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("atxHeading");
|
||
return ok3(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
return factorySpace(effects, atBreak, "whitespace")(code2);
|
||
}
|
||
effects.enter("atxHeadingText");
|
||
return data(code2);
|
||
}
|
||
function sequenceFurther(code2) {
|
||
if (code2 === 35) {
|
||
effects.consume(code2);
|
||
return sequenceFurther;
|
||
}
|
||
effects.exit("atxHeadingSequence");
|
||
return atBreak(code2);
|
||
}
|
||
function data(code2) {
|
||
if (code2 === null || code2 === 35 || markdownLineEndingOrSpace(code2)) {
|
||
effects.exit("atxHeadingText");
|
||
return atBreak(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return data;
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-util-html-tag-name/index.js
|
||
var htmlBlockNames = [
|
||
"address",
|
||
"article",
|
||
"aside",
|
||
"base",
|
||
"basefont",
|
||
"blockquote",
|
||
"body",
|
||
"caption",
|
||
"center",
|
||
"col",
|
||
"colgroup",
|
||
"dd",
|
||
"details",
|
||
"dialog",
|
||
"dir",
|
||
"div",
|
||
"dl",
|
||
"dt",
|
||
"fieldset",
|
||
"figcaption",
|
||
"figure",
|
||
"footer",
|
||
"form",
|
||
"frame",
|
||
"frameset",
|
||
"h1",
|
||
"h2",
|
||
"h3",
|
||
"h4",
|
||
"h5",
|
||
"h6",
|
||
"head",
|
||
"header",
|
||
"hr",
|
||
"html",
|
||
"iframe",
|
||
"legend",
|
||
"li",
|
||
"link",
|
||
"main",
|
||
"menu",
|
||
"menuitem",
|
||
"nav",
|
||
"noframes",
|
||
"ol",
|
||
"optgroup",
|
||
"option",
|
||
"p",
|
||
"param",
|
||
"search",
|
||
"section",
|
||
"summary",
|
||
"table",
|
||
"tbody",
|
||
"td",
|
||
"tfoot",
|
||
"th",
|
||
"thead",
|
||
"title",
|
||
"tr",
|
||
"track",
|
||
"ul"
|
||
];
|
||
var htmlRawNames = ["pre", "script", "style", "textarea"];
|
||
|
||
// node_modules/micromark-core-commonmark/lib/html-flow.js
|
||
var htmlFlow = {
|
||
concrete: true,
|
||
name: "htmlFlow",
|
||
resolveTo: resolveToHtmlFlow,
|
||
tokenize: tokenizeHtmlFlow
|
||
};
|
||
var blankLineBefore = {
|
||
partial: true,
|
||
tokenize: tokenizeBlankLineBefore
|
||
};
|
||
var nonLazyContinuationStart = {
|
||
partial: true,
|
||
tokenize: tokenizeNonLazyContinuationStart
|
||
};
|
||
function resolveToHtmlFlow(events) {
|
||
let index2 = events.length;
|
||
while (index2--) {
|
||
if (events[index2][0] === "enter" && events[index2][1].type === "htmlFlow") {
|
||
break;
|
||
}
|
||
}
|
||
if (index2 > 1 && events[index2 - 2][1].type === "linePrefix") {
|
||
events[index2][1].start = events[index2 - 2][1].start;
|
||
events[index2 + 1][1].start = events[index2 - 2][1].start;
|
||
events.splice(index2 - 2, 2);
|
||
}
|
||
return events;
|
||
}
|
||
function tokenizeHtmlFlow(effects, ok3, nok) {
|
||
const self2 = this;
|
||
let marker;
|
||
let closingTag;
|
||
let buffer;
|
||
let index2;
|
||
let markerB;
|
||
return start;
|
||
function start(code2) {
|
||
return before(code2);
|
||
}
|
||
function before(code2) {
|
||
effects.enter("htmlFlow");
|
||
effects.enter("htmlFlowData");
|
||
effects.consume(code2);
|
||
return open;
|
||
}
|
||
function open(code2) {
|
||
if (code2 === 33) {
|
||
effects.consume(code2);
|
||
return declarationOpen;
|
||
}
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
closingTag = true;
|
||
return tagCloseStart;
|
||
}
|
||
if (code2 === 63) {
|
||
effects.consume(code2);
|
||
marker = 3;
|
||
return self2.interrupt ? ok3 : continuationDeclarationInside;
|
||
}
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
buffer = String.fromCharCode(code2);
|
||
return tagName;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function declarationOpen(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
marker = 2;
|
||
return commentOpenInside;
|
||
}
|
||
if (code2 === 91) {
|
||
effects.consume(code2);
|
||
marker = 5;
|
||
index2 = 0;
|
||
return cdataOpenInside;
|
||
}
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
marker = 4;
|
||
return self2.interrupt ? ok3 : continuationDeclarationInside;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function commentOpenInside(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return self2.interrupt ? ok3 : continuationDeclarationInside;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function cdataOpenInside(code2) {
|
||
const value = "CDATA[";
|
||
if (code2 === value.charCodeAt(index2++)) {
|
||
effects.consume(code2);
|
||
if (index2 === value.length) {
|
||
return self2.interrupt ? ok3 : continuation;
|
||
}
|
||
return cdataOpenInside;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function tagCloseStart(code2) {
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
buffer = String.fromCharCode(code2);
|
||
return tagName;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function tagName(code2) {
|
||
if (code2 === null || code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
|
||
const slash = code2 === 47;
|
||
const name = buffer.toLowerCase();
|
||
if (!slash && !closingTag && htmlRawNames.includes(name)) {
|
||
marker = 1;
|
||
return self2.interrupt ? ok3(code2) : continuation(code2);
|
||
}
|
||
if (htmlBlockNames.includes(buffer.toLowerCase())) {
|
||
marker = 6;
|
||
if (slash) {
|
||
effects.consume(code2);
|
||
return basicSelfClosing;
|
||
}
|
||
return self2.interrupt ? ok3(code2) : continuation(code2);
|
||
}
|
||
marker = 7;
|
||
return self2.interrupt && !self2.parser.lazy[self2.now().line] ? nok(code2) : closingTag ? completeClosingTagAfter(code2) : completeAttributeNameBefore(code2);
|
||
}
|
||
if (code2 === 45 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
buffer += String.fromCharCode(code2);
|
||
return tagName;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function basicSelfClosing(code2) {
|
||
if (code2 === 62) {
|
||
effects.consume(code2);
|
||
return self2.interrupt ? ok3 : continuation;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function completeClosingTagAfter(code2) {
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeClosingTagAfter;
|
||
}
|
||
return completeEnd(code2);
|
||
}
|
||
function completeAttributeNameBefore(code2) {
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
return completeEnd;
|
||
}
|
||
if (code2 === 58 || code2 === 95 || asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeName;
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeNameBefore;
|
||
}
|
||
return completeEnd(code2);
|
||
}
|
||
function completeAttributeName(code2) {
|
||
if (code2 === 45 || code2 === 46 || code2 === 58 || code2 === 95 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeName;
|
||
}
|
||
return completeAttributeNameAfter(code2);
|
||
}
|
||
function completeAttributeNameAfter(code2) {
|
||
if (code2 === 61) {
|
||
effects.consume(code2);
|
||
return completeAttributeValueBefore;
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeNameAfter;
|
||
}
|
||
return completeAttributeNameBefore(code2);
|
||
}
|
||
function completeAttributeValueBefore(code2) {
|
||
if (code2 === null || code2 === 60 || code2 === 61 || code2 === 62 || code2 === 96) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 34 || code2 === 39) {
|
||
effects.consume(code2);
|
||
markerB = code2;
|
||
return completeAttributeValueQuoted;
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeValueBefore;
|
||
}
|
||
return completeAttributeValueUnquoted(code2);
|
||
}
|
||
function completeAttributeValueQuoted(code2) {
|
||
if (code2 === markerB) {
|
||
effects.consume(code2);
|
||
markerB = null;
|
||
return completeAttributeValueQuotedAfter;
|
||
}
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
return nok(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return completeAttributeValueQuoted;
|
||
}
|
||
function completeAttributeValueUnquoted(code2) {
|
||
if (code2 === null || code2 === 34 || code2 === 39 || code2 === 47 || code2 === 60 || code2 === 61 || code2 === 62 || code2 === 96 || markdownLineEndingOrSpace(code2)) {
|
||
return completeAttributeNameAfter(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return completeAttributeValueUnquoted;
|
||
}
|
||
function completeAttributeValueQuotedAfter(code2) {
|
||
if (code2 === 47 || code2 === 62 || markdownSpace(code2)) {
|
||
return completeAttributeNameBefore(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function completeEnd(code2) {
|
||
if (code2 === 62) {
|
||
effects.consume(code2);
|
||
return completeAfter;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function completeAfter(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
return continuation(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeAfter;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function continuation(code2) {
|
||
if (code2 === 45 && marker === 2) {
|
||
effects.consume(code2);
|
||
return continuationCommentInside;
|
||
}
|
||
if (code2 === 60 && marker === 1) {
|
||
effects.consume(code2);
|
||
return continuationRawTagOpen;
|
||
}
|
||
if (code2 === 62 && marker === 4) {
|
||
effects.consume(code2);
|
||
return continuationClose;
|
||
}
|
||
if (code2 === 63 && marker === 3) {
|
||
effects.consume(code2);
|
||
return continuationDeclarationInside;
|
||
}
|
||
if (code2 === 93 && marker === 5) {
|
||
effects.consume(code2);
|
||
return continuationCdataInside;
|
||
}
|
||
if (markdownLineEnding(code2) && (marker === 6 || marker === 7)) {
|
||
effects.exit("htmlFlowData");
|
||
return effects.check(blankLineBefore, continuationAfter, continuationStart)(code2);
|
||
}
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("htmlFlowData");
|
||
return continuationStart(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return continuation;
|
||
}
|
||
function continuationStart(code2) {
|
||
return effects.check(nonLazyContinuationStart, continuationStartNonLazy, continuationAfter)(code2);
|
||
}
|
||
function continuationStartNonLazy(code2) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return continuationBefore;
|
||
}
|
||
function continuationBefore(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
return continuationStart(code2);
|
||
}
|
||
effects.enter("htmlFlowData");
|
||
return continuation(code2);
|
||
}
|
||
function continuationCommentInside(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return continuationDeclarationInside;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationRawTagOpen(code2) {
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
buffer = "";
|
||
return continuationRawEndTag;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationRawEndTag(code2) {
|
||
if (code2 === 62) {
|
||
const name = buffer.toLowerCase();
|
||
if (htmlRawNames.includes(name)) {
|
||
effects.consume(code2);
|
||
return continuationClose;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
if (asciiAlpha(code2) && buffer.length < 8) {
|
||
effects.consume(code2);
|
||
buffer += String.fromCharCode(code2);
|
||
return continuationRawEndTag;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationCdataInside(code2) {
|
||
if (code2 === 93) {
|
||
effects.consume(code2);
|
||
return continuationDeclarationInside;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationDeclarationInside(code2) {
|
||
if (code2 === 62) {
|
||
effects.consume(code2);
|
||
return continuationClose;
|
||
}
|
||
if (code2 === 45 && marker === 2) {
|
||
effects.consume(code2);
|
||
return continuationDeclarationInside;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationClose(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("htmlFlowData");
|
||
return continuationAfter(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return continuationClose;
|
||
}
|
||
function continuationAfter(code2) {
|
||
effects.exit("htmlFlow");
|
||
return ok3(code2);
|
||
}
|
||
}
|
||
function tokenizeNonLazyContinuationStart(effects, ok3, nok) {
|
||
const self2 = this;
|
||
return start;
|
||
function start(code2) {
|
||
if (markdownLineEnding(code2)) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return after;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function after(code2) {
|
||
return self2.parser.lazy[self2.now().line] ? nok(code2) : ok3(code2);
|
||
}
|
||
}
|
||
function tokenizeBlankLineBefore(effects, ok3, nok) {
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return effects.attempt(blankLine, ok3, nok);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/html-text.js
|
||
var htmlText = {
|
||
name: "htmlText",
|
||
tokenize: tokenizeHtmlText
|
||
};
|
||
function tokenizeHtmlText(effects, ok3, nok) {
|
||
const self2 = this;
|
||
let marker;
|
||
let index2;
|
||
let returnState;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("htmlText");
|
||
effects.enter("htmlTextData");
|
||
effects.consume(code2);
|
||
return open;
|
||
}
|
||
function open(code2) {
|
||
if (code2 === 33) {
|
||
effects.consume(code2);
|
||
return declarationOpen;
|
||
}
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
return tagCloseStart;
|
||
}
|
||
if (code2 === 63) {
|
||
effects.consume(code2);
|
||
return instruction;
|
||
}
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpen;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function declarationOpen(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return commentOpenInside;
|
||
}
|
||
if (code2 === 91) {
|
||
effects.consume(code2);
|
||
index2 = 0;
|
||
return cdataOpenInside;
|
||
}
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return declaration;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function commentOpenInside(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return commentEnd;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function comment(code2) {
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return commentClose;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = comment;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return comment;
|
||
}
|
||
function commentClose(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return commentEnd;
|
||
}
|
||
return comment(code2);
|
||
}
|
||
function commentEnd(code2) {
|
||
return code2 === 62 ? end(code2) : code2 === 45 ? commentClose(code2) : comment(code2);
|
||
}
|
||
function cdataOpenInside(code2) {
|
||
const value = "CDATA[";
|
||
if (code2 === value.charCodeAt(index2++)) {
|
||
effects.consume(code2);
|
||
return index2 === value.length ? cdata : cdataOpenInside;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function cdata(code2) {
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 93) {
|
||
effects.consume(code2);
|
||
return cdataClose;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = cdata;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return cdata;
|
||
}
|
||
function cdataClose(code2) {
|
||
if (code2 === 93) {
|
||
effects.consume(code2);
|
||
return cdataEnd;
|
||
}
|
||
return cdata(code2);
|
||
}
|
||
function cdataEnd(code2) {
|
||
if (code2 === 62) {
|
||
return end(code2);
|
||
}
|
||
if (code2 === 93) {
|
||
effects.consume(code2);
|
||
return cdataEnd;
|
||
}
|
||
return cdata(code2);
|
||
}
|
||
function declaration(code2) {
|
||
if (code2 === null || code2 === 62) {
|
||
return end(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = declaration;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return declaration;
|
||
}
|
||
function instruction(code2) {
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 63) {
|
||
effects.consume(code2);
|
||
return instructionClose;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = instruction;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return instruction;
|
||
}
|
||
function instructionClose(code2) {
|
||
return code2 === 62 ? end(code2) : instruction(code2);
|
||
}
|
||
function tagCloseStart(code2) {
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return tagClose;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function tagClose(code2) {
|
||
if (code2 === 45 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
return tagClose;
|
||
}
|
||
return tagCloseBetween(code2);
|
||
}
|
||
function tagCloseBetween(code2) {
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagCloseBetween;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return tagCloseBetween;
|
||
}
|
||
return end(code2);
|
||
}
|
||
function tagOpen(code2) {
|
||
if (code2 === 45 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpen;
|
||
}
|
||
if (code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
|
||
return tagOpenBetween(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function tagOpenBetween(code2) {
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
return end;
|
||
}
|
||
if (code2 === 58 || code2 === 95 || asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeName;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagOpenBetween;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenBetween;
|
||
}
|
||
return end(code2);
|
||
}
|
||
function tagOpenAttributeName(code2) {
|
||
if (code2 === 45 || code2 === 46 || code2 === 58 || code2 === 95 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeName;
|
||
}
|
||
return tagOpenAttributeNameAfter(code2);
|
||
}
|
||
function tagOpenAttributeNameAfter(code2) {
|
||
if (code2 === 61) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueBefore;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagOpenAttributeNameAfter;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeNameAfter;
|
||
}
|
||
return tagOpenBetween(code2);
|
||
}
|
||
function tagOpenAttributeValueBefore(code2) {
|
||
if (code2 === null || code2 === 60 || code2 === 61 || code2 === 62 || code2 === 96) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 34 || code2 === 39) {
|
||
effects.consume(code2);
|
||
marker = code2;
|
||
return tagOpenAttributeValueQuoted;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagOpenAttributeValueBefore;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueBefore;
|
||
}
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueUnquoted;
|
||
}
|
||
function tagOpenAttributeValueQuoted(code2) {
|
||
if (code2 === marker) {
|
||
effects.consume(code2);
|
||
marker = void 0;
|
||
return tagOpenAttributeValueQuotedAfter;
|
||
}
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagOpenAttributeValueQuoted;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueQuoted;
|
||
}
|
||
function tagOpenAttributeValueUnquoted(code2) {
|
||
if (code2 === null || code2 === 34 || code2 === 39 || code2 === 60 || code2 === 61 || code2 === 96) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
|
||
return tagOpenBetween(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueUnquoted;
|
||
}
|
||
function tagOpenAttributeValueQuotedAfter(code2) {
|
||
if (code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
|
||
return tagOpenBetween(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function end(code2) {
|
||
if (code2 === 62) {
|
||
effects.consume(code2);
|
||
effects.exit("htmlTextData");
|
||
effects.exit("htmlText");
|
||
return ok3;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function lineEndingBefore(code2) {
|
||
effects.exit("htmlTextData");
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return lineEndingAfter;
|
||
}
|
||
function lineEndingAfter(code2) {
|
||
return markdownSpace(code2) ? factorySpace(effects, lineEndingAfterPrefix, "linePrefix", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4)(code2) : lineEndingAfterPrefix(code2);
|
||
}
|
||
function lineEndingAfterPrefix(code2) {
|
||
effects.enter("htmlTextData");
|
||
return returnState(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/label-end.js
|
||
var labelEnd = {
|
||
name: "labelEnd",
|
||
resolveAll: resolveAllLabelEnd,
|
||
resolveTo: resolveToLabelEnd,
|
||
tokenize: tokenizeLabelEnd
|
||
};
|
||
var resourceConstruct = {
|
||
tokenize: tokenizeResource
|
||
};
|
||
var referenceFullConstruct = {
|
||
tokenize: tokenizeReferenceFull
|
||
};
|
||
var referenceCollapsedConstruct = {
|
||
tokenize: tokenizeReferenceCollapsed
|
||
};
|
||
function resolveAllLabelEnd(events) {
|
||
let index2 = -1;
|
||
const newEvents = [];
|
||
while (++index2 < events.length) {
|
||
const token = events[index2][1];
|
||
newEvents.push(events[index2]);
|
||
if (token.type === "labelImage" || token.type === "labelLink" || token.type === "labelEnd") {
|
||
const offset = token.type === "labelImage" ? 4 : 2;
|
||
token.type = "data";
|
||
index2 += offset;
|
||
}
|
||
}
|
||
if (events.length !== newEvents.length) {
|
||
splice(events, 0, events.length, newEvents);
|
||
}
|
||
return events;
|
||
}
|
||
function resolveToLabelEnd(events, context) {
|
||
let index2 = events.length;
|
||
let offset = 0;
|
||
let token;
|
||
let open;
|
||
let close;
|
||
let media;
|
||
while (index2--) {
|
||
token = events[index2][1];
|
||
if (open) {
|
||
if (token.type === "link" || token.type === "labelLink" && token._inactive) {
|
||
break;
|
||
}
|
||
if (events[index2][0] === "enter" && token.type === "labelLink") {
|
||
token._inactive = true;
|
||
}
|
||
} else if (close) {
|
||
if (events[index2][0] === "enter" && (token.type === "labelImage" || token.type === "labelLink") && !token._balanced) {
|
||
open = index2;
|
||
if (token.type !== "labelLink") {
|
||
offset = 2;
|
||
break;
|
||
}
|
||
}
|
||
} else if (token.type === "labelEnd") {
|
||
close = index2;
|
||
}
|
||
}
|
||
const group = {
|
||
type: events[open][1].type === "labelLink" ? "link" : "image",
|
||
start: __spreadValues({}, events[open][1].start),
|
||
end: __spreadValues({}, events[events.length - 1][1].end)
|
||
};
|
||
const label = {
|
||
type: "label",
|
||
start: __spreadValues({}, events[open][1].start),
|
||
end: __spreadValues({}, events[close][1].end)
|
||
};
|
||
const text4 = {
|
||
type: "labelText",
|
||
start: __spreadValues({}, events[open + offset + 2][1].end),
|
||
end: __spreadValues({}, events[close - 2][1].start)
|
||
};
|
||
media = [["enter", group, context], ["enter", label, context]];
|
||
media = push(media, events.slice(open + 1, open + offset + 3));
|
||
media = push(media, [["enter", text4, context]]);
|
||
media = push(media, resolveAll(context.parser.constructs.insideSpan.null, events.slice(open + offset + 4, close - 3), context));
|
||
media = push(media, [["exit", text4, context], events[close - 2], events[close - 1], ["exit", label, context]]);
|
||
media = push(media, events.slice(close + 1));
|
||
media = push(media, [["exit", group, context]]);
|
||
splice(events, open, events.length, media);
|
||
return events;
|
||
}
|
||
function tokenizeLabelEnd(effects, ok3, nok) {
|
||
const self2 = this;
|
||
let index2 = self2.events.length;
|
||
let labelStart;
|
||
let defined;
|
||
while (index2--) {
|
||
if ((self2.events[index2][1].type === "labelImage" || self2.events[index2][1].type === "labelLink") && !self2.events[index2][1]._balanced) {
|
||
labelStart = self2.events[index2][1];
|
||
break;
|
||
}
|
||
}
|
||
return start;
|
||
function start(code2) {
|
||
if (!labelStart) {
|
||
return nok(code2);
|
||
}
|
||
if (labelStart._inactive) {
|
||
return labelEndNok(code2);
|
||
}
|
||
defined = self2.parser.defined.includes(normalizeIdentifier(self2.sliceSerialize({
|
||
start: labelStart.end,
|
||
end: self2.now()
|
||
})));
|
||
effects.enter("labelEnd");
|
||
effects.enter("labelMarker");
|
||
effects.consume(code2);
|
||
effects.exit("labelMarker");
|
||
effects.exit("labelEnd");
|
||
return after;
|
||
}
|
||
function after(code2) {
|
||
if (code2 === 40) {
|
||
return effects.attempt(resourceConstruct, labelEndOk, defined ? labelEndOk : labelEndNok)(code2);
|
||
}
|
||
if (code2 === 91) {
|
||
return effects.attempt(referenceFullConstruct, labelEndOk, defined ? referenceNotFull : labelEndNok)(code2);
|
||
}
|
||
return defined ? labelEndOk(code2) : labelEndNok(code2);
|
||
}
|
||
function referenceNotFull(code2) {
|
||
return effects.attempt(referenceCollapsedConstruct, labelEndOk, labelEndNok)(code2);
|
||
}
|
||
function labelEndOk(code2) {
|
||
return ok3(code2);
|
||
}
|
||
function labelEndNok(code2) {
|
||
labelStart._balanced = true;
|
||
return nok(code2);
|
||
}
|
||
}
|
||
function tokenizeResource(effects, ok3, nok) {
|
||
return resourceStart;
|
||
function resourceStart(code2) {
|
||
effects.enter("resource");
|
||
effects.enter("resourceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("resourceMarker");
|
||
return resourceBefore;
|
||
}
|
||
function resourceBefore(code2) {
|
||
return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, resourceOpen)(code2) : resourceOpen(code2);
|
||
}
|
||
function resourceOpen(code2) {
|
||
if (code2 === 41) {
|
||
return resourceEnd(code2);
|
||
}
|
||
return factoryDestination(effects, resourceDestinationAfter, resourceDestinationMissing, "resourceDestination", "resourceDestinationLiteral", "resourceDestinationLiteralMarker", "resourceDestinationRaw", "resourceDestinationString", 32)(code2);
|
||
}
|
||
function resourceDestinationAfter(code2) {
|
||
return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, resourceBetween)(code2) : resourceEnd(code2);
|
||
}
|
||
function resourceDestinationMissing(code2) {
|
||
return nok(code2);
|
||
}
|
||
function resourceBetween(code2) {
|
||
if (code2 === 34 || code2 === 39 || code2 === 40) {
|
||
return factoryTitle(effects, resourceTitleAfter, nok, "resourceTitle", "resourceTitleMarker", "resourceTitleString")(code2);
|
||
}
|
||
return resourceEnd(code2);
|
||
}
|
||
function resourceTitleAfter(code2) {
|
||
return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, resourceEnd)(code2) : resourceEnd(code2);
|
||
}
|
||
function resourceEnd(code2) {
|
||
if (code2 === 41) {
|
||
effects.enter("resourceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("resourceMarker");
|
||
effects.exit("resource");
|
||
return ok3;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
function tokenizeReferenceFull(effects, ok3, nok) {
|
||
const self2 = this;
|
||
return referenceFull;
|
||
function referenceFull(code2) {
|
||
return factoryLabel.call(self2, effects, referenceFullAfter, referenceFullMissing, "reference", "referenceMarker", "referenceString")(code2);
|
||
}
|
||
function referenceFullAfter(code2) {
|
||
return self2.parser.defined.includes(normalizeIdentifier(self2.sliceSerialize(self2.events[self2.events.length - 1][1]).slice(1, -1))) ? ok3(code2) : nok(code2);
|
||
}
|
||
function referenceFullMissing(code2) {
|
||
return nok(code2);
|
||
}
|
||
}
|
||
function tokenizeReferenceCollapsed(effects, ok3, nok) {
|
||
return referenceCollapsedStart;
|
||
function referenceCollapsedStart(code2) {
|
||
effects.enter("reference");
|
||
effects.enter("referenceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("referenceMarker");
|
||
return referenceCollapsedOpen;
|
||
}
|
||
function referenceCollapsedOpen(code2) {
|
||
if (code2 === 93) {
|
||
effects.enter("referenceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("referenceMarker");
|
||
effects.exit("reference");
|
||
return ok3;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/label-start-image.js
|
||
var labelStartImage = {
|
||
name: "labelStartImage",
|
||
resolveAll: labelEnd.resolveAll,
|
||
tokenize: tokenizeLabelStartImage
|
||
};
|
||
function tokenizeLabelStartImage(effects, ok3, nok) {
|
||
const self2 = this;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("labelImage");
|
||
effects.enter("labelImageMarker");
|
||
effects.consume(code2);
|
||
effects.exit("labelImageMarker");
|
||
return open;
|
||
}
|
||
function open(code2) {
|
||
if (code2 === 91) {
|
||
effects.enter("labelMarker");
|
||
effects.consume(code2);
|
||
effects.exit("labelMarker");
|
||
effects.exit("labelImage");
|
||
return after;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function after(code2) {
|
||
return code2 === 94 && "_hiddenFootnoteSupport" in self2.parser.constructs ? nok(code2) : ok3(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/label-start-link.js
|
||
var labelStartLink = {
|
||
name: "labelStartLink",
|
||
resolveAll: labelEnd.resolveAll,
|
||
tokenize: tokenizeLabelStartLink
|
||
};
|
||
function tokenizeLabelStartLink(effects, ok3, nok) {
|
||
const self2 = this;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("labelLink");
|
||
effects.enter("labelMarker");
|
||
effects.consume(code2);
|
||
effects.exit("labelMarker");
|
||
effects.exit("labelLink");
|
||
return after;
|
||
}
|
||
function after(code2) {
|
||
return code2 === 94 && "_hiddenFootnoteSupport" in self2.parser.constructs ? nok(code2) : ok3(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/line-ending.js
|
||
var lineEnding = {
|
||
name: "lineEnding",
|
||
tokenize: tokenizeLineEnding
|
||
};
|
||
function tokenizeLineEnding(effects, ok3) {
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return factorySpace(effects, ok3, "linePrefix");
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/thematic-break.js
|
||
var thematicBreak = {
|
||
name: "thematicBreak",
|
||
tokenize: tokenizeThematicBreak
|
||
};
|
||
function tokenizeThematicBreak(effects, ok3, nok) {
|
||
let size = 0;
|
||
let marker;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("thematicBreak");
|
||
return before(code2);
|
||
}
|
||
function before(code2) {
|
||
marker = code2;
|
||
return atBreak(code2);
|
||
}
|
||
function atBreak(code2) {
|
||
if (code2 === marker) {
|
||
effects.enter("thematicBreakSequence");
|
||
return sequence(code2);
|
||
}
|
||
if (size >= 3 && (code2 === null || markdownLineEnding(code2))) {
|
||
effects.exit("thematicBreak");
|
||
return ok3(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function sequence(code2) {
|
||
if (code2 === marker) {
|
||
effects.consume(code2);
|
||
size++;
|
||
return sequence;
|
||
}
|
||
effects.exit("thematicBreakSequence");
|
||
return markdownSpace(code2) ? factorySpace(effects, atBreak, "whitespace")(code2) : atBreak(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/list.js
|
||
var list = {
|
||
continuation: {
|
||
tokenize: tokenizeListContinuation
|
||
},
|
||
exit: tokenizeListEnd,
|
||
name: "list",
|
||
tokenize: tokenizeListStart
|
||
};
|
||
var listItemPrefixWhitespaceConstruct = {
|
||
partial: true,
|
||
tokenize: tokenizeListItemPrefixWhitespace
|
||
};
|
||
var indentConstruct = {
|
||
partial: true,
|
||
tokenize: tokenizeIndent
|
||
};
|
||
function tokenizeListStart(effects, ok3, nok) {
|
||
const self2 = this;
|
||
const tail = self2.events[self2.events.length - 1];
|
||
let initialSize = tail && tail[1].type === "linePrefix" ? tail[2].sliceSerialize(tail[1], true).length : 0;
|
||
let size = 0;
|
||
return start;
|
||
function start(code2) {
|
||
const kind = self2.containerState.type || (code2 === 42 || code2 === 43 || code2 === 45 ? "listUnordered" : "listOrdered");
|
||
if (kind === "listUnordered" ? !self2.containerState.marker || code2 === self2.containerState.marker : asciiDigit(code2)) {
|
||
if (!self2.containerState.type) {
|
||
self2.containerState.type = kind;
|
||
effects.enter(kind, {
|
||
_container: true
|
||
});
|
||
}
|
||
if (kind === "listUnordered") {
|
||
effects.enter("listItemPrefix");
|
||
return code2 === 42 || code2 === 45 ? effects.check(thematicBreak, nok, atMarker)(code2) : atMarker(code2);
|
||
}
|
||
if (!self2.interrupt || code2 === 49) {
|
||
effects.enter("listItemPrefix");
|
||
effects.enter("listItemValue");
|
||
return inside(code2);
|
||
}
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function inside(code2) {
|
||
if (asciiDigit(code2) && ++size < 10) {
|
||
effects.consume(code2);
|
||
return inside;
|
||
}
|
||
if ((!self2.interrupt || size < 2) && (self2.containerState.marker ? code2 === self2.containerState.marker : code2 === 41 || code2 === 46)) {
|
||
effects.exit("listItemValue");
|
||
return atMarker(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function atMarker(code2) {
|
||
effects.enter("listItemMarker");
|
||
effects.consume(code2);
|
||
effects.exit("listItemMarker");
|
||
self2.containerState.marker = self2.containerState.marker || code2;
|
||
return effects.check(
|
||
blankLine,
|
||
// Can’t be empty when interrupting.
|
||
self2.interrupt ? nok : onBlank,
|
||
effects.attempt(listItemPrefixWhitespaceConstruct, endOfPrefix, otherPrefix)
|
||
);
|
||
}
|
||
function onBlank(code2) {
|
||
self2.containerState.initialBlankLine = true;
|
||
initialSize++;
|
||
return endOfPrefix(code2);
|
||
}
|
||
function otherPrefix(code2) {
|
||
if (markdownSpace(code2)) {
|
||
effects.enter("listItemPrefixWhitespace");
|
||
effects.consume(code2);
|
||
effects.exit("listItemPrefixWhitespace");
|
||
return endOfPrefix;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function endOfPrefix(code2) {
|
||
self2.containerState.size = initialSize + self2.sliceSerialize(effects.exit("listItemPrefix"), true).length;
|
||
return ok3(code2);
|
||
}
|
||
}
|
||
function tokenizeListContinuation(effects, ok3, nok) {
|
||
const self2 = this;
|
||
self2.containerState._closeFlow = void 0;
|
||
return effects.check(blankLine, onBlank, notBlank);
|
||
function onBlank(code2) {
|
||
self2.containerState.furtherBlankLines = self2.containerState.furtherBlankLines || self2.containerState.initialBlankLine;
|
||
return factorySpace(effects, ok3, "listItemIndent", self2.containerState.size + 1)(code2);
|
||
}
|
||
function notBlank(code2) {
|
||
if (self2.containerState.furtherBlankLines || !markdownSpace(code2)) {
|
||
self2.containerState.furtherBlankLines = void 0;
|
||
self2.containerState.initialBlankLine = void 0;
|
||
return notInCurrentItem(code2);
|
||
}
|
||
self2.containerState.furtherBlankLines = void 0;
|
||
self2.containerState.initialBlankLine = void 0;
|
||
return effects.attempt(indentConstruct, ok3, notInCurrentItem)(code2);
|
||
}
|
||
function notInCurrentItem(code2) {
|
||
self2.containerState._closeFlow = true;
|
||
self2.interrupt = void 0;
|
||
return factorySpace(effects, effects.attempt(list, ok3, nok), "linePrefix", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4)(code2);
|
||
}
|
||
}
|
||
function tokenizeIndent(effects, ok3, nok) {
|
||
const self2 = this;
|
||
return factorySpace(effects, afterPrefix, "listItemIndent", self2.containerState.size + 1);
|
||
function afterPrefix(code2) {
|
||
const tail = self2.events[self2.events.length - 1];
|
||
return tail && tail[1].type === "listItemIndent" && tail[2].sliceSerialize(tail[1], true).length === self2.containerState.size ? ok3(code2) : nok(code2);
|
||
}
|
||
}
|
||
function tokenizeListEnd(effects) {
|
||
effects.exit(this.containerState.type);
|
||
}
|
||
function tokenizeListItemPrefixWhitespace(effects, ok3, nok) {
|
||
const self2 = this;
|
||
return factorySpace(effects, afterPrefix, "listItemPrefixWhitespace", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4 + 1);
|
||
function afterPrefix(code2) {
|
||
const tail = self2.events[self2.events.length - 1];
|
||
return !markdownSpace(code2) && tail && tail[1].type === "listItemPrefixWhitespace" ? ok3(code2) : nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/setext-underline.js
|
||
var setextUnderline = {
|
||
name: "setextUnderline",
|
||
resolveTo: resolveToSetextUnderline,
|
||
tokenize: tokenizeSetextUnderline
|
||
};
|
||
function resolveToSetextUnderline(events, context) {
|
||
let index2 = events.length;
|
||
let content3;
|
||
let text4;
|
||
let definition3;
|
||
while (index2--) {
|
||
if (events[index2][0] === "enter") {
|
||
if (events[index2][1].type === "content") {
|
||
content3 = index2;
|
||
break;
|
||
}
|
||
if (events[index2][1].type === "paragraph") {
|
||
text4 = index2;
|
||
}
|
||
} else {
|
||
if (events[index2][1].type === "content") {
|
||
events.splice(index2, 1);
|
||
}
|
||
if (!definition3 && events[index2][1].type === "definition") {
|
||
definition3 = index2;
|
||
}
|
||
}
|
||
}
|
||
const heading2 = {
|
||
type: "setextHeading",
|
||
start: __spreadValues({}, events[text4][1].start),
|
||
end: __spreadValues({}, events[events.length - 1][1].end)
|
||
};
|
||
events[text4][1].type = "setextHeadingText";
|
||
if (definition3) {
|
||
events.splice(text4, 0, ["enter", heading2, context]);
|
||
events.splice(definition3 + 1, 0, ["exit", events[content3][1], context]);
|
||
events[content3][1].end = __spreadValues({}, events[definition3][1].end);
|
||
} else {
|
||
events[content3][1] = heading2;
|
||
}
|
||
events.push(["exit", heading2, context]);
|
||
return events;
|
||
}
|
||
function tokenizeSetextUnderline(effects, ok3, nok) {
|
||
const self2 = this;
|
||
let marker;
|
||
return start;
|
||
function start(code2) {
|
||
let index2 = self2.events.length;
|
||
let paragraph2;
|
||
while (index2--) {
|
||
if (self2.events[index2][1].type !== "lineEnding" && self2.events[index2][1].type !== "linePrefix" && self2.events[index2][1].type !== "content") {
|
||
paragraph2 = self2.events[index2][1].type === "paragraph";
|
||
break;
|
||
}
|
||
}
|
||
if (!self2.parser.lazy[self2.now().line] && (self2.interrupt || paragraph2)) {
|
||
effects.enter("setextHeadingLine");
|
||
marker = code2;
|
||
return before(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function before(code2) {
|
||
effects.enter("setextHeadingLineSequence");
|
||
return inside(code2);
|
||
}
|
||
function inside(code2) {
|
||
if (code2 === marker) {
|
||
effects.consume(code2);
|
||
return inside;
|
||
}
|
||
effects.exit("setextHeadingLineSequence");
|
||
return markdownSpace(code2) ? factorySpace(effects, after, "lineSuffix")(code2) : after(code2);
|
||
}
|
||
function after(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("setextHeadingLine");
|
||
return ok3(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark/lib/initialize/flow.js
|
||
var flow = {
|
||
tokenize: initializeFlow
|
||
};
|
||
function initializeFlow(effects) {
|
||
const self2 = this;
|
||
const initial = effects.attempt(
|
||
// Try to parse a blank line.
|
||
blankLine,
|
||
atBlankEnding,
|
||
// Try to parse initial flow (essentially, only code).
|
||
effects.attempt(this.parser.constructs.flowInitial, afterConstruct, factorySpace(effects, effects.attempt(this.parser.constructs.flow, afterConstruct, effects.attempt(content2, afterConstruct)), "linePrefix"))
|
||
);
|
||
return initial;
|
||
function atBlankEnding(code2) {
|
||
if (code2 === null) {
|
||
effects.consume(code2);
|
||
return;
|
||
}
|
||
effects.enter("lineEndingBlank");
|
||
effects.consume(code2);
|
||
effects.exit("lineEndingBlank");
|
||
self2.currentConstruct = void 0;
|
||
return initial;
|
||
}
|
||
function afterConstruct(code2) {
|
||
if (code2 === null) {
|
||
effects.consume(code2);
|
||
return;
|
||
}
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
self2.currentConstruct = void 0;
|
||
return initial;
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark/lib/initialize/text.js
|
||
var resolver = {
|
||
resolveAll: createResolver()
|
||
};
|
||
var string = initializeFactory("string");
|
||
var text = initializeFactory("text");
|
||
function initializeFactory(field) {
|
||
return {
|
||
resolveAll: createResolver(field === "text" ? resolveAllLineSuffixes : void 0),
|
||
tokenize: initializeText
|
||
};
|
||
function initializeText(effects) {
|
||
const self2 = this;
|
||
const constructs2 = this.parser.constructs[field];
|
||
const text4 = effects.attempt(constructs2, start, notText);
|
||
return start;
|
||
function start(code2) {
|
||
return atBreak(code2) ? text4(code2) : notText(code2);
|
||
}
|
||
function notText(code2) {
|
||
if (code2 === null) {
|
||
effects.consume(code2);
|
||
return;
|
||
}
|
||
effects.enter("data");
|
||
effects.consume(code2);
|
||
return data;
|
||
}
|
||
function data(code2) {
|
||
if (atBreak(code2)) {
|
||
effects.exit("data");
|
||
return text4(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return data;
|
||
}
|
||
function atBreak(code2) {
|
||
if (code2 === null) {
|
||
return true;
|
||
}
|
||
const list4 = constructs2[code2];
|
||
let index2 = -1;
|
||
if (list4) {
|
||
while (++index2 < list4.length) {
|
||
const item = list4[index2];
|
||
if (!item.previous || item.previous.call(self2, self2.previous)) {
|
||
return true;
|
||
}
|
||
}
|
||
}
|
||
return false;
|
||
}
|
||
}
|
||
}
|
||
function createResolver(extraResolver) {
|
||
return resolveAllText;
|
||
function resolveAllText(events, context) {
|
||
let index2 = -1;
|
||
let enter;
|
||
while (++index2 <= events.length) {
|
||
if (enter === void 0) {
|
||
if (events[index2] && events[index2][1].type === "data") {
|
||
enter = index2;
|
||
index2++;
|
||
}
|
||
} else if (!events[index2] || events[index2][1].type !== "data") {
|
||
if (index2 !== enter + 2) {
|
||
events[enter][1].end = events[index2 - 1][1].end;
|
||
events.splice(enter + 2, index2 - enter - 2);
|
||
index2 = enter + 2;
|
||
}
|
||
enter = void 0;
|
||
}
|
||
}
|
||
return extraResolver ? extraResolver(events, context) : events;
|
||
}
|
||
}
|
||
function resolveAllLineSuffixes(events, context) {
|
||
let eventIndex = 0;
|
||
while (++eventIndex <= events.length) {
|
||
if ((eventIndex === events.length || events[eventIndex][1].type === "lineEnding") && events[eventIndex - 1][1].type === "data") {
|
||
const data = events[eventIndex - 1][1];
|
||
const chunks = context.sliceStream(data);
|
||
let index2 = chunks.length;
|
||
let bufferIndex = -1;
|
||
let size = 0;
|
||
let tabs;
|
||
while (index2--) {
|
||
const chunk = chunks[index2];
|
||
if (typeof chunk === "string") {
|
||
bufferIndex = chunk.length;
|
||
while (chunk.charCodeAt(bufferIndex - 1) === 32) {
|
||
size++;
|
||
bufferIndex--;
|
||
}
|
||
if (bufferIndex) break;
|
||
bufferIndex = -1;
|
||
} else if (chunk === -2) {
|
||
tabs = true;
|
||
size++;
|
||
} else if (chunk === -1) {
|
||
} else {
|
||
index2++;
|
||
break;
|
||
}
|
||
}
|
||
if (size) {
|
||
const token = {
|
||
type: eventIndex === events.length || tabs || size < 2 ? "lineSuffix" : "hardBreakTrailing",
|
||
start: {
|
||
_bufferIndex: index2 ? bufferIndex : data.start._bufferIndex + bufferIndex,
|
||
_index: data.start._index + index2,
|
||
line: data.end.line,
|
||
column: data.end.column - size,
|
||
offset: data.end.offset - size
|
||
},
|
||
end: __spreadValues({}, data.end)
|
||
};
|
||
data.end = __spreadValues({}, token.start);
|
||
if (data.start.offset === data.end.offset) {
|
||
Object.assign(data, token);
|
||
} else {
|
||
events.splice(eventIndex, 0, ["enter", token, context], ["exit", token, context]);
|
||
eventIndex += 2;
|
||
}
|
||
}
|
||
eventIndex++;
|
||
}
|
||
}
|
||
return events;
|
||
}
|
||
|
||
// node_modules/micromark/lib/constructs.js
|
||
var constructs_exports = {};
|
||
__export(constructs_exports, {
|
||
attentionMarkers: () => attentionMarkers,
|
||
contentInitial: () => contentInitial,
|
||
disable: () => disable,
|
||
document: () => document3,
|
||
flow: () => flow2,
|
||
flowInitial: () => flowInitial,
|
||
insideSpan: () => insideSpan,
|
||
string: () => string2,
|
||
text: () => text2
|
||
});
|
||
var document3 = {
|
||
[42]: list,
|
||
[43]: list,
|
||
[45]: list,
|
||
[48]: list,
|
||
[49]: list,
|
||
[50]: list,
|
||
[51]: list,
|
||
[52]: list,
|
||
[53]: list,
|
||
[54]: list,
|
||
[55]: list,
|
||
[56]: list,
|
||
[57]: list,
|
||
[62]: blockQuote
|
||
};
|
||
var contentInitial = {
|
||
[91]: definition
|
||
};
|
||
var flowInitial = {
|
||
[-2]: codeIndented,
|
||
[-1]: codeIndented,
|
||
[32]: codeIndented
|
||
};
|
||
var flow2 = {
|
||
[35]: headingAtx,
|
||
[42]: thematicBreak,
|
||
[45]: [setextUnderline, thematicBreak],
|
||
[60]: htmlFlow,
|
||
[61]: setextUnderline,
|
||
[95]: thematicBreak,
|
||
[96]: codeFenced,
|
||
[126]: codeFenced
|
||
};
|
||
var string2 = {
|
||
[38]: characterReference,
|
||
[92]: characterEscape
|
||
};
|
||
var text2 = {
|
||
[-5]: lineEnding,
|
||
[-4]: lineEnding,
|
||
[-3]: lineEnding,
|
||
[33]: labelStartImage,
|
||
[38]: characterReference,
|
||
[42]: attention,
|
||
[60]: [autolink, htmlText],
|
||
[91]: labelStartLink,
|
||
[92]: [hardBreakEscape, characterEscape],
|
||
[93]: labelEnd,
|
||
[95]: attention,
|
||
[96]: codeText
|
||
};
|
||
var insideSpan = {
|
||
null: [attention, resolver]
|
||
};
|
||
var attentionMarkers = {
|
||
null: [42, 95]
|
||
};
|
||
var disable = {
|
||
null: []
|
||
};
|
||
|
||
// node_modules/micromark/lib/create-tokenizer.js
|
||
function createTokenizer(parser, initialize, from) {
|
||
let point3 = {
|
||
_bufferIndex: -1,
|
||
_index: 0,
|
||
line: from && from.line || 1,
|
||
column: from && from.column || 1,
|
||
offset: from && from.offset || 0
|
||
};
|
||
const columnStart = {};
|
||
const resolveAllConstructs = [];
|
||
let chunks = [];
|
||
let stack = [];
|
||
let consumed = true;
|
||
const effects = {
|
||
attempt: constructFactory(onsuccessfulconstruct),
|
||
check: constructFactory(onsuccessfulcheck),
|
||
consume,
|
||
enter,
|
||
exit: exit2,
|
||
interrupt: constructFactory(onsuccessfulcheck, {
|
||
interrupt: true
|
||
})
|
||
};
|
||
const context = {
|
||
code: null,
|
||
containerState: {},
|
||
defineSkip,
|
||
events: [],
|
||
now,
|
||
parser,
|
||
previous: null,
|
||
sliceSerialize,
|
||
sliceStream,
|
||
write
|
||
};
|
||
let state = initialize.tokenize.call(context, effects);
|
||
let expectedCode;
|
||
if (initialize.resolveAll) {
|
||
resolveAllConstructs.push(initialize);
|
||
}
|
||
return context;
|
||
function write(slice) {
|
||
chunks = push(chunks, slice);
|
||
main();
|
||
if (chunks[chunks.length - 1] !== null) {
|
||
return [];
|
||
}
|
||
addResult(initialize, 0);
|
||
context.events = resolveAll(resolveAllConstructs, context.events, context);
|
||
return context.events;
|
||
}
|
||
function sliceSerialize(token, expandTabs) {
|
||
return serializeChunks(sliceStream(token), expandTabs);
|
||
}
|
||
function sliceStream(token) {
|
||
return sliceChunks(chunks, token);
|
||
}
|
||
function now() {
|
||
const {
|
||
_bufferIndex,
|
||
_index,
|
||
line,
|
||
column,
|
||
offset
|
||
} = point3;
|
||
return {
|
||
_bufferIndex,
|
||
_index,
|
||
line,
|
||
column,
|
||
offset
|
||
};
|
||
}
|
||
function defineSkip(value) {
|
||
columnStart[value.line] = value.column;
|
||
accountForPotentialSkip();
|
||
}
|
||
function main() {
|
||
let chunkIndex;
|
||
while (point3._index < chunks.length) {
|
||
const chunk = chunks[point3._index];
|
||
if (typeof chunk === "string") {
|
||
chunkIndex = point3._index;
|
||
if (point3._bufferIndex < 0) {
|
||
point3._bufferIndex = 0;
|
||
}
|
||
while (point3._index === chunkIndex && point3._bufferIndex < chunk.length) {
|
||
go(chunk.charCodeAt(point3._bufferIndex));
|
||
}
|
||
} else {
|
||
go(chunk);
|
||
}
|
||
}
|
||
}
|
||
function go(code2) {
|
||
consumed = void 0;
|
||
expectedCode = code2;
|
||
state = state(code2);
|
||
}
|
||
function consume(code2) {
|
||
if (markdownLineEnding(code2)) {
|
||
point3.line++;
|
||
point3.column = 1;
|
||
point3.offset += code2 === -3 ? 2 : 1;
|
||
accountForPotentialSkip();
|
||
} else if (code2 !== -1) {
|
||
point3.column++;
|
||
point3.offset++;
|
||
}
|
||
if (point3._bufferIndex < 0) {
|
||
point3._index++;
|
||
} else {
|
||
point3._bufferIndex++;
|
||
if (point3._bufferIndex === // Points w/ non-negative `_bufferIndex` reference
|
||
// strings.
|
||
/** @type {string} */
|
||
chunks[point3._index].length) {
|
||
point3._bufferIndex = -1;
|
||
point3._index++;
|
||
}
|
||
}
|
||
context.previous = code2;
|
||
consumed = true;
|
||
}
|
||
function enter(type, fields) {
|
||
const token = fields || {};
|
||
token.type = type;
|
||
token.start = now();
|
||
context.events.push(["enter", token, context]);
|
||
stack.push(token);
|
||
return token;
|
||
}
|
||
function exit2(type) {
|
||
const token = stack.pop();
|
||
token.end = now();
|
||
context.events.push(["exit", token, context]);
|
||
return token;
|
||
}
|
||
function onsuccessfulconstruct(construct, info) {
|
||
addResult(construct, info.from);
|
||
}
|
||
function onsuccessfulcheck(_, info) {
|
||
info.restore();
|
||
}
|
||
function constructFactory(onreturn, fields) {
|
||
return hook2;
|
||
function hook2(constructs2, returnState, bogusState) {
|
||
let listOfConstructs;
|
||
let constructIndex;
|
||
let currentConstruct;
|
||
let info;
|
||
return Array.isArray(constructs2) ? (
|
||
/* c8 ignore next 1 */
|
||
handleListOfConstructs(constructs2)
|
||
) : "tokenize" in constructs2 ? (
|
||
// Looks like a construct.
|
||
handleListOfConstructs([
|
||
/** @type {Construct} */
|
||
constructs2
|
||
])
|
||
) : handleMapOfConstructs(constructs2);
|
||
function handleMapOfConstructs(map4) {
|
||
return start;
|
||
function start(code2) {
|
||
const left = code2 !== null && map4[code2];
|
||
const all3 = code2 !== null && map4.null;
|
||
const list4 = [
|
||
// To do: add more extension tests.
|
||
/* c8 ignore next 2 */
|
||
...Array.isArray(left) ? left : left ? [left] : [],
|
||
...Array.isArray(all3) ? all3 : all3 ? [all3] : []
|
||
];
|
||
return handleListOfConstructs(list4)(code2);
|
||
}
|
||
}
|
||
function handleListOfConstructs(list4) {
|
||
listOfConstructs = list4;
|
||
constructIndex = 0;
|
||
if (list4.length === 0) {
|
||
return bogusState;
|
||
}
|
||
return handleConstruct(list4[constructIndex]);
|
||
}
|
||
function handleConstruct(construct) {
|
||
return start;
|
||
function start(code2) {
|
||
info = store();
|
||
currentConstruct = construct;
|
||
if (!construct.partial) {
|
||
context.currentConstruct = construct;
|
||
}
|
||
if (construct.name && context.parser.constructs.disable.null.includes(construct.name)) {
|
||
return nok(code2);
|
||
}
|
||
return construct.tokenize.call(
|
||
// If we do have fields, create an object w/ `context` as its
|
||
// prototype.
|
||
// This allows a “live binding”, which is needed for `interrupt`.
|
||
fields ? Object.assign(Object.create(context), fields) : context,
|
||
effects,
|
||
ok3,
|
||
nok
|
||
)(code2);
|
||
}
|
||
}
|
||
function ok3(code2) {
|
||
consumed = true;
|
||
onreturn(currentConstruct, info);
|
||
return returnState;
|
||
}
|
||
function nok(code2) {
|
||
consumed = true;
|
||
info.restore();
|
||
if (++constructIndex < listOfConstructs.length) {
|
||
return handleConstruct(listOfConstructs[constructIndex]);
|
||
}
|
||
return bogusState;
|
||
}
|
||
}
|
||
}
|
||
function addResult(construct, from2) {
|
||
if (construct.resolveAll && !resolveAllConstructs.includes(construct)) {
|
||
resolveAllConstructs.push(construct);
|
||
}
|
||
if (construct.resolve) {
|
||
splice(context.events, from2, context.events.length - from2, construct.resolve(context.events.slice(from2), context));
|
||
}
|
||
if (construct.resolveTo) {
|
||
context.events = construct.resolveTo(context.events, context);
|
||
}
|
||
}
|
||
function store() {
|
||
const startPoint = now();
|
||
const startPrevious = context.previous;
|
||
const startCurrentConstruct = context.currentConstruct;
|
||
const startEventsIndex = context.events.length;
|
||
const startStack = Array.from(stack);
|
||
return {
|
||
from: startEventsIndex,
|
||
restore
|
||
};
|
||
function restore() {
|
||
point3 = startPoint;
|
||
context.previous = startPrevious;
|
||
context.currentConstruct = startCurrentConstruct;
|
||
context.events.length = startEventsIndex;
|
||
stack = startStack;
|
||
accountForPotentialSkip();
|
||
}
|
||
}
|
||
function accountForPotentialSkip() {
|
||
if (point3.line in columnStart && point3.column < 2) {
|
||
point3.column = columnStart[point3.line];
|
||
point3.offset += columnStart[point3.line] - 1;
|
||
}
|
||
}
|
||
}
|
||
function sliceChunks(chunks, token) {
|
||
const startIndex = token.start._index;
|
||
const startBufferIndex = token.start._bufferIndex;
|
||
const endIndex = token.end._index;
|
||
const endBufferIndex = token.end._bufferIndex;
|
||
let view;
|
||
if (startIndex === endIndex) {
|
||
view = [chunks[startIndex].slice(startBufferIndex, endBufferIndex)];
|
||
} else {
|
||
view = chunks.slice(startIndex, endIndex);
|
||
if (startBufferIndex > -1) {
|
||
const head = view[0];
|
||
if (typeof head === "string") {
|
||
view[0] = head.slice(startBufferIndex);
|
||
} else {
|
||
view.shift();
|
||
}
|
||
}
|
||
if (endBufferIndex > 0) {
|
||
view.push(chunks[endIndex].slice(0, endBufferIndex));
|
||
}
|
||
}
|
||
return view;
|
||
}
|
||
function serializeChunks(chunks, expandTabs) {
|
||
let index2 = -1;
|
||
const result = [];
|
||
let atTab;
|
||
while (++index2 < chunks.length) {
|
||
const chunk = chunks[index2];
|
||
let value;
|
||
if (typeof chunk === "string") {
|
||
value = chunk;
|
||
} else switch (chunk) {
|
||
case -5: {
|
||
value = "\r";
|
||
break;
|
||
}
|
||
case -4: {
|
||
value = "\n";
|
||
break;
|
||
}
|
||
case -3: {
|
||
value = "\r\n";
|
||
break;
|
||
}
|
||
case -2: {
|
||
value = expandTabs ? " " : " ";
|
||
break;
|
||
}
|
||
case -1: {
|
||
if (!expandTabs && atTab) continue;
|
||
value = " ";
|
||
break;
|
||
}
|
||
default: {
|
||
value = String.fromCharCode(chunk);
|
||
}
|
||
}
|
||
atTab = chunk === -2;
|
||
result.push(value);
|
||
}
|
||
return result.join("");
|
||
}
|
||
|
||
// node_modules/micromark/lib/parse.js
|
||
function parse2(options2) {
|
||
const settings = options2 || {};
|
||
const constructs2 = (
|
||
/** @type {FullNormalizedExtension} */
|
||
combineExtensions([constructs_exports, ...settings.extensions || []])
|
||
);
|
||
const parser = {
|
||
constructs: constructs2,
|
||
content: create(content),
|
||
defined: [],
|
||
document: create(document2),
|
||
flow: create(flow),
|
||
lazy: {},
|
||
string: create(string),
|
||
text: create(text)
|
||
};
|
||
return parser;
|
||
function create(initial) {
|
||
return creator;
|
||
function creator(from) {
|
||
return createTokenizer(parser, initial, from);
|
||
}
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark/lib/postprocess.js
|
||
function postprocess(events) {
|
||
while (!subtokenize(events)) {
|
||
}
|
||
return events;
|
||
}
|
||
|
||
// node_modules/micromark/lib/preprocess.js
|
||
var search = /[\0\t\n\r]/g;
|
||
function preprocess() {
|
||
let column = 1;
|
||
let buffer = "";
|
||
let start = true;
|
||
let atCarriageReturn;
|
||
return preprocessor;
|
||
function preprocessor(value, encoding, end) {
|
||
const chunks = [];
|
||
let match;
|
||
let next;
|
||
let startPosition;
|
||
let endPosition;
|
||
let code2;
|
||
value = buffer + (typeof value === "string" ? value.toString() : new TextDecoder(encoding || void 0).decode(value));
|
||
startPosition = 0;
|
||
buffer = "";
|
||
if (start) {
|
||
if (value.charCodeAt(0) === 65279) {
|
||
startPosition++;
|
||
}
|
||
start = void 0;
|
||
}
|
||
while (startPosition < value.length) {
|
||
search.lastIndex = startPosition;
|
||
match = search.exec(value);
|
||
endPosition = match && match.index !== void 0 ? match.index : value.length;
|
||
code2 = value.charCodeAt(endPosition);
|
||
if (!match) {
|
||
buffer = value.slice(startPosition);
|
||
break;
|
||
}
|
||
if (code2 === 10 && startPosition === endPosition && atCarriageReturn) {
|
||
chunks.push(-3);
|
||
atCarriageReturn = void 0;
|
||
} else {
|
||
if (atCarriageReturn) {
|
||
chunks.push(-5);
|
||
atCarriageReturn = void 0;
|
||
}
|
||
if (startPosition < endPosition) {
|
||
chunks.push(value.slice(startPosition, endPosition));
|
||
column += endPosition - startPosition;
|
||
}
|
||
switch (code2) {
|
||
case 0: {
|
||
chunks.push(65533);
|
||
column++;
|
||
break;
|
||
}
|
||
case 9: {
|
||
next = Math.ceil(column / 4) * 4;
|
||
chunks.push(-2);
|
||
while (column++ < next) chunks.push(-1);
|
||
break;
|
||
}
|
||
case 10: {
|
||
chunks.push(-4);
|
||
column = 1;
|
||
break;
|
||
}
|
||
default: {
|
||
atCarriageReturn = true;
|
||
column = 1;
|
||
}
|
||
}
|
||
}
|
||
startPosition = endPosition + 1;
|
||
}
|
||
if (end) {
|
||
if (atCarriageReturn) chunks.push(-5);
|
||
if (buffer) chunks.push(buffer);
|
||
chunks.push(null);
|
||
}
|
||
return chunks;
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-util-decode-string/index.js
|
||
var characterEscapeOrReference = /\\([!-/:-@[-`{-~])|&(#(?:\d{1,7}|x[\da-f]{1,6})|[\da-z]{1,31});/gi;
|
||
function decodeString(value) {
|
||
return value.replace(characterEscapeOrReference, decode);
|
||
}
|
||
function decode($0, $1, $2) {
|
||
if ($1) {
|
||
return $1;
|
||
}
|
||
const head = $2.charCodeAt(0);
|
||
if (head === 35) {
|
||
const head2 = $2.charCodeAt(1);
|
||
const hex = head2 === 120 || head2 === 88;
|
||
return decodeNumericCharacterReference($2.slice(hex ? 2 : 1), hex ? 16 : 10);
|
||
}
|
||
return decodeNamedCharacterReference($2) || $0;
|
||
}
|
||
|
||
// node_modules/unist-util-stringify-position/lib/index.js
|
||
function stringifyPosition(value) {
|
||
if (!value || typeof value !== "object") {
|
||
return "";
|
||
}
|
||
if ("position" in value || "type" in value) {
|
||
return position(value.position);
|
||
}
|
||
if ("start" in value || "end" in value) {
|
||
return position(value);
|
||
}
|
||
if ("line" in value || "column" in value) {
|
||
return point(value);
|
||
}
|
||
return "";
|
||
}
|
||
function point(point3) {
|
||
return index(point3 && point3.line) + ":" + index(point3 && point3.column);
|
||
}
|
||
function position(pos) {
|
||
return point(pos && pos.start) + "-" + point(pos && pos.end);
|
||
}
|
||
function index(value) {
|
||
return value && typeof value === "number" ? value : 1;
|
||
}
|
||
|
||
// node_modules/mdast-util-from-markdown/lib/index.js
|
||
var own = {}.hasOwnProperty;
|
||
function fromMarkdown(value, encoding, options2) {
|
||
if (typeof encoding !== "string") {
|
||
options2 = encoding;
|
||
encoding = void 0;
|
||
}
|
||
return compiler(options2)(postprocess(parse2(options2).document().write(preprocess()(value, encoding, true))));
|
||
}
|
||
function compiler(options2) {
|
||
const config = {
|
||
transforms: [],
|
||
canContainEols: ["emphasis", "fragment", "heading", "paragraph", "strong"],
|
||
enter: {
|
||
autolink: opener(link2),
|
||
autolinkProtocol: onenterdata,
|
||
autolinkEmail: onenterdata,
|
||
atxHeading: opener(heading2),
|
||
blockQuote: opener(blockQuote2),
|
||
characterEscape: onenterdata,
|
||
characterReference: onenterdata,
|
||
codeFenced: opener(codeFlow),
|
||
codeFencedFenceInfo: buffer,
|
||
codeFencedFenceMeta: buffer,
|
||
codeIndented: opener(codeFlow, buffer),
|
||
codeText: opener(codeText2, buffer),
|
||
codeTextData: onenterdata,
|
||
data: onenterdata,
|
||
codeFlowValue: onenterdata,
|
||
definition: opener(definition3),
|
||
definitionDestinationString: buffer,
|
||
definitionLabelString: buffer,
|
||
definitionTitleString: buffer,
|
||
emphasis: opener(emphasis2),
|
||
hardBreakEscape: opener(hardBreak2),
|
||
hardBreakTrailing: opener(hardBreak2),
|
||
htmlFlow: opener(html2, buffer),
|
||
htmlFlowData: onenterdata,
|
||
htmlText: opener(html2, buffer),
|
||
htmlTextData: onenterdata,
|
||
image: opener(image2),
|
||
label: buffer,
|
||
link: opener(link2),
|
||
listItem: opener(listItem2),
|
||
listItemValue: onenterlistitemvalue,
|
||
listOrdered: opener(list4, onenterlistordered),
|
||
listUnordered: opener(list4),
|
||
paragraph: opener(paragraph2),
|
||
reference: onenterreference,
|
||
referenceString: buffer,
|
||
resourceDestinationString: buffer,
|
||
resourceTitleString: buffer,
|
||
setextHeading: opener(heading2),
|
||
strong: opener(strong2),
|
||
thematicBreak: opener(thematicBreak3)
|
||
},
|
||
exit: {
|
||
atxHeading: closer(),
|
||
atxHeadingSequence: onexitatxheadingsequence,
|
||
autolink: closer(),
|
||
autolinkEmail: onexitautolinkemail,
|
||
autolinkProtocol: onexitautolinkprotocol,
|
||
blockQuote: closer(),
|
||
characterEscapeValue: onexitdata,
|
||
characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,
|
||
characterReferenceMarkerNumeric: onexitcharacterreferencemarker,
|
||
characterReferenceValue: onexitcharacterreferencevalue,
|
||
characterReference: onexitcharacterreference,
|
||
codeFenced: closer(onexitcodefenced),
|
||
codeFencedFence: onexitcodefencedfence,
|
||
codeFencedFenceInfo: onexitcodefencedfenceinfo,
|
||
codeFencedFenceMeta: onexitcodefencedfencemeta,
|
||
codeFlowValue: onexitdata,
|
||
codeIndented: closer(onexitcodeindented),
|
||
codeText: closer(onexitcodetext),
|
||
codeTextData: onexitdata,
|
||
data: onexitdata,
|
||
definition: closer(),
|
||
definitionDestinationString: onexitdefinitiondestinationstring,
|
||
definitionLabelString: onexitdefinitionlabelstring,
|
||
definitionTitleString: onexitdefinitiontitlestring,
|
||
emphasis: closer(),
|
||
hardBreakEscape: closer(onexithardbreak),
|
||
hardBreakTrailing: closer(onexithardbreak),
|
||
htmlFlow: closer(onexithtmlflow),
|
||
htmlFlowData: onexitdata,
|
||
htmlText: closer(onexithtmltext),
|
||
htmlTextData: onexitdata,
|
||
image: closer(onexitimage),
|
||
label: onexitlabel,
|
||
labelText: onexitlabeltext,
|
||
lineEnding: onexitlineending,
|
||
link: closer(onexitlink),
|
||
listItem: closer(),
|
||
listOrdered: closer(),
|
||
listUnordered: closer(),
|
||
paragraph: closer(),
|
||
referenceString: onexitreferencestring,
|
||
resourceDestinationString: onexitresourcedestinationstring,
|
||
resourceTitleString: onexitresourcetitlestring,
|
||
resource: onexitresource,
|
||
setextHeading: closer(onexitsetextheading),
|
||
setextHeadingLineSequence: onexitsetextheadinglinesequence,
|
||
setextHeadingText: onexitsetextheadingtext,
|
||
strong: closer(),
|
||
thematicBreak: closer()
|
||
}
|
||
};
|
||
configure(config, (options2 || {}).mdastExtensions || []);
|
||
const data = {};
|
||
return compile;
|
||
  /**
   * Turn a flat list of micromark events into an mdast tree.
   *
   * @param {Array} events - `[kind, token, context]` tuples from micromark.
   * @returns {Object} mdast `root` node with positional info attached.
   */
  function compile(events) {
    let tree = {
      type: "root",
      children: []
    };
    // Compiler context; handlers run with this object (plus a per-event
    // `sliceSerialize`) bound as `this`.
    const context = {
      stack: [tree],
      tokenStack: [],
      config,
      enter,
      exit: exit2,
      buffer,
      resume,
      data
    };
    // Pass 1: splice synthetic `listItem` enter/exit events into the list
    // containers, because micromark does not emit items as containers.
    const listStack = [];
    let index2 = -1;
    while (++index2 < events.length) {
      if (events[index2][1].type === "listOrdered" || events[index2][1].type === "listUnordered") {
        if (events[index2][0] === "enter") {
          listStack.push(index2);
        } else {
          const tail = listStack.pop();
          // `prepareList` mutates `events` in place and returns the new
          // index of this list's exit event.
          index2 = prepareList(events, tail, index2);
        }
      }
    }
    // Pass 2: dispatch each event to the configured enter/exit handler.
    index2 = -1;
    while (++index2 < events.length) {
      const handler2 = config[events[index2][0]];
      if (own.call(handler2, events[index2][1].type)) {
        handler2[events[index2][1].type].call(Object.assign({
          sliceSerialize: events[index2][2].sliceSerialize
        }, context), events[index2][1]);
      }
    }
    // Anything still on the token stack was never closed: report via the
    // handler registered on enter, or throw via `defaultOnError`.
    if (context.tokenStack.length > 0) {
      const tail = context.tokenStack[context.tokenStack.length - 1];
      const handler2 = tail[1] || defaultOnError;
      handler2.call(context, void 0, tail[0]);
    }
    // Root position: first event's start to the end of the second-to-last
    // event (matches upstream mdast-util-from-markdown behavior).
    tree.position = {
      start: point2(events.length > 0 ? events[0][1].start : {
        line: 1,
        column: 1,
        offset: 0
      }),
      end: point2(events.length > 0 ? events[events.length - 2][1].end : {
        line: 1,
        column: 1,
        offset: 0
      })
    };
    // Run tree transforms (e.g. from GFM extensions); a transform may
    // return a replacement root or mutate in place.
    index2 = -1;
    while (++index2 < config.transforms.length) {
      tree = config.transforms[index2](tree) || tree;
    }
    return tree;
  }
|
||
  /**
   * Restructure the events between a list's enter (`start`) and exit
   * (`length`): synthesize `listItem` enter/exit events, detect "spread"
   * lists/items (blank lines between blocks), and normalize line endings
   * at item boundaries.
   *
   * @param {Array} events - Full event list (mutated in place).
   * @param {number} start - Index of the list's `enter` event.
   * @param {number} length - Index of the list's `exit` event.
   * @returns {number} New index of the list's `exit` event after splicing.
   */
  function prepareList(events, start, length) {
    let index2 = start - 1;
    let containerBalance = -1;
    let listSpread = false;
    let listItem3;
    let lineIndex;
    let firstBlankLineIndex;
    let atMarker;
    while (++index2 <= length) {
      const event = events[index2];
      switch (event[1].type) {
        case "listUnordered":
        case "listOrdered":
        case "blockQuote": {
          // Track nesting so only blank lines at this list's own level
          // are considered below.
          if (event[0] === "enter") {
            containerBalance++;
          } else {
            containerBalance--;
          }
          atMarker = void 0;
          break;
        }
        case "lineEndingBlank": {
          if (event[0] === "enter") {
            // First blank line directly inside the current item: candidate
            // for marking the item itself as spread.
            if (listItem3 && !atMarker && !containerBalance && !firstBlankLineIndex) {
              firstBlankLineIndex = index2;
            }
            atMarker = void 0;
          }
          break;
        }
        case "linePrefix":
        case "listItemValue":
        case "listItemMarker":
        case "listItemPrefix":
        case "listItemPrefixWhitespace": {
          // Marker-adjacent tokens: leave `atMarker` untouched.
          break;
        }
        default: {
          atMarker = void 0;
        }
      }
      // A new item starts at this level, or the list itself ends.
      if (!containerBalance && event[0] === "enter" && event[1].type === "listItemPrefix" || containerBalance === -1 && event[0] === "exit" && (event[1].type === "listUnordered" || event[1].type === "listOrdered")) {
        if (listItem3) {
          // Close the previous item: walk back over trailing line endings
          // and prefix tokens to find where it really ends.
          let tailIndex = index2;
          lineIndex = void 0;
          while (tailIndex--) {
            const tailEvent = events[tailIndex];
            if (tailEvent[1].type === "lineEnding" || tailEvent[1].type === "lineEndingBlank") {
              if (tailEvent[0] === "exit") continue;
              if (lineIndex) {
                // A second line ending means the one seen before it was a
                // blank line, which makes the list spread.
                events[lineIndex][1].type = "lineEndingBlank";
                listSpread = true;
              }
              tailEvent[1].type = "lineEnding";
              lineIndex = tailIndex;
            } else if (tailEvent[1].type === "linePrefix" || tailEvent[1].type === "blockQuotePrefix" || tailEvent[1].type === "blockQuotePrefixWhitespace" || tailEvent[1].type === "blockQuoteMarker" || tailEvent[1].type === "listItemIndent") {
              // Prefix-ish tokens: keep walking back.
            } else {
              break;
            }
          }
          // A blank line inside the item body (before its trailing line
          // ending) makes the item itself spread.
          if (firstBlankLineIndex && (!lineIndex || firstBlankLineIndex < lineIndex)) {
            listItem3._spread = true;
          }
          // End the item before its trailing line ending, or right here.
          listItem3.end = Object.assign({}, lineIndex ? events[lineIndex][1].start : event[1].end);
          events.splice(lineIndex || index2, 0, ["exit", listItem3, event[2]]);
          index2++;
          length++;
        }
        if (event[1].type === "listItemPrefix") {
          // Open a new item at this prefix.
          const item = {
            type: "listItem",
            _spread: false,
            start: Object.assign({}, event[1].start),
            // @ts-expect-error: we’ll add `end` in a second.
            end: void 0
          };
          listItem3 = item;
          events.splice(index2, 0, ["enter", item, event[2]]);
          index2++;
          length++;
          firstBlankLineIndex = void 0;
          atMarker = true;
        }
      }
    }
    events[start][1]._spread = listSpread;
    return length;
  }
|
||
function opener(create, and) {
|
||
return open;
|
||
function open(token) {
|
||
enter.call(this, create(token), token);
|
||
if (and) and.call(this, token);
|
||
}
|
||
}
|
||
function buffer() {
|
||
this.stack.push({
|
||
type: "fragment",
|
||
children: []
|
||
});
|
||
}
|
||
function enter(node2, token, errorHandler) {
|
||
const parent = this.stack[this.stack.length - 1];
|
||
const siblings = parent.children;
|
||
siblings.push(node2);
|
||
this.stack.push(node2);
|
||
this.tokenStack.push([token, errorHandler || void 0]);
|
||
node2.position = {
|
||
start: point2(token.start),
|
||
// @ts-expect-error: `end` will be patched later.
|
||
end: void 0
|
||
};
|
||
}
|
||
function closer(and) {
|
||
return close;
|
||
function close(token) {
|
||
if (and) and.call(this, token);
|
||
exit2.call(this, token);
|
||
}
|
||
}
|
||
  /**
   * Close the node for `token`: pop the stack, verify the token matches
   * the one that opened the node, and patch the node's end position.
   *
   * @param {Object} token - Token being exited.
   * @param {Function} [onExitError] - Called instead of the default error
   *   handling when the open/close pair mismatches.
   */
  function exit2(token, onExitError) {
    const node2 = this.stack.pop();
    const open = this.tokenStack.pop();
    if (!open) {
      // Exiting a token that was never entered.
      throw new Error("Cannot close `" + token.type + "` (" + stringifyPosition({
        start: token.start,
        end: token.end
      }) + "): it\u2019s not open");
    } else if (open[0].type !== token.type) {
      // Mismatched pair: prefer the explicit handler, then the handler
      // registered on enter, then the default (which throws).
      if (onExitError) {
        onExitError.call(this, token, open[0]);
      } else {
        const handler2 = open[1] || defaultOnError;
        handler2.call(this, token, open[0]);
      }
    }
    node2.position.end = point2(token.end);
  }
|
||
function resume() {
|
||
return toString(this.stack.pop());
|
||
}
|
||
function onenterlistordered() {
|
||
this.data.expectingFirstListItemValue = true;
|
||
}
|
||
function onenterlistitemvalue(token) {
|
||
if (this.data.expectingFirstListItemValue) {
|
||
const ancestor = this.stack[this.stack.length - 2];
|
||
ancestor.start = Number.parseInt(this.sliceSerialize(token), 10);
|
||
this.data.expectingFirstListItemValue = void 0;
|
||
}
|
||
}
|
||
function onexitcodefencedfenceinfo() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.lang = data2;
|
||
}
|
||
function onexitcodefencedfencemeta() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.meta = data2;
|
||
}
|
||
function onexitcodefencedfence() {
|
||
if (this.data.flowCodeInside) return;
|
||
this.buffer();
|
||
this.data.flowCodeInside = true;
|
||
}
|
||
function onexitcodefenced() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.value = data2.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, "");
|
||
this.data.flowCodeInside = void 0;
|
||
}
|
||
function onexitcodeindented() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.value = data2.replace(/(\r?\n|\r)$/g, "");
|
||
}
|
||
function onexitdefinitionlabelstring(token) {
|
||
const label = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.label = label;
|
||
node2.identifier = normalizeIdentifier(this.sliceSerialize(token)).toLowerCase();
|
||
}
|
||
function onexitdefinitiontitlestring() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.title = data2;
|
||
}
|
||
function onexitdefinitiondestinationstring() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.url = data2;
|
||
}
|
||
function onexitatxheadingsequence(token) {
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
if (!node2.depth) {
|
||
const depth = this.sliceSerialize(token).length;
|
||
node2.depth = depth;
|
||
}
|
||
}
|
||
function onexitsetextheadingtext() {
|
||
this.data.setextHeadingSlurpLineEnding = true;
|
||
}
|
||
function onexitsetextheadinglinesequence(token) {
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.depth = this.sliceSerialize(token).codePointAt(0) === 61 ? 1 : 2;
|
||
}
|
||
function onexitsetextheading() {
|
||
this.data.setextHeadingSlurpLineEnding = void 0;
|
||
}
|
||
function onenterdata(token) {
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
const siblings = node2.children;
|
||
let tail = siblings[siblings.length - 1];
|
||
if (!tail || tail.type !== "text") {
|
||
tail = text4();
|
||
tail.position = {
|
||
start: point2(token.start),
|
||
// @ts-expect-error: we’ll add `end` later.
|
||
end: void 0
|
||
};
|
||
siblings.push(tail);
|
||
}
|
||
this.stack.push(tail);
|
||
}
|
||
function onexitdata(token) {
|
||
const tail = this.stack.pop();
|
||
tail.value += this.sliceSerialize(token);
|
||
tail.position.end = point2(token.end);
|
||
}
|
||
function onexitlineending(token) {
|
||
const context = this.stack[this.stack.length - 1];
|
||
if (this.data.atHardBreak) {
|
||
const tail = context.children[context.children.length - 1];
|
||
tail.position.end = point2(token.end);
|
||
this.data.atHardBreak = void 0;
|
||
return;
|
||
}
|
||
if (!this.data.setextHeadingSlurpLineEnding && config.canContainEols.includes(context.type)) {
|
||
onenterdata.call(this, token);
|
||
onexitdata.call(this, token);
|
||
}
|
||
}
|
||
function onexithardbreak() {
|
||
this.data.atHardBreak = true;
|
||
}
|
||
function onexithtmlflow() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.value = data2;
|
||
}
|
||
function onexithtmltext() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.value = data2;
|
||
}
|
||
function onexitcodetext() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.value = data2;
|
||
}
|
||
function onexitlink() {
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
if (this.data.inReference) {
|
||
const referenceType = this.data.referenceType || "shortcut";
|
||
node2.type += "Reference";
|
||
node2.referenceType = referenceType;
|
||
delete node2.url;
|
||
delete node2.title;
|
||
} else {
|
||
delete node2.identifier;
|
||
delete node2.label;
|
||
}
|
||
this.data.referenceType = void 0;
|
||
}
|
||
function onexitimage() {
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
if (this.data.inReference) {
|
||
const referenceType = this.data.referenceType || "shortcut";
|
||
node2.type += "Reference";
|
||
node2.referenceType = referenceType;
|
||
delete node2.url;
|
||
delete node2.title;
|
||
} else {
|
||
delete node2.identifier;
|
||
delete node2.label;
|
||
}
|
||
this.data.referenceType = void 0;
|
||
}
|
||
function onexitlabeltext(token) {
|
||
const string3 = this.sliceSerialize(token);
|
||
const ancestor = this.stack[this.stack.length - 2];
|
||
ancestor.label = decodeString(string3);
|
||
ancestor.identifier = normalizeIdentifier(string3).toLowerCase();
|
||
}
|
||
function onexitlabel() {
|
||
const fragment = this.stack[this.stack.length - 1];
|
||
const value = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
this.data.inReference = true;
|
||
if (node2.type === "link") {
|
||
const children = fragment.children;
|
||
node2.children = children;
|
||
} else {
|
||
node2.alt = value;
|
||
}
|
||
}
|
||
function onexitresourcedestinationstring() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.url = data2;
|
||
}
|
||
function onexitresourcetitlestring() {
|
||
const data2 = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.title = data2;
|
||
}
|
||
function onexitresource() {
|
||
this.data.inReference = void 0;
|
||
}
|
||
function onenterreference() {
|
||
this.data.referenceType = "collapsed";
|
||
}
|
||
function onexitreferencestring(token) {
|
||
const label = this.resume();
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.label = label;
|
||
node2.identifier = normalizeIdentifier(this.sliceSerialize(token)).toLowerCase();
|
||
this.data.referenceType = "full";
|
||
}
|
||
function onexitcharacterreferencemarker(token) {
|
||
this.data.characterReferenceType = token.type;
|
||
}
|
||
function onexitcharacterreferencevalue(token) {
|
||
const data2 = this.sliceSerialize(token);
|
||
const type = this.data.characterReferenceType;
|
||
let value;
|
||
if (type) {
|
||
value = decodeNumericCharacterReference(data2, type === "characterReferenceMarkerNumeric" ? 10 : 16);
|
||
this.data.characterReferenceType = void 0;
|
||
} else {
|
||
const result = decodeNamedCharacterReference(data2);
|
||
value = result;
|
||
}
|
||
const tail = this.stack[this.stack.length - 1];
|
||
tail.value += value;
|
||
}
|
||
function onexitcharacterreference(token) {
|
||
const tail = this.stack.pop();
|
||
tail.position.end = point2(token.end);
|
||
}
|
||
function onexitautolinkprotocol(token) {
|
||
onexitdata.call(this, token);
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.url = this.sliceSerialize(token);
|
||
}
|
||
function onexitautolinkemail(token) {
|
||
onexitdata.call(this, token);
|
||
const node2 = this.stack[this.stack.length - 1];
|
||
node2.url = "mailto:" + this.sliceSerialize(token);
|
||
}
|
||
function blockQuote2() {
|
||
return {
|
||
type: "blockquote",
|
||
children: []
|
||
};
|
||
}
|
||
function codeFlow() {
|
||
return {
|
||
type: "code",
|
||
lang: null,
|
||
meta: null,
|
||
value: ""
|
||
};
|
||
}
|
||
function codeText2() {
|
||
return {
|
||
type: "inlineCode",
|
||
value: ""
|
||
};
|
||
}
|
||
function definition3() {
|
||
return {
|
||
type: "definition",
|
||
identifier: "",
|
||
label: null,
|
||
title: null,
|
||
url: ""
|
||
};
|
||
}
|
||
function emphasis2() {
|
||
return {
|
||
type: "emphasis",
|
||
children: []
|
||
};
|
||
}
|
||
function heading2() {
|
||
return {
|
||
type: "heading",
|
||
// @ts-expect-error `depth` will be set later.
|
||
depth: 0,
|
||
children: []
|
||
};
|
||
}
|
||
function hardBreak2() {
|
||
return {
|
||
type: "break"
|
||
};
|
||
}
|
||
function html2() {
|
||
return {
|
||
type: "html",
|
||
value: ""
|
||
};
|
||
}
|
||
function image2() {
|
||
return {
|
||
type: "image",
|
||
title: null,
|
||
url: "",
|
||
alt: null
|
||
};
|
||
}
|
||
function link2() {
|
||
return {
|
||
type: "link",
|
||
title: null,
|
||
url: "",
|
||
children: []
|
||
};
|
||
}
|
||
function list4(token) {
|
||
return {
|
||
type: "list",
|
||
ordered: token.type === "listOrdered",
|
||
start: null,
|
||
spread: token._spread,
|
||
children: []
|
||
};
|
||
}
|
||
function listItem2(token) {
|
||
return {
|
||
type: "listItem",
|
||
spread: token._spread,
|
||
checked: null,
|
||
children: []
|
||
};
|
||
}
|
||
function paragraph2() {
|
||
return {
|
||
type: "paragraph",
|
||
children: []
|
||
};
|
||
}
|
||
function strong2() {
|
||
return {
|
||
type: "strong",
|
||
children: []
|
||
};
|
||
}
|
||
function text4() {
|
||
return {
|
||
type: "text",
|
||
value: ""
|
||
};
|
||
}
|
||
function thematicBreak3() {
|
||
return {
|
||
type: "thematicBreak"
|
||
};
|
||
}
|
||
}
|
||
/**
 * Copy a unist point (`line`/`column`/`offset`) so later mutation of the
 * source token cannot corrupt tree positions.
 */
function point2(d) {
  const { line, column, offset } = d;
  return { line, column, offset };
}
|
||
/**
 * Recursively merge `mdastExtensions` (which may be nested arrays of
 * extensions) into the combined config via `extension`.
 */
function configure(combined, extensions) {
  for (const value of extensions) {
    if (Array.isArray(value)) {
      configure(combined, value);
    } else {
      extension(combined, value);
    }
  }
}
|
||
/**
 * Merge one mdast extension into the combined config: `canContainEols`
 * and `transforms` concatenate, `enter`/`exit` handler maps merge;
 * unknown keys are ignored.
 */
function extension(combined, extension2) {
  for (const key in extension2) {
    if (own.call(extension2, key)) {
      switch (key) {
        case "canContainEols":
        case "transforms": {
          const right = extension2[key];
          if (right) {
            combined[key].push(...right);
          }
          break;
        }
        case "enter":
        case "exit": {
          const right = extension2[key];
          if (right) {
            Object.assign(combined[key], right);
          }
          break;
        }
      }
    }
  }
}
|
||
/**
 * Default handler for unclosed or mismatched tokens: always throws.
 *
 * @param {Object|undefined} left - Token being closed, if any.
 * @param {Object} right - Token that is (still) open.
 */
function defaultOnError(left, right) {
  if (!left) {
    // End of document reached while `right` is still open.
    throw new Error("Cannot close document, a token (`" + right.type + "`, " + stringifyPosition({
      start: right.start,
      end: right.end
    }) + ") is still open");
  }
  // Closing `left` while a different token `right` is open.
  throw new Error("Cannot close `" + left.type + "` (" + stringifyPosition({
    start: left.start,
    end: left.end
  }) + "): a different token (`" + right.type + "`, " + stringifyPosition({
    start: right.start,
    end: right.end
  }) + ") is open");
}
|
||
|
||
// node_modules/remark-parse/lib/index.js
|
||
/**
 * remark plugin: attach a markdown parser to the unified processor.
 *
 * @param {Object} [options2] - `mdast-util-from-markdown` options.
 */
function remarkParse(options2) {
  const self2 = this;
  self2.parser = parser;
  /**
   * @param {string} doc - Markdown source.
   * @returns {Object} mdast root node.
   */
  function parser(doc) {
    return fromMarkdown(doc, __spreadProps(__spreadValues(__spreadValues({}, self2.data("settings")), options2), {
      // Note: these options are not in the readme.
      // The goal is for them to be set by plugins on `data` instead of being
      // passed by users.
      extensions: self2.data("micromarkExtensions") || [],
      mdastExtensions: self2.data("fromMarkdownExtensions") || []
    }));
  }
}
|
||
|
||
// node_modules/zwitch/index.js
|
||
var own2 = {}.hasOwnProperty;
/**
 * Build a `key`-based dispatcher: picks `handlers[value[key]]`, falling
 * back to `unknown` (field present, value unknown) or `invalid` (field
 * absent or value nullish).
 */
function zwitch(key, options2) {
  const settings = options2 || {};
  one3.handlers = settings.handlers || {};
  one3.invalid = settings.invalid;
  one3.unknown = settings.unknown;
  return one3;
  function one3(value, ...parameters) {
    let fn = one3.invalid;
    const handlers = one3.handlers;
    if (value && own2.call(value, key)) {
      const id = String(value[key]);
      fn = own2.call(handlers, id) ? handlers[id] : one3.unknown;
    }
    if (fn) {
      return fn.call(this, value, ...parameters);
    }
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/configure.js
|
||
var own3 = {}.hasOwnProperty;
/**
 * Merge a `mdast-util-to-markdown` extension into `base`: nested
 * `extensions` recurse first, `unsafe`/`join` concatenate, `handlers`
 * merge, and any other key becomes an option.
 *
 * @returns {Object} `base`, for chaining.
 */
function configure2(base, extension2) {
  if (extension2.extensions) {
    let index2 = -1;
    while (++index2 < extension2.extensions.length) {
      configure2(base, extension2.extensions[index2]);
    }
  }
  for (const key in extension2) {
    if (own3.call(extension2, key)) {
      switch (key) {
        case "extensions": {
          // Already handled above.
          break;
        }
        case "unsafe":
        case "join": {
          list2(base[key], extension2[key]);
          break;
        }
        case "handlers": {
          map(base[key], extension2[key]);
          break;
        }
        default: {
          base.options[key] = extension2[key];
        }
      }
    }
  }
  return base;
}
// Append `right`'s items to `left` (no-op when `right` is nullish).
function list2(left, right) {
  if (right) {
    left.push(...right);
  }
}
// Copy `right`'s own enumerable properties onto `left` (no-op if nullish).
function map(left, right) {
  if (right) {
    Object.assign(left, right);
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/blockquote.js
|
||
/**
 * Serialize an mdast `blockquote`: render children as flow content, then
 * prefix every line with `>` (plus a space on non-blank lines).
 */
function blockquote(node2, _, state, info) {
  const exit2 = state.enter("blockquote");
  const tracker = state.createTracker(info);
  tracker.move("> ");
  tracker.shift(2);
  const flow = state.containerFlow(node2, tracker.current());
  const result = state.indentLines(flow, map2);
  exit2();
  return result;
}
// Line-map for `indentLines`: blank lines get a bare `>`.
function map2(line, _, blank) {
  return ">" + (blank ? "" : " ") + line;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/pattern-in-scope.js
|
||
/**
 * Whether `pattern` applies in the current construct stack: it must be in
 * one of `inConstruct` (absent list ⇒ anywhere) and in none of
 * `notInConstruct`.
 */
function patternInScope(stack, pattern) {
  return listInScope(stack, pattern.inConstruct, true) && !listInScope(stack, pattern.notInConstruct, false);
}
/**
 * Whether any construct in `list4` (a string or array) is on `stack`;
 * returns `none` when the list is absent or empty.
 */
function listInScope(stack, list4, none) {
  const constructs = typeof list4 === "string" ? [list4] : list4;
  if (!constructs || constructs.length === 0) {
    return none;
  }
  return constructs.some((construct) => stack.includes(construct));
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/break.js
|
||
/**
 * Serialize a hard break as `\` + newline. When a newline is unsafe in
 * the current construct, the break degrades to a single space (or nothing
 * when the preceding character is already whitespace).
 */
function hardBreak(_, _1, state, info) {
  for (const pattern of state.unsafe) {
    if (pattern.character === "\n" && patternInScope(state.stack, pattern)) {
      return /[ \t]/.test(info.before) ? "" : " ";
    }
  }
  return "\\\n";
}
|
||
|
||
// node_modules/longest-streak/index.js
|
||
/**
 * Count the longest run of back-to-back `substring` occurrences in
 * `value` (e.g. `longestStreak("a``b````c", "\u0060")` → 4).
 *
 * @param {unknown} value - Coerced to string and searched.
 * @param {string} substring - Needle; must be a string.
 * @returns {number} Number of occurrences in the longest streak.
 * @throws {TypeError} When `substring` is not a string.
 */
function longestStreak(value, substring) {
  // Validate the needle before any use: the original called `indexOf`
  // with it first and only threw afterwards.
  if (typeof substring !== "string") {
    throw new TypeError("Expected substring");
  }
  const source = String(value);
  let index2 = source.indexOf(substring);
  let expected = index2;
  let count = 0;
  let max = 0;
  while (index2 !== -1) {
    if (index2 === expected) {
      // Contiguous with the previous match: extend the current streak.
      if (++count > max) {
        max = count;
      }
    } else {
      // Gap since the previous match: start a new streak here.
      count = 1;
    }
    expected = index2 + substring.length;
    index2 = source.indexOf(substring, expected);
  }
  return max;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/format-code-as-indented.js
|
||
/**
 * Whether a `code` node may be serialized as indented code: only when
 * fences are disabled, the value is non-blank, and there is no info
 * string (`lang`).
 */
function formatCodeAsIndented(node2, state) {
  if (state.options.fences !== false) return false;
  if (!node2.value || node2.lang) return false;
  // Needs at least one non-whitespace character…
  if (!/[^ \r\n]/.test(node2.value)) return false;
  // …and must not start or end with a blank line.
  return !/^[\t ]*(?:[\r\n]|$)|(?:^|[\r\n])[\t ]*$/.test(node2.value);
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-fence.js
|
||
/**
 * Validate `options.fence`; defaults to a backtick.
 *
 * @returns {string} The fence marker.
 * @throws {Error} When the marker is not a backtick or `~`.
 */
function checkFence(state) {
  const marker = state.options.fence || "`";
  if (marker === "`" || marker === "~") {
    return marker;
  }
  throw new Error(
    "Cannot serialize code with `" + marker + "` for `options.fence`, expected `` ` `` or `~`"
  );
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/code.js
|
||
/**
 * Serialize an mdast `code` node: indented code when permitted (see
 * `formatCodeAsIndented`), otherwise fenced code with a fence long enough
 * to contain the longest marker streak in the body.
 *
 * @param {Object} node2 - mdast `code` node.
 * @param {Object} _ - Parent (unused).
 * @param {Object} state - Serializer state.
 * @param {Object} info - Surrounding-character/tracking info.
 * @returns {string} Markdown for the code block.
 */
function code(node2, _, state, info) {
  const marker = checkFence(state);
  const raw = node2.value || "";
  const suffix = marker === "`" ? "GraveAccent" : "Tilde";
  if (formatCodeAsIndented(node2, state)) {
    const exit3 = state.enter("codeIndented");
    const value2 = state.indentLines(raw, map3);
    exit3();
    return value2;
  }
  const tracker = state.createTracker(info);
  // One more marker than the longest streak inside, minimum three.
  const sequence = marker.repeat(Math.max(longestStreak(raw, marker) + 1, 3));
  const exit2 = state.enter("codeFenced");
  let value = tracker.move(sequence);
  if (node2.lang) {
    const subexit = state.enter(`codeFencedLang${suffix}`);
    value += tracker.move(
      state.safe(node2.lang, __spreadValues({
        before: value,
        after: " ",
        encode: ["`"]
      }, tracker.current()))
    );
    subexit();
  }
  // Meta is only serialized when a language is present too.
  if (node2.lang && node2.meta) {
    const subexit = state.enter(`codeFencedMeta${suffix}`);
    value += tracker.move(" ");
    value += tracker.move(
      state.safe(node2.meta, __spreadValues({
        before: value,
        after: "\n",
        encode: ["`"]
      }, tracker.current()))
    );
    subexit();
  }
  value += tracker.move("\n");
  if (raw) {
    value += tracker.move(raw + "\n");
  }
  // Closing fence.
  value += tracker.move(sequence);
  exit2();
  return value;
}
|
||
/**
 * Line-map for indented code: prefix non-blank lines with four spaces,
 * as CommonMark requires for indented code blocks (the source prefixed a
 * single space, which would not parse as code; upstream
 * mdast-util-to-markdown uses four). Blank lines stay empty.
 */
function map3(line, _, blank) {
  return (blank ? "" : "    ") + line;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-quote.js
|
||
/**
 * Validate `options.quote`; defaults to a double quote.
 *
 * @returns {string} The title quote marker.
 * @throws {Error} When the marker is neither `"` nor `'`.
 */
function checkQuote(state) {
  const marker = state.options.quote || '"';
  if (marker === '"' || marker === "'") {
    return marker;
  }
  throw new Error(
    "Cannot serialize title with `" + marker + "` for `options.quote`, expected `\"`, or `'`"
  );
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/definition.js
|
||
/**
 * Serialize an mdast `definition`: `[label]: <url> "title"`. The URL is
 * wrapped in angle brackets when empty or containing whitespace/control
 * characters; the title (when present) uses the configured quote.
 *
 * @returns {string} Markdown for the definition.
 */
function definition2(node2, _, state, info) {
  const quote = checkQuote(state);
  const suffix = quote === '"' ? "Quote" : "Apostrophe";
  const exit2 = state.enter("definition");
  let subexit = state.enter("label");
  const tracker = state.createTracker(info);
  let value = tracker.move("[");
  value += tracker.move(
    state.safe(state.associationId(node2), __spreadValues({
      before: value,
      after: "]"
    }, tracker.current()))
  );
  value += tracker.move("]: ");
  subexit();
  if (
    // If there’s no url, or…
    !node2.url || // If there are control characters or whitespace.
    /[\0- \u007F]/.test(node2.url)
  ) {
    subexit = state.enter("destinationLiteral");
    value += tracker.move("<");
    value += tracker.move(
      state.safe(node2.url, __spreadValues({ before: value, after: ">" }, tracker.current()))
    );
    value += tracker.move(">");
  } else {
    // Bare destination.
    subexit = state.enter("destinationRaw");
    value += tracker.move(
      state.safe(node2.url, __spreadValues({
        before: value,
        after: node2.title ? " " : "\n"
      }, tracker.current()))
    );
  }
  subexit();
  if (node2.title) {
    subexit = state.enter(`title${suffix}`);
    value += tracker.move(" " + quote);
    value += tracker.move(
      state.safe(node2.title, __spreadValues({
        before: value,
        after: quote
      }, tracker.current()))
    );
    value += tracker.move(quote);
    subexit();
  }
  exit2();
  return value;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-emphasis.js
|
||
/**
 * Validate `options.emphasis`; defaults to `*`.
 *
 * @returns {string} The emphasis marker.
 * @throws {Error} When the marker is neither `*` nor `_`.
 */
function checkEmphasis(state) {
  const marker = state.options.emphasis || "*";
  if (marker === "*" || marker === "_") {
    return marker;
  }
  throw new Error(
    "Cannot serialize emphasis with `" + marker + "` for `options.emphasis`, expected `*`, or `_`"
  );
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/encode-character-reference.js
|
||
/**
 * Encode a code point as an uppercase hexadecimal character reference,
 * e.g. `65` → `"&#x41;"`.
 */
function encodeCharacterReference(code2) {
  const hex = code2.toString(16).toUpperCase();
  return "&#x" + hex + ";";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/encode-info.js
|
||
/**
 * Decide whether the characters just outside and just inside an attention
 * run (`*`/`_`) must be encoded for the run to form. Each side is
 * classified by `classifyCharacter`: `undefined` = letter/other,
 * `1` = whitespace, `2` = punctuation.
 *
 * @returns {{inside: boolean, outside: boolean}} Which sides to encode.
 */
function encodeInfo(outside, inside, marker) {
  const outsideKind = classifyCharacter(outside);
  const insideKind = classifyCharacter(inside);
  if (outsideKind === void 0) {
    // Letter outside.
    if (insideKind === void 0) {
      // Letter inside: we have to encode *both* letters for `_` as it is
      // looser; it already forms for `*` (and GFM's `~`).
      return marker === "_" ? { inside: true, outside: true } : { inside: false, outside: false };
    }
    if (insideKind === 1) {
      // Whitespace inside: encode both (letter, whitespace).
      return { inside: true, outside: true };
    }
    // Punctuation inside: encode outer (letter).
    return { inside: false, outside: true };
  }
  if (outsideKind === 1) {
    // Whitespace outside.
    if (insideKind === 1) {
      // Whitespace inside: encode both (whitespace).
      return { inside: true, outside: true };
    }
    // Letter or punctuation inside: already forms.
    return { inside: false, outside: false };
  }
  // Punctuation outside.
  if (insideKind === 1) {
    // Whitespace inside: encode inner (whitespace).
    return { inside: true, outside: false };
  }
  // Letter or punctuation inside: already forms.
  return { inside: false, outside: false };
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/emphasis.js
|
||
// `peek` tells the serializer which character emphasis will start with.
emphasis.peek = emphasisPeek;
/**
 * Serialize an mdast `emphasis` node: wrap the phrasing content in the
 * configured marker, replacing boundary characters with character
 * references when the run would otherwise not parse as emphasis.
 *
 * @returns {string} Markdown for the emphasis.
 */
function emphasis(node2, _, state, info) {
  const marker = checkEmphasis(state);
  const exit2 = state.enter("emphasis");
  const tracker = state.createTracker(info);
  const before = tracker.move(marker);
  let between2 = tracker.move(
    state.containerPhrasing(node2, __spreadValues({
      after: marker,
      before
    }, tracker.current()))
  );
  const betweenHead = between2.charCodeAt(0);
  // Does the opening boundary need encoding?
  const open = encodeInfo(
    info.before.charCodeAt(info.before.length - 1),
    betweenHead,
    marker
  );
  if (open.inside) {
    between2 = encodeCharacterReference(betweenHead) + between2.slice(1);
  }
  // Same question for the closing boundary (recompute the tail: the head
  // may already have been replaced).
  const betweenTail = between2.charCodeAt(between2.length - 1);
  const close = encodeInfo(info.after.charCodeAt(0), betweenTail, marker);
  if (close.inside) {
    between2 = between2.slice(0, -1) + encodeCharacterReference(betweenTail);
  }
  const after = tracker.move(marker);
  exit2();
  // Let the caller know whether the characters *outside* the run still
  // need encoding.
  state.attentionEncodeSurroundingInfo = {
    after: close.outside,
    before: open.outside
  };
  return before + between2 + after;
}
|
||
/**
 * Peek the character `emphasis` output starts with: the configured
 * marker, defaulting to `*`.
 */
function emphasisPeek(_, _1, state) {
  const marker = state.options.emphasis;
  return marker || "*";
}
|
||
|
||
// node_modules/unist-util-is/lib/index.js
|
||
/**
 * Normalize a unist `test` into a check function:
 * - nullish → always true,
 * - function → wrapped test,
 * - array → any-of, object → props match,
 * - string → node `type` match.
 *
 * @param {*} [test]
 * @returns {Function} Check with signature `(node, index?, parent?)`.
 * @throws {Error} For unsupported test values.
 */
var convert = function(test) {
  // `typeof null` is "object", so handle nullish first.
  if (test === null || test === void 0) {
    return ok;
  }
  switch (typeof test) {
    case "function":
      return castFactory(test);
    case "object":
      return Array.isArray(test) ? anyFactory(test) : propsFactory(test);
    case "string":
      return typeFactory(test);
    default:
      throw new Error("Expected function, string, or object as test");
  }
};
|
||
/**
 * Build a check that passes when any of `tests` passes.
 */
function anyFactory(tests) {
  const checks = tests.map((test) => convert(test));
  return castFactory(any);
  function any(...parameters) {
    return checks.some((check) => check.apply(this, parameters));
  }
}
|
||
/**
 * Build a check that passes when a node carries every key/value pair of
 * `check` (strict equality per key).
 */
function propsFactory(check) {
  const checkAsRecord = (
    /** @type {Record<string, unknown>} */
    check
  );
  return castFactory(all3);
  function all3(node2) {
    const nodeAsRecord = (
      /** @type {Record<string, unknown>} */
      node2
    );
    for (const key in check) {
      if (nodeAsRecord[key] !== checkAsRecord[key]) return false;
    }
    return true;
  }
}
|
||
/**
 * Build a check that passes when a node's `type` equals `check`.
 */
function typeFactory(check) {
  return castFactory(type);
  function type(node2) {
    // Truthy guard kept as-is: `castFactory` coerces to boolean.
    return node2 && node2.type === check;
  }
}
|
||
/**
 * Wrap a raw test function so it only runs on node-like values, receives
 * normalized index/parent arguments, and always yields a boolean.
 */
function castFactory(testFunction) {
  return check;
  function check(value, index2, parent) {
    if (!looksLikeANode(value)) {
      return false;
    }
    const result = testFunction.call(
      this,
      value,
      typeof index2 === "number" ? index2 : void 0,
      parent || void 0
    );
    return Boolean(result);
  }
}
|
||
// Check that always passes (used when no test is given).
function ok() {
  return true;
}
// Whether `value` resembles a unist node: a non-null object with a
// `type` field.
function looksLikeANode(value) {
  return value !== null && typeof value === "object" && "type" in value;
}
|
||
|
||
// node_modules/unist-util-visit-parents/lib/color.js
|
||
// Identity; terminal colors are only applied in the Node-specific build.
function color(d) {
  return d;
}
|
||
|
||
// node_modules/unist-util-visit-parents/lib/index.js
|
||
// Sentinel values shared by the visitor machinery.
var empty = [];
// Continue traversing as normal.
var CONTINUE = true;
// Stop traversing immediately.
var EXIT = false;
// Do not traverse this node's children.
var SKIP = "skip";
||
/**
 * Visit nodes in `tree` (preorder), calling `visitor(node, parents)` for
 * each node matching `test`. The visitor may return `EXIT`, `SKIP`, or a
 * numeric index at which to continue among the parent's children.
 *
 * @param {Object} tree - Root node.
 * @param {*} [test] - Optional test (see `unist-util-is` `convert`).
 * @param {Function} visitor - Called with the node and its ancestry.
 * @param {boolean} [reverse] - Visit children right-to-left.
 */
function visitParents(tree, test, visitor, reverse) {
  let check;
  // Overload: `visitParents(tree, visitor, reverse?)` without a test.
  if (typeof test === "function" && typeof visitor !== "function") {
    reverse = visitor;
    visitor = test;
  } else {
    check = test;
  }
  const is2 = convert(check);
  const step = reverse ? -1 : 1;
  factory(tree, void 0, [])();
  function factory(node2, index2, parents) {
    const value = (
      /** @type {Record<string, unknown>} */
      node2 && typeof node2 === "object" ? node2 : {}
    );
    // Name the closure after the node for readable stack traces.
    if (typeof value.type === "string") {
      const name = (
        // `hast`
        typeof value.tagName === "string" ? value.tagName : (
          // `xast`
          typeof value.name === "string" ? value.name : void 0
        )
      );
      Object.defineProperty(visit2, "name", {
        value: "node (" + color(node2.type + (name ? "<" + name + ">" : "")) + ")"
      });
    }
    return visit2;
    function visit2() {
      let result = empty;
      let subresult;
      let offset;
      let grandparents;
      if (!test || is2(node2, index2, parents[parents.length - 1] || void 0)) {
        result = toResult(visitor(node2, parents));
        if (result[0] === EXIT) {
          return result;
        }
      }
      if ("children" in node2 && node2.children) {
        const nodeAsParent = (
          /** @type {UnistParent} */
          node2
        );
        if (nodeAsParent.children && result[0] !== SKIP) {
          offset = (reverse ? nodeAsParent.children.length : -1) + step;
          grandparents = parents.concat(nodeAsParent);
          while (offset > -1 && offset < nodeAsParent.children.length) {
            const child = nodeAsParent.children[offset];
            subresult = factory(child, offset, grandparents)();
            if (subresult[0] === EXIT) {
              return subresult;
            }
            // A numeric result moves the cursor (e.g. after splices).
            offset = typeof subresult[1] === "number" ? subresult[1] : offset + step;
          }
        }
      }
      return result;
    }
  }
}
|
||
/**
 * Normalize whatever a visitor returned into an `[action, index?]` tuple.
 *
 * @param value - Raw visitor result.
 * @returns {Array} Normalized tuple.
 */
function toResult(value) {
  if (Array.isArray(value)) {
    return value;
  }

  // A bare number means "continue at this index".
  if (typeof value === "number") {
    return [CONTINUE, value];
  }

  return value === null || value === void 0 ? empty : [value];
}
|
||
|
||
// node_modules/unist-util-visit/lib/index.js
|
||
/**
 * Like `visitParents`, but the visitor receives `(node, index, parent)`
 * instead of the full ancestor stack.
 *
 * @param tree - Tree to traverse.
 * @param testOrVisitor - Test, or the visitor when the test is omitted.
 * @param visitorOrReverse - Visitor, or `reverse` when the test is omitted.
 * @param maybeReverse - `reverse` in the full-arity form.
 */
function visit(tree, testOrVisitor, visitorOrReverse, maybeReverse) {
  let reverse;
  let test;
  let visitor;

  // Resolve the overloaded signature: `visit(tree, visitor[, reverse])`
  // versus `visit(tree, test, visitor[, reverse])`.
  if (typeof testOrVisitor === "function" && typeof visitorOrReverse !== "function") {
    test = void 0;
    visitor = testOrVisitor;
    reverse = visitorOrReverse;
  } else {
    test = testOrVisitor;
    visitor = visitorOrReverse;
    reverse = maybeReverse;
  }

  visitParents(tree, test, overload, reverse);

  /** Adapt the parents-style callback to `(node, index, parent)`. */
  function overload(node2, parents) {
    const parent = parents.length > 0 ? parents[parents.length - 1] : void 0;
    const index2 = parent ? parent.children.indexOf(node2) : void 0;
    return visitor(node2, index2, parent);
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/format-heading-as-setext.js
|
||
/**
 * Decide whether a heading can (and should) be serialized in setext form
 * (`===`/`---` underline): rank must be below 3, the heading must have
 * content, and either the `setext` option is on or the content contains a
 * line ending (which ATX headings cannot represent).
 *
 * @param node2 - Heading node.
 * @param state - Serializer state.
 * @returns {boolean}
 */
function formatHeadingAsSetext(node2, state) {
  let literalWithBreak = false;

  // Look for any literal line ending or `break` node in the content.
  visit(node2, function (descendant) {
    if (
      ("value" in descendant && /\r?\n|\r/.test(descendant.value)) ||
      descendant.type === "break"
    ) {
      literalWithBreak = true;
      return EXIT;
    }
  });

  return Boolean(
    (!node2.depth || node2.depth < 3) &&
      toString(node2) &&
      (state.options.setext || literalWithBreak)
  );
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/heading.js
|
||
/**
 * Serialize a `heading` node, either as a setext heading (underlined) or
 * an ATX heading (`#` prefix), per `formatHeadingAsSetext`.
 *
 * @param node2 - Heading node.
 * @param _ - Parent (unused).
 * @param state - Serializer state.
 * @param info - Positional/tracking info.
 * @returns {string} Serialized heading.
 */
function heading(node2, _, state, info) {
  // Clamp depth to 1..6; missing depth defaults to 1.
  const rank = Math.max(Math.min(6, node2.depth || 1), 1);
  const tracker = state.createTracker(info);
  if (formatHeadingAsSetext(node2, state)) {
    const exit3 = state.enter("headingSetext");
    const subexit2 = state.enter("phrasing");
    const value2 = state.containerPhrasing(node2, __spreadProps(__spreadValues({}, tracker.current()), {
      before: "\n",
      after: "\n"
    }));
    subexit2();
    exit3();
    // Underline length matches the last line of the content.
    return value2 + "\n" + (rank === 1 ? "=" : "-").repeat(
      // The whole size…
      value2.length - // Minus the position of the character after the last EOL (or
      // 0 if there is none)…
      (Math.max(value2.lastIndexOf("\r"), value2.lastIndexOf("\n")) + 1)
    );
  }
  const sequence = "#".repeat(rank);
  const exit2 = state.enter("headingAtx");
  const subexit = state.enter("phrasing");
  tracker.move(sequence + " ");
  let value = state.containerPhrasing(node2, __spreadValues({
    before: "# ",
    after: "\n"
  }, tracker.current()));
  // Leading whitespace would be eaten by the ATX parser: encode it.
  if (/^[\t ]/.test(value)) {
    value = encodeCharacterReference(value.charCodeAt(0)) + value.slice(1);
  }
  value = value ? sequence + " " + value : sequence;
  // `closeAtx` appends the marker run after the content too.
  if (state.options.closeAtx) {
    value += " " + sequence;
  }
  subexit();
  exit2();
  return value;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/html.js
|
||
html.peek = htmlPeek;
// Serialize an `html` node: emit its raw value untouched.
function html(node2) {
  return node2.value || "";
}
// Peek: report `<` as the representative first character.
function htmlPeek() {
  return "<";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/image.js
|
||
image.peek = imagePeek;
/**
 * Serialize an `image` node as `![alt](url "title")`.
 * Uses an angle-bracketed destination (`<url>`) when the URL is empty but
 * a title exists, or when the URL contains control characters or spaces.
 *
 * @param node2 - Image node.
 * @param _ - Parent (unused).
 * @param state - Serializer state.
 * @param info - Positional/tracking info.
 * @returns {string}
 */
function image(node2, _, state, info) {
  const quote = checkQuote(state);
  const suffix = quote === '"' ? "Quote" : "Apostrophe";
  const exit2 = state.enter("image");
  let subexit = state.enter("label");
  const tracker = state.createTracker(info);
  let value = tracker.move("![");
  value += tracker.move(
    state.safe(node2.alt, __spreadValues({ before: value, after: "]" }, tracker.current()))
  );
  value += tracker.move("](");
  subexit();
  if (
    // If there’s no url but there is a title…
    !node2.url && node2.title || // If there are control characters or whitespace.
    /[\0- \u007F]/.test(node2.url)
  ) {
    subexit = state.enter("destinationLiteral");
    value += tracker.move("<");
    value += tracker.move(
      state.safe(node2.url, __spreadValues({ before: value, after: ">" }, tracker.current()))
    );
    value += tracker.move(">");
  } else {
    // Plain destination: no angle brackets needed.
    subexit = state.enter("destinationRaw");
    value += tracker.move(
      state.safe(node2.url, __spreadValues({
        before: value,
        after: node2.title ? " " : ")"
      }, tracker.current()))
    );
  }
  subexit();
  if (node2.title) {
    subexit = state.enter(`title${suffix}`);
    value += tracker.move(" " + quote);
    value += tracker.move(
      state.safe(node2.title, __spreadValues({
        before: value,
        after: quote
      }, tracker.current()))
    );
    value += tracker.move(quote);
    subexit();
  }
  value += tracker.move(")");
  exit2();
  return value;
}
// Peek: images always start with `!`.
function imagePeek() {
  return "!";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/image-reference.js
|
||
imageReference.peek = imageReferencePeek;
/**
 * Serialize an `imageReference` node: `![alt][id]` (full), `![alt][]`
 * (collapsed), or `![alt]` (shortcut), depending on `referenceType` and
 * whether the label equals the reference.
 *
 * @param node2 - Image reference node.
 * @param _ - Parent (unused).
 * @param state - Serializer state.
 * @param info - Positional/tracking info.
 * @returns {string}
 */
function imageReference(node2, _, state, info) {
  const type = node2.referenceType;
  const exit2 = state.enter("imageReference");
  let subexit = state.enter("label");
  const tracker = state.createTracker(info);
  let value = tracker.move("![");
  const alt = state.safe(node2.alt, __spreadValues({
    before: value,
    after: "]"
  }, tracker.current()));
  value += tracker.move(alt + "][");
  subexit();
  // The reference identifier is independent of surrounding constructs:
  // temporarily clear the construct stack while serializing it.
  const stack = state.stack;
  state.stack = [];
  subexit = state.enter("reference");
  const reference = state.safe(state.associationId(node2), __spreadValues({
    before: value,
    after: "]"
  }, tracker.current()));
  subexit();
  state.stack = stack;
  exit2();
  if (type === "full" || !alt || alt !== reference) {
    value += tracker.move(reference + "]");
  } else if (type === "shortcut") {
    // Drop the already-appended `[`.
    value = value.slice(0, -1);
  } else {
    value += tracker.move("]");
  }
  return value;
}
// Peek: image references always start with `!`.
function imageReferencePeek() {
  return "!";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/inline-code.js
|
||
inlineCode.peek = inlineCodePeek;
/**
 * Serialize an `inlineCode` node.  The backtick fence grows until it can
 * no longer be confused with a run of backticks inside the value; the
 * value is space-padded when it starts/ends with a backtick or is fully
 * wrapped in whitespace; `atBreak`-unsafe characters after line endings
 * are replaced by spaces (inline code cannot escape them).
 *
 * @param node2 - Inline code node.
 * @param _ - Parent (unused).
 * @param state - Serializer state.
 * @returns {string}
 */
function inlineCode(node2, _, state) {
  let value = node2.value || "";
  let sequence = "`";

  // Grow the fence while a same-length backtick run exists in the value.
  while (new RegExp("(^|[^`])" + sequence + "([^`]|$)").test(value)) {
    sequence += "`";
  }

  const hasNonSpace = /[^ \r\n]/.test(value);
  const fullyPadded = /^[ \r\n]/.test(value) && /[ \r\n]$/.test(value);
  const touchesBacktick = /^`|`$/.test(value);
  if (hasNonSpace && (fullyPadded || touchesBacktick)) {
    value = " " + value + " ";
  }

  for (const pattern of state.unsafe) {
    // Compile first (caching on the pattern), matching the original's
    // side-effect order, then skip patterns that are not `atBreak`.
    const expression = state.compilePattern(pattern);
    if (!pattern.atBreak) continue;

    let match;
    while ((match = expression.exec(value))) {
      let position2 = match.index;
      // Treat a CRLF pair as one break: step back onto the CR.
      if (value.charCodeAt(position2) === 10 && value.charCodeAt(position2 - 1) === 13) {
        position2--;
      }
      value = value.slice(0, position2) + " " + value.slice(match.index + 1);
    }
  }

  return sequence + value + sequence;
}
/** Peek: inline code always starts with a backtick. */
function inlineCodePeek() {
  return "`";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/format-link-as-autolink.js
|
||
/**
 * Decide whether a link can be serialized as an autolink (`<https://…>`):
 * only when resource links are not forced, there is a URL but no title,
 * the content is a single text node equal to the URL (optionally modulo a
 * `mailto:` prefix), the URL has a protocol, and it contains no ASCII
 * control characters, spaces, or angle brackets.
 *
 * @param node2 - Link node.
 * @param state - Serializer state.
 * @returns {boolean}
 */
function formatLinkAsAutolink(node2, state) {
  const raw = toString(node2);

  if (state.options.resourceLink) return false;
  if (!node2.url || node2.title) return false;

  const children = node2.children;
  if (!children || children.length !== 1 || children[0].type !== "text") {
    return false;
  }

  if (raw !== node2.url && "mailto:" + raw !== node2.url) return false;
  if (!/^[a-z][a-z+.-]+:/i.test(node2.url)) return false;

  // Character escapes and references do not work inside autolinks.
  return !/[\0- <>\u007F]/.test(node2.url);
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/link.js
|
||
link.peek = linkPeek;
/**
 * Serialize a `link` node: as an autolink (`<url>`) when possible,
 * otherwise as a resource link `[label](url "title")`, using an
 * angle-bracketed destination when the URL needs it.
 *
 * @param node2 - Link node.
 * @param _ - Parent (unused).
 * @param state - Serializer state.
 * @param info - Positional/tracking info.
 * @returns {string}
 */
function link(node2, _, state, info) {
  const quote = checkQuote(state);
  const suffix = quote === '"' ? "Quote" : "Apostrophe";
  const tracker = state.createTracker(info);
  let exit2;
  let subexit;
  if (formatLinkAsAutolink(node2, state)) {
    // Autolink content takes no escapes: clear the construct stack while
    // serializing, then restore it.
    const stack = state.stack;
    state.stack = [];
    exit2 = state.enter("autolink");
    let value2 = tracker.move("<");
    value2 += tracker.move(
      state.containerPhrasing(node2, __spreadValues({
        before: value2,
        after: ">"
      }, tracker.current()))
    );
    value2 += tracker.move(">");
    exit2();
    state.stack = stack;
    return value2;
  }
  exit2 = state.enter("link");
  subexit = state.enter("label");
  let value = tracker.move("[");
  value += tracker.move(
    state.containerPhrasing(node2, __spreadValues({
      before: value,
      after: "]("
    }, tracker.current()))
  );
  value += tracker.move("](");
  subexit();
  if (
    // If there’s no url but there is a title…
    !node2.url && node2.title || // If there are control characters or whitespace.
    /[\0- \u007F]/.test(node2.url)
  ) {
    subexit = state.enter("destinationLiteral");
    value += tracker.move("<");
    value += tracker.move(
      state.safe(node2.url, __spreadValues({ before: value, after: ">" }, tracker.current()))
    );
    value += tracker.move(">");
  } else {
    // Plain destination: no angle brackets needed.
    subexit = state.enter("destinationRaw");
    value += tracker.move(
      state.safe(node2.url, __spreadValues({
        before: value,
        after: node2.title ? " " : ")"
      }, tracker.current()))
    );
  }
  subexit();
  if (node2.title) {
    subexit = state.enter(`title${suffix}`);
    value += tracker.move(" " + quote);
    value += tracker.move(
      state.safe(node2.title, __spreadValues({
        before: value,
        after: quote
      }, tracker.current()))
    );
    value += tracker.move(quote);
    subexit();
  }
  value += tracker.move(")");
  exit2();
  return value;
}
// Peek: `<` for autolinks, `[` for resource links.
function linkPeek(node2, _, state) {
  return formatLinkAsAutolink(node2, state) ? "<" : "[";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/link-reference.js
|
||
linkReference.peek = linkReferencePeek;
/**
 * Serialize a `linkReference` node: `[text][id]` (full), `[text][]`
 * (collapsed), or `[text]` (shortcut), depending on `referenceType` and
 * whether the text equals the reference.
 *
 * @param node2 - Link reference node.
 * @param _ - Parent (unused).
 * @param state - Serializer state.
 * @param info - Positional/tracking info.
 * @returns {string}
 */
function linkReference(node2, _, state, info) {
  const type = node2.referenceType;
  const exit2 = state.enter("linkReference");
  let subexit = state.enter("label");
  const tracker = state.createTracker(info);
  let value = tracker.move("[");
  const text4 = state.containerPhrasing(node2, __spreadValues({
    before: value,
    after: "]"
  }, tracker.current()));
  value += tracker.move(text4 + "][");
  subexit();
  // The reference identifier is independent of surrounding constructs:
  // temporarily clear the construct stack while serializing it.
  const stack = state.stack;
  state.stack = [];
  subexit = state.enter("reference");
  const reference = state.safe(state.associationId(node2), __spreadValues({
    before: value,
    after: "]"
  }, tracker.current()));
  subexit();
  state.stack = stack;
  exit2();
  if (type === "full" || !text4 || text4 !== reference) {
    value += tracker.move(reference + "]");
  } else if (type === "shortcut") {
    // Drop the already-appended `[`.
    value = value.slice(0, -1);
  } else {
    value += tracker.move("]");
  }
  return value;
}
// Peek: link references always start with `[`.
function linkReferencePeek() {
  return "[";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-bullet.js
|
||
/**
 * Resolve the unordered-list bullet marker from options (default `*`).
 *
 * @param state - Serializer state.
 * @returns {string} `*`, `+`, or `-`.
 * @throws {Error} When `options.bullet` is any other value.
 */
function checkBullet(state) {
  const marker = state.options.bullet || "*";

  switch (marker) {
    case "*":
    case "+":
    case "-":
      return marker;
    default:
      throw new Error(
        "Cannot serialize items with `" + marker + "` for `options.bullet`, expected `*`, `+`, or `-`"
      );
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-bullet-other.js
|
||
/**
 * Resolve the alternate bullet marker, used when adjacent lists would
 * otherwise merge.  Defaults to `-` when the primary bullet is `*`, and
 * `*` otherwise.
 *
 * @param state - Serializer state.
 * @returns {string} `*`, `+`, or `-`, always different from the primary.
 * @throws {Error} When `options.bulletOther` is invalid or equal to the
 *   primary bullet.
 */
function checkBulletOther(state) {
  const bullet = checkBullet(state);
  const other = state.options.bulletOther;

  if (!other) {
    return bullet === "*" ? "-" : "*";
  }

  switch (other) {
    case "*":
    case "+":
    case "-":
      break;
    default:
      throw new Error(
        "Cannot serialize items with `" + other + "` for `options.bulletOther`, expected `*`, `+`, or `-`"
      );
  }

  if (other === bullet) {
    throw new Error(
      "Expected `bullet` (`" + bullet + "`) and `bulletOther` (`" + other + "`) to be different"
    );
  }

  return other;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-bullet-ordered.js
|
||
/**
 * Resolve the ordered-list marker from options (default `.`).
 *
 * @param state - Serializer state.
 * @returns {string} `.` or `)`.
 * @throws {Error} When `options.bulletOrdered` is any other value.
 */
function checkBulletOrdered(state) {
  const marker = state.options.bulletOrdered || ".";

  if (marker === "." || marker === ")") {
    return marker;
  }

  throw new Error(
    "Cannot serialize items with `" + marker + "` for `options.bulletOrdered`, expected `.` or `)`"
  );
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-rule.js
|
||
/**
 * Resolve the thematic-break marker from options (default `*`).
 *
 * @param state - Serializer state.
 * @returns {string} `*`, `-`, or `_`.
 * @throws {Error} When `options.rule` is any other value.
 */
function checkRule(state) {
  const marker = state.options.rule || "*";

  if (marker === "*" || marker === "-" || marker === "_") {
    return marker;
  }

  throw new Error(
    "Cannot serialize rules with `" + marker + "` for `options.rule`, expected `*`, `-`, or `_`"
  );
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/list.js
|
||
/**
 * Serialize a `list` node, alternating the bullet marker when the current
 * one would be ambiguous (same as the previous adjacent list, readable as
 * a thematic break, or clashing with a first-child thematic break).
 *
 * @param node2 - List node.
 * @param parent - Parent node.
 * @param state - Serializer state.
 * @param info - Positional/tracking info.
 * @returns {string}
 */
function list3(node2, parent, state, info) {
  const exit2 = state.enter("list");
  const bulletCurrent = state.bulletCurrent;
  let bullet = node2.ordered ? checkBulletOrdered(state) : checkBullet(state);
  // The fallback marker: flip `.`/`)` for ordered lists.
  const bulletOther = node2.ordered ? bullet === "." ? ")" : "." : checkBulletOther(state);
  // Alternate when the previous adjacent list used the same marker.
  let useDifferentMarker = parent && state.bulletLastUsed ? bullet === state.bulletLastUsed : false;
  if (!node2.ordered) {
    const firstListItem = node2.children ? node2.children[0] : void 0;
    if (
      // Bullet could be used as a thematic break marker:
      (bullet === "*" || bullet === "-") && // Empty first list item:
      firstListItem && (!firstListItem.children || !firstListItem.children[0]) && // Directly in two other list items:
      state.stack[state.stack.length - 1] === "list" && state.stack[state.stack.length - 2] === "listItem" && state.stack[state.stack.length - 3] === "list" && state.stack[state.stack.length - 4] === "listItem" && // That are each the first child.
      state.indexStack[state.indexStack.length - 1] === 0 && state.indexStack[state.indexStack.length - 2] === 0 && state.indexStack[state.indexStack.length - 3] === 0
    ) {
      useDifferentMarker = true;
    }
    // A list item starting with a thematic break that uses the same
    // marker would be absorbed into the bullet: switch markers.
    if (checkRule(state) === bullet && firstListItem) {
      let index2 = -1;
      while (++index2 < node2.children.length) {
        const item = node2.children[index2];
        if (item && item.type === "listItem" && item.children && item.children[0] && item.children[0].type === "thematicBreak") {
          useDifferentMarker = true;
          break;
        }
      }
    }
  }
  if (useDifferentMarker) {
    bullet = bulletOther;
  }
  state.bulletCurrent = bullet;
  const value = state.containerFlow(node2, info);
  state.bulletLastUsed = bullet;
  state.bulletCurrent = bulletCurrent;
  exit2();
  return value;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-list-item-indent.js
|
||
/**
 * Resolve the list-item indent style from options (default `one`).
 *
 * @param state - Serializer state.
 * @returns {string} `tab`, `one`, or `mixed`.
 * @throws {Error} When `options.listItemIndent` is any other value.
 */
function checkListItemIndent(state) {
  const style = state.options.listItemIndent || "one";

  switch (style) {
    case "tab":
    case "one":
    case "mixed":
      return style;
    default:
      throw new Error(
        "Cannot serialize items with `" + style + "` for `options.listItemIndent`, expected `tab`, `one`, or `mixed`"
      );
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/list-item.js
|
||
/**
 * Serialize a `listItem` node: the bullet (numbered for ordered lists),
 * then the item's flow content with continuation lines indented.
 *
 * @param node2 - List item node.
 * @param parent - Parent node (a `list` when ordered numbering applies).
 * @param state - Serializer state.
 * @param info - Positional/tracking info.
 * @returns {string}
 */
function listItem(node2, parent, state, info) {
  const listItemIndent = checkListItemIndent(state);
  let bullet = state.bulletCurrent || checkBullet(state);
  if (parent && parent.type === "list" && parent.ordered) {
    // Number from `start` (default 1); fixed number when
    // `incrementListMarker` is disabled.
    bullet = (typeof parent.start === "number" && parent.start > -1 ? parent.start : 1) + (state.options.incrementListMarker === false ? 0 : parent.children.indexOf(node2)) + bullet;
  }
  let size = bullet.length + 1;
  // `tab` (or `mixed` on a spread list/item) pads the indent to a
  // multiple of 4.
  if (listItemIndent === "tab" || listItemIndent === "mixed" && (parent && parent.type === "list" && parent.spread || node2.spread)) {
    size = Math.ceil(size / 4) * 4;
  }
  const tracker = state.createTracker(info);
  tracker.move(bullet + " ".repeat(size - bullet.length));
  tracker.shift(size);
  const exit2 = state.enter("listItem");
  const value = state.indentLines(
    state.containerFlow(node2, tracker.current()),
    map4
  );
  exit2();
  return value;
  // First line gets the bullet; later non-blank lines get spaces; blank
  // lines get nothing.
  function map4(line, index2, blank) {
    if (index2) {
      return (blank ? "" : " ".repeat(size)) + line;
    }
    return (blank ? bullet : bullet + " ".repeat(size - bullet.length)) + line;
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/paragraph.js
|
||
/**
 * Serialize a `paragraph` node: its phrasing children inside `paragraph`
 * and `phrasing` construct contexts.
 *
 * @param node2 - Paragraph node.
 * @param _ - Parent (unused).
 * @param state - Serializer state.
 * @param info - Positional/tracking info.
 * @returns {string}
 */
function paragraph(node2, _, state, info) {
  const exitParagraph = state.enter("paragraph");
  const exitPhrasing = state.enter("phrasing");

  const result = state.containerPhrasing(node2, info);

  exitPhrasing();
  exitParagraph();
  return result;
}
|
||
|
||
// node_modules/mdast-util-phrasing/lib/index.js
|
||
// Check whether a node is mdast phrasing content (excluding raw `html`),
// including the types added by the math/mdx/directive/GFM utilities.
var phrasing = (
  /** @type {(node?: unknown) => node is Exclude<PhrasingContent, Html>} */
  convert([
    "break",
    "delete",
    "emphasis",
    // To do: next major: removed since footnotes were added to GFM.
    "footnote",
    "footnoteReference",
    "image",
    "imageReference",
    "inlineCode",
    // Enabled by `mdast-util-math`:
    "inlineMath",
    "link",
    "linkReference",
    // Enabled by `mdast-util-mdx`:
    "mdxJsxTextElement",
    // Enabled by `mdast-util-mdx`:
    "mdxTextExpression",
    "strong",
    "text",
    // Enabled by `mdast-util-directive`:
    "textDirective"
  ])
);
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/root.js
|
||
/**
 * Serialize a `root` node.  A root holding any phrasing child is treated
 * as a phrasing container; otherwise its children are flow content.
 *
 * @param node2 - Root node.
 * @param _ - Parent (unused).
 * @param state - Serializer state.
 * @param info - Positional/tracking info.
 * @returns {string}
 */
function root(node2, _, state, info) {
  let container = state.containerFlow;

  for (const child of node2.children) {
    if (phrasing(child)) {
      container = state.containerPhrasing;
      break;
    }
  }

  return container.call(state, node2, info);
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-strong.js
|
||
/**
 * Resolve the strong (bold) marker from options (default `*`).
 *
 * @param state - Serializer state.
 * @returns {string} `*` or `_`.
 * @throws {Error} When `options.strong` is any other value.
 */
function checkStrong(state) {
  const marker = state.options.strong || "*";

  if (marker === "*" || marker === "_") {
    return marker;
  }

  throw new Error(
    "Cannot serialize strong with `" + marker + "` for `options.strong`, expected `*`, or `_`"
  );
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/strong.js
|
||
strong.peek = strongPeek;
/**
 * Serialize a `strong` node with doubled markers (`**` or `__`), encoding
 * the first/last inner character as a character reference when the
 * neighboring characters would change how the attention run parses.
 *
 * @param node2 - Strong node.
 * @param _ - Parent (unused).
 * @param state - Serializer state.
 * @param info - Positional/tracking info.
 * @returns {string}
 */
function strong(node2, _, state, info) {
  const marker = checkStrong(state);
  const exit2 = state.enter("strong");
  const tracker = state.createTracker(info);
  const before = tracker.move(marker + marker);
  let between2 = tracker.move(
    state.containerPhrasing(node2, __spreadValues({
      after: marker,
      before
    }, tracker.current()))
  );
  const betweenHead = between2.charCodeAt(0);
  // Check the opening boundary: previous character vs first inner one.
  const open = encodeInfo(
    info.before.charCodeAt(info.before.length - 1),
    betweenHead,
    marker
  );
  if (open.inside) {
    between2 = encodeCharacterReference(betweenHead) + between2.slice(1);
  }
  // Check the closing boundary: last inner character vs next one.
  const betweenTail = between2.charCodeAt(between2.length - 1);
  const close = encodeInfo(info.after.charCodeAt(0), betweenTail, marker);
  if (close.inside) {
    between2 = between2.slice(0, -1) + encodeCharacterReference(betweenTail);
  }
  const after = tracker.move(marker + marker);
  exit2();
  // Tell the surrounding phrasing container whether the characters just
  // outside this run may also need encoding.
  state.attentionEncodeSurroundingInfo = {
    after: close.outside,
    before: open.outside
  };
  return before + between2 + after;
}
// Peek: the configured strong marker (default `*`).
function strongPeek(_, _1, state) {
  return state.options.strong || "*";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/text.js
|
||
// Serialize a `text` node: escape the raw value for the current context.
function text3(node2, _, state, info) {
  return state.safe(node2.value, info);
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-rule-repetition.js
|
||
/**
 * Resolve how many markers a thematic break repeats (default 3).
 *
 * @param state - Serializer state.
 * @returns {number} Repetition count (3 or more).
 * @throws {Error} When `options.ruleRepetition` is below 3.
 */
function checkRuleRepetition(state) {
  const count = state.options.ruleRepetition || 3;

  if (count < 3) {
    throw new Error(
      "Cannot serialize rules with repetition `" + count + "` for `options.ruleRepetition`, expected `3` or more"
    );
  }

  return count;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/thematic-break.js
|
||
/**
 * Serialize a `thematicBreak` node: the rule marker repeated, each copy
 * optionally followed by a space (the trailing space is trimmed).
 *
 * @param _ - Node (unused).
 * @param _1 - Parent (unused).
 * @param state - Serializer state.
 * @returns {string}
 */
function thematicBreak2(_, _1, state) {
  const withSpaces = state.options.ruleSpaces;
  const unit = checkRule(state) + (withSpaces ? " " : "");
  const value = unit.repeat(checkRuleRepetition(state));
  return withSpaces ? value.slice(0, -1) : value;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/index.js
|
||
// Default mdast → markdown serialization handlers, keyed by node type.
var handle = {
  blockquote,
  // `break` nodes are hard breaks.
  break: hardBreak,
  code,
  definition: definition2,
  emphasis,
  hardBreak,
  heading,
  html,
  image,
  imageReference,
  inlineCode,
  link,
  linkReference,
  list: list3,
  listItem,
  paragraph,
  root,
  strong,
  text: text3,
  thematicBreak: thematicBreak2
};
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/join.js
|
||
// Default join functions deciding how many blank lines separate siblings.
var join = [joinDefaults];
/**
 * Default join logic.
 * Returns `false` (forcing an HTML-comment separator) when an indented
 * code block follows a list or another indented code block; inside spread-
 * aware parents (list items), returns `1`/`0` blank lines per `spread`,
 * except that paragraph-like pairs always fall through to the default.
 *
 * @param left - Previous sibling.
 * @param right - Next sibling.
 * @param parent - Their parent.
 * @param state - Serializer state.
 * @returns {boolean|number|undefined}
 */
function joinDefaults(left, right, parent, state) {
  // Indented code after a list (or after more indented code) would merge:
  // force a comment between them.
  if (right.type === "code" && formatCodeAsIndented(right, state) && (left.type === "list" || left.type === right.type && formatCodeAsIndented(left, state))) {
    return false;
  }
  if ("spread" in parent && typeof parent.spread === "boolean") {
    if (left.type === "paragraph" && // Two paragraphs.
    (left.type === right.type || right.type === "definition" || // Paragraph followed by a setext heading.
    right.type === "heading" && formatHeadingAsSetext(right, state))) {
      // No opinion: let the default (blank line) apply.
      return;
    }
    // Spread items separate children with a blank line; tight ones don't.
    return parent.spread ? 1 : 0;
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/unsafe.js
|
||
// Constructs that are fully self-delimited phrasing spans; characters
// inside them generally need no extra escaping.
var fullPhrasingSpans = [
  "autolink",
  "destinationLiteral",
  "destinationRaw",
  "reference",
  "titleQuote",
  "titleApostrophe"
];
// Patterns for characters that cannot be emitted verbatim in certain
// constructs; `safe()` escapes or encodes matches.
var unsafe = [
  // FIX: these first three entries cover the TAB character ("\t").  They
  // had been flattened to a plain space (duplicating the space entries
  // below verbatim), which left tabs unescaped in phrasing and fenced-code
  // language fields; restored per upstream mdast-util-to-markdown.
  { character: "\t", after: "[\\r\\n]", inConstruct: "phrasing" },
  { character: "\t", before: "[\\r\\n]", inConstruct: "phrasing" },
  {
    character: "\t",
    inConstruct: ["codeFencedLangGraveAccent", "codeFencedLangTilde"]
  },
  {
    character: "\r",
    inConstruct: [
      "codeFencedLangGraveAccent",
      "codeFencedLangTilde",
      "codeFencedMetaGraveAccent",
      "codeFencedMetaTilde",
      "destinationLiteral",
      "headingAtx"
    ]
  },
  {
    character: "\n",
    inConstruct: [
      "codeFencedLangGraveAccent",
      "codeFencedLangTilde",
      "codeFencedMetaGraveAccent",
      "codeFencedMetaTilde",
      "destinationLiteral",
      "headingAtx"
    ]
  },
  { character: " ", after: "[\\r\\n]", inConstruct: "phrasing" },
  { character: " ", before: "[\\r\\n]", inConstruct: "phrasing" },
  {
    character: " ",
    inConstruct: ["codeFencedLangGraveAccent", "codeFencedLangTilde"]
  },
  // An exclamation mark can start an image, if it is followed by a link or
  // a link reference.
  {
    character: "!",
    after: "\\[",
    inConstruct: "phrasing",
    notInConstruct: fullPhrasingSpans
  },
  // A quote can break out of a title.
  { character: '"', inConstruct: "titleQuote" },
  // A number sign could start an ATX heading if it starts a line.
  { atBreak: true, character: "#" },
  { character: "#", inConstruct: "headingAtx", after: "(?:[\r\n]|$)" },
  // Dollar sign and percentage are not used in markdown.
  // An ampersand could start a character reference.
  { character: "&", after: "[#A-Za-z]", inConstruct: "phrasing" },
  // An apostrophe can break out of a title.
  { character: "'", inConstruct: "titleApostrophe" },
  // A left paren could break out of a destination raw.
  { character: "(", inConstruct: "destinationRaw" },
  // A left paren followed by `]` could make something into a link or image.
  {
    before: "\\]",
    character: "(",
    inConstruct: "phrasing",
    notInConstruct: fullPhrasingSpans
  },
  // A right paren could start a list item or break out of a destination
  // raw.
  { atBreak: true, before: "\\d+", character: ")" },
  { character: ")", inConstruct: "destinationRaw" },
  // An asterisk can start thematic breaks, list items, emphasis, strong.
  { atBreak: true, character: "*", after: "(?:[ \r\n*])" },
  { character: "*", inConstruct: "phrasing", notInConstruct: fullPhrasingSpans },
  // A plus sign could start a list item.
  { atBreak: true, character: "+", after: "(?:[ \r\n])" },
  // A dash can start thematic breaks, list items, and setext heading
  // underlines.
  { atBreak: true, character: "-", after: "(?:[ \r\n-])" },
  // A dot could start a list item.
  { atBreak: true, before: "\\d+", character: ".", after: "(?:[ \r\n]|$)" },
  // Slash, colon, and semicolon are not used in markdown for constructs.
  // A less than can start html (flow or text) or an autolink.
  // HTML could start with an exclamation mark (declaration, cdata, comment),
  // slash (closing tag), question mark (instruction), or a letter (tag).
  // An autolink also starts with a letter.
  // Finally, it could break out of a destination literal.
  { atBreak: true, character: "<", after: "[!/?A-Za-z]" },
  {
    character: "<",
    after: "[!/?A-Za-z]",
    inConstruct: "phrasing",
    notInConstruct: fullPhrasingSpans
  },
  { character: "<", inConstruct: "destinationLiteral" },
  // An equals to can start setext heading underlines.
  { atBreak: true, character: "=" },
  // A greater than can start block quotes and it can break out of a
  // destination literal.
  { atBreak: true, character: ">" },
  { character: ">", inConstruct: "destinationLiteral" },
  // Question mark and at sign are not used in markdown for constructs.
  // A left bracket can start definitions, references, labels,
  { atBreak: true, character: "[" },
  { character: "[", inConstruct: "phrasing", notInConstruct: fullPhrasingSpans },
  { character: "[", inConstruct: ["label", "reference"] },
  // A backslash can start an escape (when followed by punctuation) or a
  // hard break (when followed by an eol).
  // Note: typical escapes are handled in `safe`!
  { character: "\\", after: "[\\r\\n]", inConstruct: "phrasing" },
  // A right bracket can exit labels.
  { character: "]", inConstruct: ["label", "reference"] },
  // Caret is not used in markdown for constructs.
  // An underscore can start emphasis, strong, or a thematic break.
  { atBreak: true, character: "_" },
  { character: "_", inConstruct: "phrasing", notInConstruct: fullPhrasingSpans },
  // A grave accent can start code (fenced or text), or it can break out of
  // a grave accent code fence.
  { atBreak: true, character: "`" },
  {
    character: "`",
    inConstruct: ["codeFencedLangGraveAccent", "codeFencedMetaGraveAccent"]
  },
  { character: "`", inConstruct: "phrasing", notInConstruct: fullPhrasingSpans },
  // Left brace, vertical bar, right brace are not used in markdown for
  // constructs.
  // A tilde can start code (fenced).
  { atBreak: true, character: "~" }
];
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/association.js
|
||
/**
 * Get the text of an association (definition/reference): the explicit
 * label when present (or when there is no identifier), otherwise the
 * decoded identifier.
 *
 * @param node2 - Node with `label`/`identifier` fields.
 * @returns {string}
 */
function association(node2) {
  const { label, identifier } = node2;

  if (label || !identifier) {
    return label || "";
  }

  return decodeString(identifier);
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/compile-pattern.js
|
||
/**
 * Compile an unsafe pattern into a global regex, caching the result on
 * the pattern object (`_compiled`).
 *
 * @param pattern - Unsafe pattern (`character`, optional `before`,
 *   `after`, `atBreak`).
 * @returns {RegExp}
 */
function compilePattern(pattern) {
  if (pattern._compiled) {
    return pattern._compiled;
  }

  // `atBreak` means "after a line ending plus optional indent".
  const breakPrefix = pattern.atBreak ? "[\\r\\n][\\t ]*" : "";
  const beforeGroup = pattern.before ? "(?:" + pattern.before + ")" : "";
  const lookbehind = breakPrefix + beforeGroup;
  // Escape the character when it is a regex metacharacter.
  const escape = /[|\\{}()[\]^$+*?.-]/.test(pattern.character) ? "\\" : "";
  const afterGroup = pattern.after ? "(?:" + pattern.after + ")" : "";

  pattern._compiled = new RegExp(
    (lookbehind ? "(" + lookbehind + ")" : "") + escape + pattern.character + afterGroup,
    "g"
  );
  return pattern._compiled;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/container-phrasing.js
|
||
/**
 * Serialize the children of a phrasing container (paragraph, heading, …).
 * Peeks at the next sibling's first character so handlers can escape
 * accordingly, fixes up line endings before raw HTML, and applies the
 * attention-run surrounding-character encoding requested via
 * `state.attentionEncodeSurroundingInfo`.
 *
 * @param parent - Parent whose children are serialized.
 * @param state - Serializer state.
 * @param info - Positional/tracking info.
 * @returns {string}
 */
function containerPhrasing(parent, state, info) {
  const indexStack = state.indexStack;
  const children = parent.children || [];
  const results = [];
  let index2 = -1;
  let before = info.before;
  // Set when the previous child asked for its following character to be
  // encoded; holds that character.
  let encodeAfter;
  indexStack.push(-1);
  let tracker = state.createTracker(info);
  while (++index2 < children.length) {
    const child = children[index2];
    let after;
    indexStack[indexStack.length - 1] = index2;
    if (index2 + 1 < children.length) {
      // Ask the next sibling's handler (or its `peek`) for the first
      // character it will produce.
      let handle2 = state.handle.handlers[children[index2 + 1].type];
      if (handle2 && handle2.peek) handle2 = handle2.peek;
      after = handle2 ? handle2(children[index2 + 1], parent, state, __spreadValues({
        before: "",
        after: ""
      }, tracker.current())).charAt(0) : "";
    } else {
      after = info.after;
    }
    // A line ending directly before raw HTML would turn it into flow
    // HTML: replace it with a space and rebuild the tracker.
    if (results.length > 0 && (before === "\r" || before === "\n") && child.type === "html") {
      results[results.length - 1] = results[results.length - 1].replace(
        /(\r?\n|\r)$/,
        " "
      );
      before = " ";
      tracker = state.createTracker(info);
      tracker.move(results.join(""));
    }
    let value = state.handle(child, parent, state, __spreadProps(__spreadValues({}, tracker.current()), {
      after,
      before
    }));
    // Honor the previous child's request to encode the character after it.
    if (encodeAfter && encodeAfter === value.slice(0, 1)) {
      value = encodeCharacterReference(encodeAfter.charCodeAt(0)) + value.slice(1);
    }
    const encodingInfo = state.attentionEncodeSurroundingInfo;
    state.attentionEncodeSurroundingInfo = void 0;
    encodeAfter = void 0;
    if (encodingInfo) {
      // Encode the character just before this run (in the previous
      // result)…
      if (results.length > 0 && encodingInfo.before && before === results[results.length - 1].slice(-1)) {
        results[results.length - 1] = results[results.length - 1].slice(0, -1) + encodeCharacterReference(before.charCodeAt(0));
      }
      // …and remember to encode the character just after it (handled on
      // the next iteration).
      if (encodingInfo.after) encodeAfter = after;
    }
    tracker.move(value);
    results.push(value);
    before = value.slice(-1);
  }
  indexStack.pop();
  return results.join("");
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/container-flow.js
|
||
// Serialize the children of `parent` (a flow/block container such as root or
// blockquote) to markdown, joining adjacent siblings with the separator
// chosen by `between` (normally a blank line).
function containerFlow(parent, state, info) {
  const indexStack = state.indexStack;
  const children = parent.children || [];
  const tracker = state.createTracker(info);
  const results = [];
  let index2 = -1;
  // Track our position in the tree so handlers can inspect ancestry.
  indexStack.push(-1);
  while (++index2 < children.length) {
    const child = children[index2];
    indexStack[indexStack.length - 1] = index2;
    results.push(
      tracker.move(
        state.handle(child, parent, state, __spreadValues({
          before: "\n",
          after: "\n"
        }, tracker.current()))
      )
    );
    // Any non-list sibling resets bullet alternation between adjacent lists.
    if (child.type !== "list") {
      state.bulletLastUsed = void 0;
    }
    if (index2 < children.length - 1) {
      results.push(
        tracker.move(between(child, children[index2 + 1], parent, state))
      );
    }
  }
  indexStack.pop();
  return results.join("");
}
|
||
// Decide what goes between two adjacent flow nodes. The registered `join`
// functions are consulted from most recently added to first; a function may
// return a number of blank lines, `false` to force a separating comment, or
// `true`/`1` to stop with the default.
function between(left, right, parent, state) {
  for (let index = state.join.length - 1; index > -1; index--) {
    const result = state.join[index](left, right, parent, state);
    if (result === true || result === 1) {
      break;
    }
    if (typeof result === "number") {
      // `result` blank lines means `result + 1` line endings.
      return "\n".repeat(1 + result);
    }
    if (result === false) {
      // An empty comment keeps the two nodes from merging when parsed back.
      return "\n\n<!---->\n\n";
    }
  }
  return "\n\n";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/indent-lines.js
|
||
// Shared line-ending matcher (LF, CRLF, or CR).
var eol = /\r?\n|\r/g;

// Map every line of `value` through `map4(line, lineNumber, isBlank)` while
// preserving the original line-ending characters verbatim.
function indentLines(value, map4) {
  const out = [];
  let lineIndex = 0;
  let position = 0;
  let found;
  const push = (chunk) => {
    out.push(map4(chunk, lineIndex, !chunk));
  };
  while ((found = eol.exec(value))) {
    push(value.slice(position, found.index));
    out.push(found[0]);
    position = found.index + found[0].length;
    lineIndex++;
  }
  // Trailing (or only) line after the last line ending.
  push(value.slice(position));
  return out.join("");
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/safe.js
|
||
// Make `input` safe to embed in the current markdown context: backslash-escape
// or character-reference-encode characters that would otherwise be parsed as
// markdown constructs, according to the `state.unsafe` patterns in scope.
function safe(state, input, config) {
  // Include the surrounding text so patterns with `before`/`after`/`atBreak`
  // context can match across the boundary of `input`.
  const value = (config.before || "") + (input || "") + (config.after || "");
  const positions = [];
  const result = [];
  const infos = {};
  let index2 = -1;
  while (++index2 < state.unsafe.length) {
    const pattern = state.unsafe[index2];
    if (!patternInScope(state.stack, pattern)) {
      continue;
    }
    const expression = state.compilePattern(pattern);
    let match;
    while (match = expression.exec(value)) {
      const before = "before" in pattern || Boolean(pattern.atBreak);
      const after = "after" in pattern;
      // The dangerous character itself sits after the optional context group.
      const position2 = match.index + (before ? match[1].length : 0);
      if (positions.includes(position2)) {
        // Several patterns flagged this spot: only keep a `before`/`after`
        // context requirement if *all* of them have it.
        if (infos[position2].before && !before) {
          infos[position2].before = false;
        }
        if (infos[position2].after && !after) {
          infos[position2].after = false;
        }
      } else {
        positions.push(position2);
        infos[position2] = { before, after };
      }
    }
  }
  positions.sort(numerical);
  // Only escape inside `input` proper, never in the borrowed context.
  let start = config.before ? config.before.length : 0;
  const end = value.length - (config.after ? config.after.length : 0);
  index2 = -1;
  while (++index2 < positions.length) {
    const position2 = positions[index2];
    if (position2 < start || position2 >= end) {
      continue;
    }
    // Skip a context-dependent match whose escape is already guaranteed by
    // escaping an unconditional neighbor.
    if (position2 + 1 < end && positions[index2 + 1] === position2 + 1 && infos[position2].after && !infos[position2 + 1].before && !infos[position2 + 1].after || positions[index2 - 1] === position2 - 1 && infos[position2].before && !infos[position2 - 1].before && !infos[position2 - 1].after) {
      continue;
    }
    if (start !== position2) {
      result.push(escapeBackslashes(value.slice(start, position2), "\\"));
    }
    start = position2;
    // ASCII punctuation can take a backslash escape; anything else (or a
    // character the caller explicitly wants encoded) becomes a reference.
    if (/[!-/:-@[-`{-~]/.test(value.charAt(position2)) && (!config.encode || !config.encode.includes(value.charAt(position2)))) {
      result.push("\\");
    } else {
      result.push(encodeCharacterReference(value.charCodeAt(position2)));
      start++;
    }
  }
  result.push(escapeBackslashes(value.slice(start, end), config.after));
  return result.join("");
}
|
||
// Ascending numeric comparator for `Array.prototype.sort`.
function numerical(left, right) {
  return left - right;
}
|
||
// Double every backslash in `value` that is followed by ASCII punctuation
// (even when that punctuation only appears in the upcoming `after` text), so
// the backslash survives markdown parsing literally.
function escapeBackslashes(value, after) {
  const expression = /\\(?=[!-/:-@[-`{-~])/g;
  // Look ahead into `after` too: a trailing backslash may become an escape
  // once the next text is appended.
  const whole = value + after;
  const results = [];
  let start = 0;
  let match = expression.exec(whole);
  while (match) {
    if (start !== match.index) {
      results.push(value.slice(start, match.index));
    }
    // Insert an extra backslash; the original one stays in the next slice.
    results.push("\\");
    start = match.index;
    match = expression.exec(whole);
  }
  results.push(value.slice(start));
  return results.join("");
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/track.js
|
||
// Create a position tracker that follows line/column while text is emitted.
// `lineShift` accounts for indentation added later (e.g. blockquote markers).
function track(config) {
  const settings = config || {};
  const now = settings.now || {};
  let lineShift = settings.lineShift || 0;
  let line = now.line || 1;
  let column = now.column || 1;

  // Snapshot of the current position and shift.
  function current() {
    return { now: { line, column }, lineShift };
  }

  // Add `value` columns of indentation to every subsequent line.
  function shift(value) {
    lineShift += value;
  }

  // Advance the position across `input` and return it unchanged (so calls
  // can be chained inline while serializing).
  function move(input) {
    const value = input || "";
    const chunks = value.split(/\r?\n|\r/g);
    const tail = chunks[chunks.length - 1];
    line += chunks.length - 1;
    column = chunks.length === 1 ? column + tail.length : 1 + tail.length + lineShift;
    return value;
  }

  return { move, current, shift };
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/index.js
|
||
// Serialize an mdast `tree` to a markdown string, honoring the formatting
// options and extensions in `options2`.
function toMarkdown(tree, options2) {
  const settings = options2 || {};
  // Mutable serialization state threaded through every handler.
  const state = {
    associationId: association,
    containerPhrasing: containerPhrasingBound,
    containerFlow: containerFlowBound,
    createTracker: track,
    compilePattern,
    enter,
    // @ts-expect-error: GFM / frontmatter are typed in `mdast` but not defined
    // here.
    handlers: __spreadValues({}, handle),
    // @ts-expect-error: add `handle` in a second.
    handle: void 0,
    indentLines,
    indexStack: [],
    join: [...join],
    options: {},
    safe: safeBound,
    stack: [],
    unsafe: [...unsafe]
  };
  // Apply user options and extensions onto `state`.
  configure2(state, settings);
  if (state.options.tightDefinitions) {
    state.join.push(joinDefinition);
  }
  // Dispatch to a handler based on `node.type`.
  state.handle = zwitch("type", {
    invalid,
    unknown,
    handlers: state.handlers
  });
  let result = state.handle(tree, void 0, state, {
    before: "\n",
    after: "\n",
    now: { line: 1, column: 1 },
    lineShift: 0
  });
  // Guarantee a final line ending (10 = LF, 13 = CR).
  if (result && result.charCodeAt(result.length - 1) !== 10 && result.charCodeAt(result.length - 1) !== 13) {
    result += "\n";
  }
  return result;
  // Push a construct name onto the scope stack; returns a function that
  // pops it again.
  function enter(name) {
    state.stack.push(name);
    return exit2;
    function exit2() {
      state.stack.pop();
    }
  }
}
|
||
// Dispatch fallback for values that are not nodes at all (no `type` field).
function invalid(value) {
  const message = "Cannot handle value `" + value + "`, expected node";
  throw new Error(message);
}
|
||
// Dispatch fallback for nodes whose `type` has no registered handler.
function unknown(value) {
  const node2 = /** @type {Nodes} */ (value);
  throw new Error("Cannot handle unknown node `" + node2.type + "`");
}
|
||
// `join` rule used with `tightDefinitions`: adjacent definitions get a single
// line ending (zero blank lines) between them; anything else is undecided.
function joinDefinition(left, right) {
  return left.type === "definition" && right.type === "definition" ? 0 : void 0;
}
|
||
// `state.containerPhrasing`: method form of `containerPhrasing`, where
// `this` is the state object.
function containerPhrasingBound(parent, info) {
  const state = this;
  return containerPhrasing(parent, state, info);
}
|
||
// `state.containerFlow`: method form of `containerFlow`, where `this` is the
// state object.
function containerFlowBound(parent, info) {
  const state = this;
  return containerFlow(parent, state, info);
}
|
||
// `state.safe`: method form of `safe`, where `this` is the state object.
function safeBound(value, config) {
  const state = this;
  return safe(state, value, config);
}
|
||
|
||
// node_modules/remark-stringify/lib/index.js
|
||
// unified plugin: install a compiler that turns mdast trees into markdown.
function remarkStringify(options2) {
  const processor = this;
  processor.compiler = compiler2;

  // Compile an mdast `tree` to a markdown string, merging processor-level
  // settings with the plugin options.
  function compiler2(tree) {
    const merged = __spreadValues(__spreadValues({}, processor.data("settings")), options2);
    return toMarkdown(tree, __spreadProps(merged, {
      // Note: this option is not in the readme. The goal is for it to be set
      // by plugins on `data` instead of being passed by users.
      extensions: processor.data("toMarkdownExtensions") || []
    }));
  }
}
|
||
|
||
// node_modules/bail/index.js
|
||
// Throw `error` if it is truthy; a convenient terminal callback for
// node-style `(error, ...)` APIs.
function bail(error) {
  if (!error) return;
  throw error;
}
|
||
|
||
// node_modules/unified/lib/index.js
|
||
var import_extend = __toESM(require_extend(), 1);
|
||
|
||
// node_modules/devlop/lib/default.js
|
||
// Production build of `devlop`'s `ok` assertion: a deliberate no-op
// (assertions only run in development builds).
function ok2() {
}
|
||
|
||
// node_modules/is-plain-obj/index.js
|
||
// Whether `value` is a plain object: created by `{}`, `new Object()`, or
// `Object.create(null)` — not an array, class instance, iterable, or a value
// with a custom `Symbol.toStringTag`.
function isPlainObject(value) {
  if (typeof value !== "object" || value === null) {
    return false;
  }
  if (Symbol.toStringTag in value || Symbol.iterator in value) {
    return false;
  }
  const prototype = Object.getPrototypeOf(value);
  if (prototype === null || prototype === Object.prototype) {
    return true;
  }
  // One level of indirection is still "plain" (e.g. cross-realm objects).
  return Object.getPrototypeOf(prototype) === null;
}
|
||
|
||
// node_modules/trough/lib/index.js
|
||
// Create a middleware pipeline (`trough`): registered functions run in order,
// each receiving the (possibly transformed) values produced by the previous
// one, with sync, promise, and callback styles all supported via `wrap2`.
function trough() {
  const fns = [];
  const pipeline = { run, use };
  return pipeline;
  // Run the pipeline. The last argument must be a completion callback,
  // invoked as `callback(error)` on the first failure or
  // `callback(null, ...output)` when every middleware has run.
  function run(...values) {
    let middlewareIndex = -1;
    const callback = values.pop();
    if (typeof callback !== "function") {
      throw new TypeError("Expected function as last argument, not " + callback);
    }
    next(null, ...values);
    // Advance to the next middleware with the current values.
    function next(error, ...output) {
      const fn = fns[++middlewareIndex];
      let index2 = -1;
      if (error) {
        callback(error);
        return;
      }
      // A middleware that yields `null`/`undefined` for a slot keeps the
      // previous value in that slot.
      while (++index2 < values.length) {
        if (output[index2] === null || output[index2] === void 0) {
          output[index2] = values[index2];
        }
      }
      values = output;
      if (fn) {
        wrap2(fn, next)(...output);
      } else {
        callback(null, ...output);
      }
    }
  }
  // Register a middleware function; returns the pipeline for chaining.
  function use(middelware) {
    if (typeof middelware !== "function") {
      throw new TypeError(
        "Expected `middelware` to be a function, not " + middelware
      );
    }
    fns.push(middelware);
    return pipeline;
  }
}
|
||
// Wrap `middleware` so that it may complete synchronously (return value or
// throw), via a returned promise, via a returned `Error`, or via an explicit
// `done` callback — and always report exactly once through `callback`.
function wrap2(middleware, callback) {
  // Set once `callback` has been invoked; guards against double reporting.
  let called;
  return wrapped;
  function wrapped(...parameters) {
    // If the middleware declares more parameters than we pass, it expects a
    // trailing `done` callback.
    const fnExpectsCallback = middleware.length > parameters.length;
    let result;
    if (fnExpectsCallback) {
      parameters.push(done);
    }
    try {
      result = middleware.apply(this, parameters);
    } catch (error) {
      const exception = (
        /** @type {Error} */
        error
      );
      // If the callback already fired, this throw happened after completion
      // (async): rethrow rather than swallow or double-report.
      if (fnExpectsCallback && called) {
        throw exception;
      }
      return done(exception);
    }
    if (!fnExpectsCallback) {
      // Synchronous protocols: thenable, returned Error, or plain value.
      if (result && result.then && typeof result.then === "function") {
        result.then(then, done);
      } else if (result instanceof Error) {
        done(result);
      } else {
        then(result);
      }
    }
  }
  // Report completion exactly once.
  function done(error, ...output) {
    if (!called) {
      called = true;
      callback(error, ...output);
    }
  }
  // Success path used by promise/value results.
  function then(value) {
    done(null, value);
  }
}
|
||
|
||
// node_modules/vfile-message/lib/index.js
|
||
// Message (warning/error) associated with a file, as used by vfile/unified.
var VFileMessage = class extends Error {
  /**
   * Create a message for `reason`.
   *
   * > 🪦 **Note**: also has obsolete signatures — the first argument may be a
   * > `reason` string or an `Error`/`VFileMessage` cause; the second may be an
   * > options object, a parent node, a point/position, or an `origin` string;
   * > the optional third is always `origin` (`'source:ruleId'` or `'ruleId'`).
   *
   * @param {Error | VFileMessage | string} causeOrReason
   *   Reason for message (markdown) or a cause error.
   * @param {Node | NodeLike | Options | Point | Position | string | null | undefined} [optionsOrParentOrPlace]
   *   Configuration, parent node, place, or origin (optional).
   * @param {string | null | undefined} [origin]
   *   Place in code where the message originates (example:
   *   `'my-package:my-rule'` or `'my-rule'`).
   * @returns
   *   Instance of `VFileMessage`.
   */
  // eslint-disable-next-line complexity
  constructor(causeOrReason, optionsOrParentOrPlace, origin) {
    super();
    // `(reason, origin)` shorthand: a string second argument is the origin.
    if (typeof optionsOrParentOrPlace === "string") {
      origin = optionsOrParentOrPlace;
      optionsOrParentOrPlace = void 0;
    }
    let reason = "";
    let options2 = {};
    let legacyCause = false;
    // Normalize the second argument into an options object: a point
    // (`line`/`column`), a position (`start`/`end`), a node (`type`), or
    // an actual options object.
    if (optionsOrParentOrPlace) {
      if ("line" in optionsOrParentOrPlace && "column" in optionsOrParentOrPlace) {
        options2 = { place: optionsOrParentOrPlace };
      } else if ("start" in optionsOrParentOrPlace && "end" in optionsOrParentOrPlace) {
        options2 = { place: optionsOrParentOrPlace };
      } else if ("type" in optionsOrParentOrPlace) {
        options2 = {
          ancestors: [optionsOrParentOrPlace],
          place: optionsOrParentOrPlace.position
        };
      } else {
        options2 = __spreadValues({}, optionsOrParentOrPlace);
      }
    }
    // First argument: either the reason text or a legacy cause error.
    if (typeof causeOrReason === "string") {
      reason = causeOrReason;
    } else if (!options2.cause && causeOrReason) {
      legacyCause = true;
      reason = causeOrReason.message;
      options2.cause = causeOrReason;
    }
    // Split `origin` into `source:ruleId` unless already provided.
    if (!options2.ruleId && !options2.source && typeof origin === "string") {
      const index2 = origin.indexOf(":");
      if (index2 === -1) {
        options2.ruleId = origin;
      } else {
        options2.source = origin.slice(0, index2);
        options2.ruleId = origin.slice(index2 + 1);
      }
    }
    // Fall back to the closest ancestor's position when no place was given.
    // (Fixed: the original repeated `options2.ancestors` twice instead of
    // checking that the array is non-empty.)
    if (!options2.place && options2.ancestors && options2.ancestors.length > 0) {
      const parent = options2.ancestors[options2.ancestors.length - 1];
      if (parent) {
        options2.place = parent.position;
      }
    }
    const start = options2.place && "start" in options2.place ? options2.place.start : options2.place;
    this.ancestors = options2.ancestors || void 0;
    this.cause = options2.cause || void 0;
    this.column = start ? start.column : void 0;
    this.fatal = void 0;
    this.file;
    this.message = reason;
    this.line = start ? start.line : void 0;
    // `name` doubles as the stringified place (e.g. `3:1-3:5`).
    this.name = stringifyPosition(options2.place) || "1:1";
    this.place = options2.place || void 0;
    this.reason = this.message;
    this.ruleId = options2.ruleId || void 0;
    this.source = options2.source || void 0;
    // Only inherit a stack from a legacy cause; otherwise stay stackless.
    this.stack = legacyCause && options2.cause && typeof options2.cause.stack === "string" ? options2.cause.stack : "";
    this.actual;
    this.expected;
    this.note;
    this.url;
  }
};
|
||
// Prototype-level defaults so every field exists (and serializes predictably)
// even when the constructor leaves it unset on the instance.
VFileMessage.prototype.file = "";
VFileMessage.prototype.name = "";
VFileMessage.prototype.reason = "";
VFileMessage.prototype.message = "";
VFileMessage.prototype.stack = "";
VFileMessage.prototype.column = void 0;
VFileMessage.prototype.line = void 0;
VFileMessage.prototype.ancestors = void 0;
VFileMessage.prototype.cause = void 0;
VFileMessage.prototype.fatal = void 0;
VFileMessage.prototype.place = void 0;
VFileMessage.prototype.ruleId = void 0;
VFileMessage.prototype.source = void 0;
|
||
|
||
// node_modules/vfile/lib/minpath.browser.js
|
||
// Minimal POSIX-style path utilities for the browser (subset of Node's `path`).
var minpath = { basename, dirname, extname, join: join2, sep: "/" };
|
||
// Like Node's `path.basename`: the last path component, optionally with a
// trailing `extname2` suffix removed. Code point 47 is `/`.
function basename(path, extname2) {
  if (extname2 !== void 0 && typeof extname2 !== "string") {
    throw new TypeError('"ext" argument must be a string');
  }
  assertPath(path);
  let start = 0;
  let end = -1;
  let index2 = path.length;
  let seenNonSlash;
  // Fast path: no usable extension — find the last component while ignoring
  // trailing slashes.
  if (extname2 === void 0 || extname2.length === 0 || extname2.length > path.length) {
    while (index2--) {
      if (path.codePointAt(index2) === 47) {
        // A slash before the component starts it after this position.
        if (seenNonSlash) {
          start = index2 + 1;
          break;
        }
      } else if (end < 0) {
        // First non-slash from the right ends the component.
        seenNonSlash = true;
        end = index2 + 1;
      }
    }
    return end < 0 ? "" : path.slice(start, end);
  }
  if (extname2 === path) {
    return "";
  }
  let firstNonSlashEnd = -1;
  // Match `extname2` backwards from its last character while scanning.
  let extnameIndex = extname2.length - 1;
  while (index2--) {
    if (path.codePointAt(index2) === 47) {
      if (seenNonSlash) {
        start = index2 + 1;
        break;
      }
    } else {
      if (firstNonSlashEnd < 0) {
        seenNonSlash = true;
        firstNonSlashEnd = index2 + 1;
      }
      if (extnameIndex > -1) {
        if (path.codePointAt(index2) === extname2.codePointAt(extnameIndex--)) {
          if (extnameIndex < 0) {
            // The whole extension matched: cut the basename here.
            end = index2;
          }
        } else {
          // Mismatch: stop matching and keep the full component.
          extnameIndex = -1;
          end = firstNonSlashEnd;
        }
      }
    }
  }
  if (start === end) {
    end = firstNonSlashEnd;
  } else if (end < 0) {
    end = path.length;
  }
  return path.slice(start, end);
}
|
||
// Like Node's `path.dirname`: everything before the last path component.
// Code point 47 is `/`.
function dirname(path) {
  assertPath(path);
  if (path.length === 0) {
    return ".";
  }
  let end = -1;
  let index2 = path.length;
  let unmatchedSlash;
  // Scan backwards for the slash preceding the last component, skipping
  // trailing slashes. `--index2` deliberately never inspects index 0.
  while (--index2) {
    if (path.codePointAt(index2) === 47) {
      if (unmatchedSlash) {
        end = index2;
        break;
      }
    } else if (!unmatchedSlash) {
      // We are past any trailing slashes now.
      unmatchedSlash = true;
    }
  }
  // No slash found: `/` for absolute paths, `.` otherwise. `end === 1` with a
  // leading slash means the path started with `//`.
  return end < 0 ? path.codePointAt(0) === 47 ? "/" : "." : end === 1 && path.codePointAt(0) === 47 ? "//" : path.slice(0, end);
}
|
||
// Like Node's `path.extname`: the trailing extension including its dot, or
// `''` for dotfiles and dotless names. Code point 47 is `/`, 46 is `.`.
function extname(path) {
  assertPath(path);
  let index2 = path.length;
  let end = -1;
  let startPart = 0;
  let startDot = -1;
  // Tracks dots relative to the component end while scanning backwards
  // (mirrors Node's internal state machine for `path.extname`).
  let preDotState = 0;
  let unmatchedSlash;
  while (index2--) {
    const code2 = path.codePointAt(index2);
    if (code2 === 47) {
      if (unmatchedSlash) {
        // Reached the start of the last component.
        startPart = index2 + 1;
        break;
      }
      continue;
    }
    if (end < 0) {
      // First non-slash from the right: the component ends here.
      unmatchedSlash = true;
      end = index2 + 1;
    }
    if (code2 === 46) {
      if (startDot < 0) {
        startDot = index2;
      } else if (preDotState !== 1) {
        preDotState = 1;
      }
    } else if (startDot > -1) {
      preDotState = -1;
    }
  }
  if (startDot < 0 || end < 0 || // We saw a non-dot character immediately before the dot.
  preDotState === 0 || // The (right-most) trimmed path component is exactly `..`.
  preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) {
    return "";
  }
  return path.slice(startDot, end);
}
|
||
// Like Node's `path.join`: concatenate non-empty segments with `/` and
// normalize the result; `.` when nothing was joined.
function join2(...segments) {
  let joined;
  for (const segment of segments) {
    assertPath(segment);
    if (segment) {
      joined = joined === void 0 ? segment : joined + "/" + segment;
    }
  }
  return joined === void 0 ? "." : normalize(joined);
}
|
||
// Like Node's `path.normalize` (POSIX): resolve `.`/`..` segments while
// preserving absoluteness and a trailing slash.
function normalize(path) {
  assertPath(path);
  const isAbsolute = path.codePointAt(0) === 47;
  let normalized = normalizeString(path, !isAbsolute);
  if (!normalized && !isAbsolute) {
    normalized = ".";
  }
  // Keep an explicit trailing slash if the input had one.
  if (normalized && path.codePointAt(path.length - 1) === 47) {
    normalized += "/";
  }
  return isAbsolute ? "/" + normalized : normalized;
}
|
||
// Resolve `.` and `..` segments in `path`; when `allowAboveRoot` is set,
// leading `..` segments that escape the root are kept. Port of Node's
// internal `normalizeString`. Code point 47 is `/`, 46 is `.`.
function normalizeString(path, allowAboveRoot) {
  let result = "";
  let lastSegmentLength = 0;
  let lastSlash = -1;
  // Dots seen in the current segment; `-1` once the segment is not `.`/`..`.
  let dots = 0;
  let index2 = -1;
  let code2;
  let lastSlashIndex;
  while (++index2 <= path.length) {
    // Treat the end of the string as a virtual trailing slash so the final
    // segment is processed by the same branch.
    if (index2 < path.length) {
      code2 = path.codePointAt(index2);
    } else if (code2 === 47) {
      break;
    } else {
      code2 = 47;
    }
    if (code2 === 47) {
      if (lastSlash === index2 - 1 || dots === 1) {
        // Empty segment (`//`) or `.`: drop it.
      } else if (lastSlash !== index2 - 1 && dots === 2) {
        // `..`: pop the previous segment — unless that segment is itself `..`.
        if (result.length < 2 || lastSegmentLength !== 2 || result.codePointAt(result.length - 1) !== 46 || result.codePointAt(result.length - 2) !== 46) {
          if (result.length > 2) {
            lastSlashIndex = result.lastIndexOf("/");
            if (lastSlashIndex !== result.length - 1) {
              if (lastSlashIndex < 0) {
                result = "";
                lastSegmentLength = 0;
              } else {
                result = result.slice(0, lastSlashIndex);
                lastSegmentLength = result.length - 1 - result.lastIndexOf("/");
              }
              lastSlash = index2;
              dots = 0;
              continue;
            }
          } else if (result.length > 0) {
            // Only one segment so far: popping it empties the result.
            result = "";
            lastSegmentLength = 0;
            lastSlash = index2;
            dots = 0;
            continue;
          }
        }
        if (allowAboveRoot) {
          // Nothing to pop: keep the `..` for relative paths.
          result = result.length > 0 ? result + "/.." : "..";
          lastSegmentLength = 2;
        }
      } else {
        // Ordinary segment: append it.
        if (result.length > 0) {
          result += "/" + path.slice(lastSlash + 1, index2);
        } else {
          result = path.slice(lastSlash + 1, index2);
        }
        lastSegmentLength = index2 - lastSlash - 1;
      }
      lastSlash = index2;
      dots = 0;
    } else if (code2 === 46 && dots > -1) {
      dots++;
    } else {
      dots = -1;
    }
  }
  return result;
}
|
||
// Guard: every path argument must be a string.
function assertPath(path) {
  if (typeof path === "string") {
    return;
  }
  throw new TypeError(
    "Path must be a string. Received " + JSON.stringify(path)
  );
}
|
||
|
||
// node_modules/vfile/lib/minproc.browser.js
|
||
// Minimal `process` shim for the browser (subset of Node's `process`).
var minproc = { cwd };
// There is no working directory in a browser; pretend it is the root.
function cwd() {
  return "/";
}
|
||
|
||
// node_modules/vfile/lib/minurl.shared.js
|
||
// Whether `fileUrlOrPath` looks like a WHATWG `URL` object: it must carry
// truthy `href` and `protocol` fields and must not have the legacy
// `url.parse` `auth` field.
function isUrl(fileUrlOrPath) {
  if (fileUrlOrPath === null || typeof fileUrlOrPath !== "object") {
    return false;
  }
  if (!("href" in fileUrlOrPath) || !fileUrlOrPath.href) {
    return false;
  }
  if (!("protocol" in fileUrlOrPath) || !fileUrlOrPath.protocol) {
    return false;
  }
  // @ts-expect-error: indexing is fine.
  return fileUrlOrPath.auth === void 0;
}
|
||
|
||
// node_modules/vfile/lib/minurl.browser.js
|
||
// Turn a `file:` URL (or a URL string) into a POSIX filesystem path,
// mirroring Node's `url.fileURLToPath` error behavior.
function urlToPath(path) {
  let url = path;
  if (typeof url === "string") {
    url = new URL(url);
  } else if (!isUrl(url)) {
    const error = new TypeError(
      'The "path" argument must be of type string or an instance of URL. Received `' + url + "`"
    );
    error.code = "ERR_INVALID_ARG_TYPE";
    throw error;
  }
  if (url.protocol !== "file:") {
    const error = new TypeError("The URL must be of scheme file");
    error.code = "ERR_INVALID_URL_SCHEME";
    throw error;
  }
  return getPathFromURLPosix(url);
}
|
||
// Extract the decoded POSIX path from a `file:` URL, rejecting URLs with a
// host or with percent-encoded slashes (`%2F`/`%2f`) in the path.
function getPathFromURLPosix(url) {
  if (url.hostname !== "") {
    const error = new TypeError(
      'File URL host must be "localhost" or empty on darwin'
    );
    error.code = "ERR_INVALID_FILE_URL_HOST";
    throw error;
  }
  const pathname = url.pathname;
  // An encoded `/` would decode into a bogus extra path separator.
  if (/%2[Ff]/.test(pathname)) {
    const error = new TypeError(
      "File URL path must not include encoded / characters"
    );
    error.code = "ERR_INVALID_FILE_URL_PATH";
    throw error;
  }
  return decodeURIComponent(pathname);
}
|
||
|
||
// node_modules/vfile/lib/index.js
|
||
// Order in which path-related fields are applied by the `VFile` constructor
// (least specific to most specific).
var order = (
  /** @type {const} */
  [
    "history",
    "path",
    "basename",
    "stem",
    "extname",
    "dirname"
  ]
);
|
||
var VFile = class {
|
||
/**
|
||
* Create a new virtual file.
|
||
*
|
||
* `options` is treated as:
|
||
*
|
||
* * `string` or `Uint8Array` — `{value: options}`
|
||
* * `URL` — `{path: options}`
|
||
* * `VFile` — shallow copies its data over to the new file
|
||
* * `object` — all fields are shallow copied over to the new file
|
||
*
|
||
* Path related fields are set in the following order (least specific to
|
||
* most specific): `history`, `path`, `basename`, `stem`, `extname`,
|
||
* `dirname`.
|
||
*
|
||
* You cannot set `dirname` or `extname` without setting either `history`,
|
||
* `path`, `basename`, or `stem` too.
|
||
*
|
||
* @param {Compatible | null | undefined} [value]
|
||
* File value.
|
||
* @returns
|
||
* New instance.
|
||
*/
|
||
constructor(value) {
|
||
let options2;
|
||
if (!value) {
|
||
options2 = {};
|
||
} else if (isUrl(value)) {
|
||
options2 = { path: value };
|
||
} else if (typeof value === "string" || isUint8Array(value)) {
|
||
options2 = { value };
|
||
} else {
|
||
options2 = value;
|
||
}
|
||
this.cwd = "cwd" in options2 ? "" : minproc.cwd();
|
||
this.data = {};
|
||
this.history = [];
|
||
this.messages = [];
|
||
this.value;
|
||
this.map;
|
||
this.result;
|
||
this.stored;
|
||
let index2 = -1;
|
||
while (++index2 < order.length) {
|
||
const field2 = order[index2];
|
||
if (field2 in options2 && options2[field2] !== void 0 && options2[field2] !== null) {
|
||
this[field2] = field2 === "history" ? [...options2[field2]] : options2[field2];
|
||
}
|
||
}
|
||
let field;
|
||
for (field in options2) {
|
||
if (!order.includes(field)) {
|
||
this[field] = options2[field];
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Get the basename (including extname) (example: `'index.min.js'`).
|
||
*
|
||
* @returns {string | undefined}
|
||
* Basename.
|
||
*/
|
||
get basename() {
|
||
return typeof this.path === "string" ? minpath.basename(this.path) : void 0;
|
||
}
|
||
/**
|
||
* Set basename (including extname) (`'index.min.js'`).
|
||
*
|
||
* Cannot contain path separators (`'/'` on unix, macOS, and browsers, `'\'`
|
||
* on windows).
|
||
* Cannot be nullified (use `file.path = file.dirname` instead).
|
||
*
|
||
* @param {string} basename
|
||
* Basename.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
set basename(basename2) {
|
||
assertNonEmpty(basename2, "basename");
|
||
assertPart(basename2, "basename");
|
||
this.path = minpath.join(this.dirname || "", basename2);
|
||
}
|
||
/**
|
||
* Get the parent path (example: `'~'`).
|
||
*
|
||
* @returns {string | undefined}
|
||
* Dirname.
|
||
*/
|
||
get dirname() {
|
||
return typeof this.path === "string" ? minpath.dirname(this.path) : void 0;
|
||
}
|
||
/**
|
||
* Set the parent path (example: `'~'`).
|
||
*
|
||
* Cannot be set if there’s no `path` yet.
|
||
*
|
||
* @param {string | undefined} dirname
|
||
* Dirname.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
set dirname(dirname2) {
|
||
assertPath2(this.basename, "dirname");
|
||
this.path = minpath.join(dirname2 || "", this.basename);
|
||
}
|
||
/**
|
||
* Get the extname (including dot) (example: `'.js'`).
|
||
*
|
||
* @returns {string | undefined}
|
||
* Extname.
|
||
*/
|
||
get extname() {
|
||
return typeof this.path === "string" ? minpath.extname(this.path) : void 0;
|
||
}
|
||
/**
|
||
* Set the extname (including dot) (example: `'.js'`).
|
||
*
|
||
* Cannot contain path separators (`'/'` on unix, macOS, and browsers, `'\'`
|
||
* on windows).
|
||
* Cannot be set if there’s no `path` yet.
|
||
*
|
||
* @param {string | undefined} extname
|
||
* Extname.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
set extname(extname2) {
|
||
assertPart(extname2, "extname");
|
||
assertPath2(this.dirname, "extname");
|
||
if (extname2) {
|
||
if (extname2.codePointAt(0) !== 46) {
|
||
throw new Error("`extname` must start with `.`");
|
||
}
|
||
if (extname2.includes(".", 1)) {
|
||
throw new Error("`extname` cannot contain multiple dots");
|
||
}
|
||
}
|
||
this.path = minpath.join(this.dirname, this.stem + (extname2 || ""));
|
||
}
|
||
/**
|
||
* Get the full path (example: `'~/index.min.js'`).
|
||
*
|
||
* @returns {string}
|
||
* Path.
|
||
*/
|
||
get path() {
|
||
return this.history[this.history.length - 1];
|
||
}
|
||
/**
|
||
* Set the full path (example: `'~/index.min.js'`).
|
||
*
|
||
* Cannot be nullified.
|
||
* You can set a file URL (a `URL` object with a `file:` protocol) which will
|
||
* be turned into a path with `url.fileURLToPath`.
|
||
*
|
||
* @param {URL | string} path
|
||
* Path.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
set path(path) {
|
||
if (isUrl(path)) {
|
||
path = urlToPath(path);
|
||
}
|
||
assertNonEmpty(path, "path");
|
||
if (this.path !== path) {
|
||
this.history.push(path);
|
||
}
|
||
}
|
||
/**
|
||
* Get the stem (basename w/o extname) (example: `'index.min'`).
|
||
*
|
||
* @returns {string | undefined}
|
||
* Stem.
|
||
*/
|
||
get stem() {
|
||
return typeof this.path === "string" ? minpath.basename(this.path, this.extname) : void 0;
|
||
}
|
||
/**
|
||
* Set the stem (basename w/o extname) (example: `'index.min'`).
|
||
*
|
||
* Cannot contain path separators (`'/'` on unix, macOS, and browsers, `'\'`
|
||
* on windows).
|
||
* Cannot be nullified (use `file.path = file.dirname` instead).
|
||
*
|
||
* @param {string} stem
|
||
* Stem.
|
||
* @returns {undefined}
|
||
* Nothing.
|
||
*/
|
||
set stem(stem) {
|
||
assertNonEmpty(stem, "stem");
|
||
assertPart(stem, "stem");
|
||
this.path = minpath.join(this.dirname || "", stem + (this.extname || ""));
|
||
}
|
||
// Normal prototypal methods.
|
||
/**
|
||
* Create a fatal message for `reason` associated with the file.
|
||
*
|
||
* The `fatal` field of the message is set to `true` (error; file not usable)
|
||
* and the `file` field is set to the current file path.
|
||
* The message is added to the `messages` field on `file`.
|
||
*
|
||
* > 🪦 **Note**: also has obsolete signatures.
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {MessageOptions | null | undefined} [options]
|
||
* @returns {never}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {never}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {never}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {never}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {never}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {never}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {never}
|
||
*
|
||
* @param {Error | VFileMessage | string} causeOrReason
|
||
* Reason for message, should use markdown.
|
||
* @param {Node | NodeLike | MessageOptions | Point | Position | string | null | undefined} [optionsOrParentOrPlace]
|
||
* Configuration (optional).
|
||
* @param {string | null | undefined} [origin]
|
||
* Place in code where the message originates (example:
|
||
* `'my-package:my-rule'` or `'my-rule'`).
|
||
* @returns {never}
|
||
* Never.
|
||
* @throws {VFileMessage}
|
||
* Message.
|
||
*/
|
||
fail(causeOrReason, optionsOrParentOrPlace, origin) {
|
||
const message = this.message(causeOrReason, optionsOrParentOrPlace, origin);
|
||
message.fatal = true;
|
||
throw message;
|
||
}
|
||
/**
|
||
* Create an info message for `reason` associated with the file.
|
||
*
|
||
* The `fatal` field of the message is set to `undefined` (info; change
|
||
* likely not needed) and the `file` field is set to the current file path.
|
||
* The message is added to the `messages` field on `file`.
|
||
*
|
||
* > 🪦 **Note**: also has obsolete signatures.
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {MessageOptions | null | undefined} [options]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @param {Error | VFileMessage | string} causeOrReason
|
||
* Reason for message, should use markdown.
|
||
* @param {Node | NodeLike | MessageOptions | Point | Position | string | null | undefined} [optionsOrParentOrPlace]
|
||
* Configuration (optional).
|
||
* @param {string | null | undefined} [origin]
|
||
* Place in code where the message originates (example:
|
||
* `'my-package:my-rule'` or `'my-rule'`).
|
||
* @returns {VFileMessage}
|
||
* Message.
|
||
*/
|
||
info(causeOrReason, optionsOrParentOrPlace, origin) {
|
||
const message = this.message(causeOrReason, optionsOrParentOrPlace, origin);
|
||
message.fatal = void 0;
|
||
return message;
|
||
}
|
||
/**
|
||
* Create a message for `reason` associated with the file.
|
||
*
|
||
* The `fatal` field of the message is set to `false` (warning; change may be
|
||
* needed) and the `file` field is set to the current file path.
|
||
* The message is added to the `messages` field on `file`.
|
||
*
|
||
* > 🪦 **Note**: also has obsolete signatures.
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {MessageOptions | null | undefined} [options]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {string} reason
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Node | NodeLike | null | undefined} parent
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {Point | Position | null | undefined} place
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @overload
|
||
* @param {Error | VFileMessage} cause
|
||
* @param {string | null | undefined} [origin]
|
||
* @returns {VFileMessage}
|
||
*
|
||
* @param {Error | VFileMessage | string} causeOrReason
|
||
* Reason for message, should use markdown.
|
||
* @param {Node | NodeLike | MessageOptions | Point | Position | string | null | undefined} [optionsOrParentOrPlace]
|
||
* Configuration (optional).
|
||
* @param {string | null | undefined} [origin]
|
||
* Place in code where the message originates (example:
|
||
* `'my-package:my-rule'` or `'my-rule'`).
|
||
* @returns {VFileMessage}
|
||
* Message.
|
||
*/
|
||
message(causeOrReason, optionsOrParentOrPlace, origin) {
|
||
const message = new VFileMessage(
|
||
// @ts-expect-error: the overloads are fine.
|
||
causeOrReason,
|
||
optionsOrParentOrPlace,
|
||
origin
|
||
);
|
||
if (this.path) {
|
||
message.name = this.path + ":" + message.name;
|
||
message.file = this.path;
|
||
}
|
||
message.fatal = false;
|
||
this.messages.push(message);
|
||
return message;
|
||
}
|
||
/**
|
||
* Serialize the file.
|
||
*
|
||
* > **Note**: which encodings are supported depends on the engine.
|
||
* > For info on Node.js, see:
|
||
* > <https://nodejs.org/api/util.html#whatwg-supported-encodings>.
|
||
*
|
||
* @param {string | null | undefined} [encoding='utf8']
|
||
* Character encoding to understand `value` as when it’s a `Uint8Array`
|
||
* (default: `'utf-8'`).
|
||
* @returns {string}
|
||
* Serialized file.
|
||
*/
|
||
toString(encoding) {
|
||
if (this.value === void 0) {
|
||
return "";
|
||
}
|
||
if (typeof this.value === "string") {
|
||
return this.value;
|
||
}
|
||
const decoder = new TextDecoder(encoding || void 0);
|
||
return decoder.decode(this.value);
|
||
}
|
||
};
|
||
function assertPart(part, name) {
|
||
if (part && part.includes(minpath.sep)) {
|
||
throw new Error(
|
||
"`" + name + "` cannot be a path: did not expect `" + minpath.sep + "`"
|
||
);
|
||
}
|
||
}
|
||
function assertNonEmpty(part, name) {
|
||
if (!part) {
|
||
throw new Error("`" + name + "` cannot be empty");
|
||
}
|
||
}
|
||
function assertPath2(path, name) {
|
||
if (!path) {
|
||
throw new Error("Setting `" + name + "` requires `path` to be set too");
|
||
}
|
||
}
|
||
function isUint8Array(value) {
|
||
return Boolean(
|
||
value && typeof value === "object" && "byteLength" in value && "byteOffset" in value
|
||
);
|
||
}
|
||
|
||
// node_modules/unified/lib/callable-instance.js
|
||
var CallableInstance = (
|
||
/**
|
||
* @type {new <Parameters extends Array<unknown>, Result>(property: string | symbol) => (...parameters: Parameters) => Result}
|
||
*/
|
||
/** @type {unknown} */
|
||
/**
|
||
* @this {Function}
|
||
* @param {string | symbol} property
|
||
* @returns {(...parameters: Array<unknown>) => unknown}
|
||
*/
|
||
function(property) {
|
||
const self2 = this;
|
||
const constr = self2.constructor;
|
||
const proto = (
|
||
/** @type {Record<string | symbol, Function>} */
|
||
// Prototypes do exist.
|
||
// type-coverage:ignore-next-line
|
||
constr.prototype
|
||
);
|
||
const value = proto[property];
|
||
const apply = function() {
|
||
return value.apply(apply, arguments);
|
||
};
|
||
Object.setPrototypeOf(apply, proto);
|
||
return apply;
|
||
}
|
||
);
|
||
|
||
// node_modules/unified/lib/index.js
|
||
var own4 = {}.hasOwnProperty;
|
||
var Processor = class _Processor extends CallableInstance {
|
||
/**
|
||
* Create a processor.
|
||
*/
|
||
constructor() {
|
||
super("copy");
|
||
this.Compiler = void 0;
|
||
this.Parser = void 0;
|
||
this.attachers = [];
|
||
this.compiler = void 0;
|
||
this.freezeIndex = -1;
|
||
this.frozen = void 0;
|
||
this.namespace = {};
|
||
this.parser = void 0;
|
||
this.transformers = trough();
|
||
}
|
||
/**
|
||
* Copy a processor.
|
||
*
|
||
* @deprecated
|
||
* This is a private internal method and should not be used.
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
* New *unfrozen* processor ({@linkcode Processor}) that is
|
||
* configured to work the same as its ancestor.
|
||
* When the descendant processor is configured in the future it does not
|
||
* affect the ancestral processor.
|
||
*/
|
||
copy() {
|
||
const destination = (
|
||
/** @type {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>} */
|
||
new _Processor()
|
||
);
|
||
let index2 = -1;
|
||
while (++index2 < this.attachers.length) {
|
||
const attacher = this.attachers[index2];
|
||
destination.use(...attacher);
|
||
}
|
||
destination.data((0, import_extend.default)(true, {}, this.namespace));
|
||
return destination;
|
||
}
|
||
/**
|
||
* Configure the processor with info available to all plugins.
|
||
* Information is stored in an object.
|
||
*
|
||
* Typically, options can be given to a specific plugin, but sometimes it
|
||
* makes sense to have information shared with several plugins.
|
||
* For example, a list of HTML elements that are self-closing, which is
|
||
* needed during all phases.
|
||
*
|
||
* > **Note**: setting information cannot occur on *frozen* processors.
|
||
* > Call the processor first to create a new unfrozen processor.
|
||
*
|
||
* > **Note**: to register custom data in TypeScript, augment the
|
||
* > {@linkcode Data} interface.
|
||
*
|
||
* @example
|
||
* This example show how to get and set info:
|
||
*
|
||
* ```js
|
||
* import {unified} from 'unified'
|
||
*
|
||
* const processor = unified().data('alpha', 'bravo')
|
||
*
|
||
* processor.data('alpha') // => 'bravo'
|
||
*
|
||
* processor.data() // => {alpha: 'bravo'}
|
||
*
|
||
* processor.data({charlie: 'delta'})
|
||
*
|
||
* processor.data() // => {charlie: 'delta'}
|
||
* ```
|
||
*
|
||
* @template {keyof Data} Key
|
||
*
|
||
* @overload
|
||
* @returns {Data}
|
||
*
|
||
* @overload
|
||
* @param {Data} dataset
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
*
|
||
* @overload
|
||
* @param {Key} key
|
||
* @returns {Data[Key]}
|
||
*
|
||
* @overload
|
||
* @param {Key} key
|
||
* @param {Data[Key]} value
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
*
|
||
* @param {Data | Key} [key]
|
||
* Key to get or set, or entire dataset to set, or nothing to get the
|
||
* entire dataset (optional).
|
||
* @param {Data[Key]} [value]
|
||
* Value to set (optional).
|
||
* @returns {unknown}
|
||
* The current processor when setting, the value at `key` when getting, or
|
||
* the entire dataset when getting without key.
|
||
*/
|
||
data(key, value) {
|
||
if (typeof key === "string") {
|
||
if (arguments.length === 2) {
|
||
assertUnfrozen("data", this.frozen);
|
||
this.namespace[key] = value;
|
||
return this;
|
||
}
|
||
return own4.call(this.namespace, key) && this.namespace[key] || void 0;
|
||
}
|
||
if (key) {
|
||
assertUnfrozen("data", this.frozen);
|
||
this.namespace = key;
|
||
return this;
|
||
}
|
||
return this.namespace;
|
||
}
|
||
/**
|
||
* Freeze a processor.
|
||
*
|
||
* Frozen processors are meant to be extended and not to be configured
|
||
* directly.
|
||
*
|
||
* When a processor is frozen it cannot be unfrozen.
|
||
* New processors working the same way can be created by calling the
|
||
* processor.
|
||
*
|
||
* It’s possible to freeze processors explicitly by calling `.freeze()`.
|
||
* Processors freeze automatically when `.parse()`, `.run()`, `.runSync()`,
|
||
* `.stringify()`, `.process()`, or `.processSync()` are called.
|
||
*
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
* The current processor.
|
||
*/
|
||
freeze() {
|
||
if (this.frozen) {
|
||
return this;
|
||
}
|
||
const self2 = (
|
||
/** @type {Processor} */
|
||
/** @type {unknown} */
|
||
this
|
||
);
|
||
while (++this.freezeIndex < this.attachers.length) {
|
||
const [attacher, ...options2] = this.attachers[this.freezeIndex];
|
||
if (options2[0] === false) {
|
||
continue;
|
||
}
|
||
if (options2[0] === true) {
|
||
options2[0] = void 0;
|
||
}
|
||
const transformer = attacher.call(self2, ...options2);
|
||
if (typeof transformer === "function") {
|
||
this.transformers.use(transformer);
|
||
}
|
||
}
|
||
this.frozen = true;
|
||
this.freezeIndex = Number.POSITIVE_INFINITY;
|
||
return this;
|
||
}
|
||
/**
|
||
* Parse text to a syntax tree.
|
||
*
|
||
* > **Note**: `parse` freezes the processor if not already *frozen*.
|
||
*
|
||
* > **Note**: `parse` performs the parse phase, not the run phase or other
|
||
* > phases.
|
||
*
|
||
* @param {Compatible | undefined} [file]
|
||
* file to parse (optional); typically `string` or `VFile`; any value
|
||
* accepted as `x` in `new VFile(x)`.
|
||
* @returns {ParseTree extends undefined ? Node : ParseTree}
|
||
* Syntax tree representing `file`.
|
||
*/
|
||
parse(file) {
|
||
this.freeze();
|
||
const realFile = vfile(file);
|
||
const parser = this.parser || this.Parser;
|
||
assertParser("parse", parser);
|
||
return parser(String(realFile), realFile);
|
||
}
|
||
/**
|
||
* Process the given file as configured on the processor.
|
||
*
|
||
* > **Note**: `process` freezes the processor if not already *frozen*.
|
||
*
|
||
* > **Note**: `process` performs the parse, run, and stringify phases.
|
||
*
|
||
* @overload
|
||
* @param {Compatible | undefined} file
|
||
* @param {ProcessCallback<VFileWithOutput<CompileResult>>} done
|
||
* @returns {undefined}
|
||
*
|
||
* @overload
|
||
* @param {Compatible | undefined} [file]
|
||
* @returns {Promise<VFileWithOutput<CompileResult>>}
|
||
*
|
||
* @param {Compatible | undefined} [file]
|
||
* File (optional); typically `string` or `VFile`]; any value accepted as
|
||
* `x` in `new VFile(x)`.
|
||
* @param {ProcessCallback<VFileWithOutput<CompileResult>> | undefined} [done]
|
||
* Callback (optional).
|
||
* @returns {Promise<VFile> | undefined}
|
||
* Nothing if `done` is given.
|
||
* Otherwise a promise, rejected with a fatal error or resolved with the
|
||
* processed file.
|
||
*
|
||
* The parsed, transformed, and compiled value is available at
|
||
* `file.value` (see note).
|
||
*
|
||
* > **Note**: unified typically compiles by serializing: most
|
||
* > compilers return `string` (or `Uint8Array`).
|
||
* > Some compilers, such as the one configured with
|
||
* > [`rehype-react`][rehype-react], return other values (in this case, a
|
||
* > React tree).
|
||
* > If you’re using a compiler that doesn’t serialize, expect different
|
||
* > result values.
|
||
* >
|
||
* > To register custom results in TypeScript, add them to
|
||
* > {@linkcode CompileResultMap}.
|
||
*
|
||
* [rehype-react]: https://github.com/rehypejs/rehype-react
|
||
*/
|
||
process(file, done) {
|
||
const self2 = this;
|
||
this.freeze();
|
||
assertParser("process", this.parser || this.Parser);
|
||
assertCompiler("process", this.compiler || this.Compiler);
|
||
return done ? executor(void 0, done) : new Promise(executor);
|
||
function executor(resolve, reject) {
|
||
const realFile = vfile(file);
|
||
const parseTree = (
|
||
/** @type {HeadTree extends undefined ? Node : HeadTree} */
|
||
/** @type {unknown} */
|
||
self2.parse(realFile)
|
||
);
|
||
self2.run(parseTree, realFile, function(error, tree, file2) {
|
||
if (error || !tree || !file2) {
|
||
return realDone(error);
|
||
}
|
||
const compileTree = (
|
||
/** @type {CompileTree extends undefined ? Node : CompileTree} */
|
||
/** @type {unknown} */
|
||
tree
|
||
);
|
||
const compileResult = self2.stringify(compileTree, file2);
|
||
if (looksLikeAValue(compileResult)) {
|
||
file2.value = compileResult;
|
||
} else {
|
||
file2.result = compileResult;
|
||
}
|
||
realDone(
|
||
error,
|
||
/** @type {VFileWithOutput<CompileResult>} */
|
||
file2
|
||
);
|
||
});
|
||
function realDone(error, file2) {
|
||
if (error || !file2) {
|
||
reject(error);
|
||
} else if (resolve) {
|
||
resolve(file2);
|
||
} else {
|
||
ok2(done, "`done` is defined if `resolve` is not");
|
||
done(void 0, file2);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Process the given file as configured on the processor.
|
||
*
|
||
* An error is thrown if asynchronous transforms are configured.
|
||
*
|
||
* > **Note**: `processSync` freezes the processor if not already *frozen*.
|
||
*
|
||
* > **Note**: `processSync` performs the parse, run, and stringify phases.
|
||
*
|
||
* @param {Compatible | undefined} [file]
|
||
* File (optional); typically `string` or `VFile`; any value accepted as
|
||
* `x` in `new VFile(x)`.
|
||
* @returns {VFileWithOutput<CompileResult>}
|
||
* The processed file.
|
||
*
|
||
* The parsed, transformed, and compiled value is available at
|
||
* `file.value` (see note).
|
||
*
|
||
* > **Note**: unified typically compiles by serializing: most
|
||
* > compilers return `string` (or `Uint8Array`).
|
||
* > Some compilers, such as the one configured with
|
||
* > [`rehype-react`][rehype-react], return other values (in this case, a
|
||
* > React tree).
|
||
* > If you’re using a compiler that doesn’t serialize, expect different
|
||
* > result values.
|
||
* >
|
||
* > To register custom results in TypeScript, add them to
|
||
* > {@linkcode CompileResultMap}.
|
||
*
|
||
* [rehype-react]: https://github.com/rehypejs/rehype-react
|
||
*/
|
||
processSync(file) {
|
||
let complete = false;
|
||
let result;
|
||
this.freeze();
|
||
assertParser("processSync", this.parser || this.Parser);
|
||
assertCompiler("processSync", this.compiler || this.Compiler);
|
||
this.process(file, realDone);
|
||
assertDone("processSync", "process", complete);
|
||
ok2(result, "we either bailed on an error or have a tree");
|
||
return result;
|
||
function realDone(error, file2) {
|
||
complete = true;
|
||
bail(error);
|
||
result = file2;
|
||
}
|
||
}
|
||
/**
|
||
* Run *transformers* on a syntax tree.
|
||
*
|
||
* > **Note**: `run` freezes the processor if not already *frozen*.
|
||
*
|
||
* > **Note**: `run` performs the run phase, not other phases.
|
||
*
|
||
* @overload
|
||
* @param {HeadTree extends undefined ? Node : HeadTree} tree
|
||
* @param {RunCallback<TailTree extends undefined ? Node : TailTree>} done
|
||
* @returns {undefined}
|
||
*
|
||
* @overload
|
||
* @param {HeadTree extends undefined ? Node : HeadTree} tree
|
||
* @param {Compatible | undefined} file
|
||
* @param {RunCallback<TailTree extends undefined ? Node : TailTree>} done
|
||
* @returns {undefined}
|
||
*
|
||
* @overload
|
||
* @param {HeadTree extends undefined ? Node : HeadTree} tree
|
||
* @param {Compatible | undefined} [file]
|
||
* @returns {Promise<TailTree extends undefined ? Node : TailTree>}
|
||
*
|
||
* @param {HeadTree extends undefined ? Node : HeadTree} tree
|
||
* Tree to transform and inspect.
|
||
* @param {(
|
||
* RunCallback<TailTree extends undefined ? Node : TailTree> |
|
||
* Compatible
|
||
* )} [file]
|
||
* File associated with `node` (optional); any value accepted as `x` in
|
||
* `new VFile(x)`.
|
||
* @param {RunCallback<TailTree extends undefined ? Node : TailTree>} [done]
|
||
* Callback (optional).
|
||
* @returns {Promise<TailTree extends undefined ? Node : TailTree> | undefined}
|
||
* Nothing if `done` is given.
|
||
* Otherwise, a promise rejected with a fatal error or resolved with the
|
||
* transformed tree.
|
||
*/
|
||
run(tree, file, done) {
|
||
assertNode(tree);
|
||
this.freeze();
|
||
const transformers = this.transformers;
|
||
if (!done && typeof file === "function") {
|
||
done = file;
|
||
file = void 0;
|
||
}
|
||
return done ? executor(void 0, done) : new Promise(executor);
|
||
function executor(resolve, reject) {
|
||
ok2(
|
||
typeof file !== "function",
|
||
"`file` can\u2019t be a `done` anymore, we checked"
|
||
);
|
||
const realFile = vfile(file);
|
||
transformers.run(tree, realFile, realDone);
|
||
function realDone(error, outputTree, file2) {
|
||
const resultingTree = (
|
||
/** @type {TailTree extends undefined ? Node : TailTree} */
|
||
outputTree || tree
|
||
);
|
||
if (error) {
|
||
reject(error);
|
||
} else if (resolve) {
|
||
resolve(resultingTree);
|
||
} else {
|
||
ok2(done, "`done` is defined if `resolve` is not");
|
||
done(void 0, resultingTree, file2);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Run *transformers* on a syntax tree.
|
||
*
|
||
* An error is thrown if asynchronous transforms are configured.
|
||
*
|
||
* > **Note**: `runSync` freezes the processor if not already *frozen*.
|
||
*
|
||
* > **Note**: `runSync` performs the run phase, not other phases.
|
||
*
|
||
* @param {HeadTree extends undefined ? Node : HeadTree} tree
|
||
* Tree to transform and inspect.
|
||
* @param {Compatible | undefined} [file]
|
||
* File associated with `node` (optional); any value accepted as `x` in
|
||
* `new VFile(x)`.
|
||
* @returns {TailTree extends undefined ? Node : TailTree}
|
||
* Transformed tree.
|
||
*/
|
||
runSync(tree, file) {
|
||
let complete = false;
|
||
let result;
|
||
this.run(tree, file, realDone);
|
||
assertDone("runSync", "run", complete);
|
||
ok2(result, "we either bailed on an error or have a tree");
|
||
return result;
|
||
function realDone(error, tree2) {
|
||
bail(error);
|
||
result = tree2;
|
||
complete = true;
|
||
}
|
||
}
|
||
/**
|
||
* Compile a syntax tree.
|
||
*
|
||
* > **Note**: `stringify` freezes the processor if not already *frozen*.
|
||
*
|
||
* > **Note**: `stringify` performs the stringify phase, not the run phase
|
||
* > or other phases.
|
||
*
|
||
* @param {CompileTree extends undefined ? Node : CompileTree} tree
|
||
* Tree to compile.
|
||
* @param {Compatible | undefined} [file]
|
||
* File associated with `node` (optional); any value accepted as `x` in
|
||
* `new VFile(x)`.
|
||
* @returns {CompileResult extends undefined ? Value : CompileResult}
|
||
* Textual representation of the tree (see note).
|
||
*
|
||
* > **Note**: unified typically compiles by serializing: most compilers
|
||
* > return `string` (or `Uint8Array`).
|
||
* > Some compilers, such as the one configured with
|
||
* > [`rehype-react`][rehype-react], return other values (in this case, a
|
||
* > React tree).
|
||
* > If you’re using a compiler that doesn’t serialize, expect different
|
||
* > result values.
|
||
* >
|
||
* > To register custom results in TypeScript, add them to
|
||
* > {@linkcode CompileResultMap}.
|
||
*
|
||
* [rehype-react]: https://github.com/rehypejs/rehype-react
|
||
*/
|
||
stringify(tree, file) {
|
||
this.freeze();
|
||
const realFile = vfile(file);
|
||
const compiler2 = this.compiler || this.Compiler;
|
||
assertCompiler("stringify", compiler2);
|
||
assertNode(tree);
|
||
return compiler2(tree, realFile);
|
||
}
|
||
/**
|
||
* Configure the processor to use a plugin, a list of usable values, or a
|
||
* preset.
|
||
*
|
||
* If the processor is already using a plugin, the previous plugin
|
||
* configuration is changed based on the options that are passed in.
|
||
* In other words, the plugin is not added a second time.
|
||
*
|
||
* > **Note**: `use` cannot be called on *frozen* processors.
|
||
* > Call the processor first to create a new unfrozen processor.
|
||
*
|
||
* @example
|
||
* There are many ways to pass plugins to `.use()`.
|
||
* This example gives an overview:
|
||
*
|
||
* ```js
|
||
* import {unified} from 'unified'
|
||
*
|
||
* unified()
|
||
* // Plugin with options:
|
||
* .use(pluginA, {x: true, y: true})
|
||
* // Passing the same plugin again merges configuration (to `{x: true, y: false, z: true}`):
|
||
* .use(pluginA, {y: false, z: true})
|
||
* // Plugins:
|
||
* .use([pluginB, pluginC])
|
||
* // Two plugins, the second with options:
|
||
* .use([pluginD, [pluginE, {}]])
|
||
* // Preset with plugins and settings:
|
||
* .use({plugins: [pluginF, [pluginG, {}]], settings: {position: false}})
|
||
* // Settings only:
|
||
* .use({settings: {position: false}})
|
||
* ```
|
||
*
|
||
* @template {Array<unknown>} [Parameters=[]]
|
||
* @template {Node | string | undefined} [Input=undefined]
|
||
* @template [Output=Input]
|
||
*
|
||
* @overload
|
||
* @param {Preset | null | undefined} [preset]
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
*
|
||
* @overload
|
||
* @param {PluggableList} list
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
*
|
||
* @overload
|
||
* @param {Plugin<Parameters, Input, Output>} plugin
|
||
* @param {...(Parameters | [boolean])} parameters
|
||
* @returns {UsePlugin<ParseTree, HeadTree, TailTree, CompileTree, CompileResult, Input, Output>}
|
||
*
|
||
* @param {PluggableList | Plugin | Preset | null | undefined} value
|
||
* Usable value.
|
||
* @param {...unknown} parameters
|
||
* Parameters, when a plugin is given as a usable value.
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
* Current processor.
|
||
*/
|
||
use(value, ...parameters) {
|
||
const attachers = this.attachers;
|
||
const namespace = this.namespace;
|
||
assertUnfrozen("use", this.frozen);
|
||
if (value === null || value === void 0) {
|
||
} else if (typeof value === "function") {
|
||
addPlugin(value, parameters);
|
||
} else if (typeof value === "object") {
|
||
if (Array.isArray(value)) {
|
||
addList(value);
|
||
} else {
|
||
addPreset(value);
|
||
}
|
||
} else {
|
||
throw new TypeError("Expected usable value, not `" + value + "`");
|
||
}
|
||
return this;
|
||
function add(value2) {
|
||
if (typeof value2 === "function") {
|
||
addPlugin(value2, []);
|
||
} else if (typeof value2 === "object") {
|
||
if (Array.isArray(value2)) {
|
||
const [plugin, ...parameters2] = (
|
||
/** @type {PluginTuple<Array<unknown>>} */
|
||
value2
|
||
);
|
||
addPlugin(plugin, parameters2);
|
||
} else {
|
||
addPreset(value2);
|
||
}
|
||
} else {
|
||
throw new TypeError("Expected usable value, not `" + value2 + "`");
|
||
}
|
||
}
|
||
function addPreset(result) {
|
||
if (!("plugins" in result) && !("settings" in result)) {
|
||
throw new Error(
|
||
"Expected usable value but received an empty preset, which is probably a mistake: presets typically come with `plugins` and sometimes with `settings`, but this has neither"
|
||
);
|
||
}
|
||
addList(result.plugins);
|
||
if (result.settings) {
|
||
namespace.settings = (0, import_extend.default)(true, namespace.settings, result.settings);
|
||
}
|
||
}
|
||
function addList(plugins) {
|
||
let index2 = -1;
|
||
if (plugins === null || plugins === void 0) {
|
||
} else if (Array.isArray(plugins)) {
|
||
while (++index2 < plugins.length) {
|
||
const thing = plugins[index2];
|
||
add(thing);
|
||
}
|
||
} else {
|
||
throw new TypeError("Expected a list of plugins, not `" + plugins + "`");
|
||
}
|
||
}
|
||
function addPlugin(plugin, parameters2) {
|
||
let index2 = -1;
|
||
let entryIndex = -1;
|
||
while (++index2 < attachers.length) {
|
||
if (attachers[index2][0] === plugin) {
|
||
entryIndex = index2;
|
||
break;
|
||
}
|
||
}
|
||
if (entryIndex === -1) {
|
||
attachers.push([plugin, ...parameters2]);
|
||
} else if (parameters2.length > 0) {
|
||
let [primary, ...rest] = parameters2;
|
||
const currentPrimary = attachers[entryIndex][1];
|
||
if (isPlainObject(currentPrimary) && isPlainObject(primary)) {
|
||
primary = (0, import_extend.default)(true, currentPrimary, primary);
|
||
}
|
||
attachers[entryIndex] = [plugin, primary, ...rest];
|
||
}
|
||
}
|
||
}
|
||
};
|
||
var unified = new Processor().freeze();
|
||
function assertParser(name, value) {
|
||
if (typeof value !== "function") {
|
||
throw new TypeError("Cannot `" + name + "` without `parser`");
|
||
}
|
||
}
|
||
function assertCompiler(name, value) {
|
||
if (typeof value !== "function") {
|
||
throw new TypeError("Cannot `" + name + "` without `compiler`");
|
||
}
|
||
}
|
||
function assertUnfrozen(name, frozen) {
|
||
if (frozen) {
|
||
throw new Error(
|
||
"Cannot call `" + name + "` on a frozen processor.\nCreate a new processor first, by calling it: use `processor()` instead of `processor`."
|
||
);
|
||
}
|
||
}
|
||
function assertNode(node2) {
|
||
if (!isPlainObject(node2) || typeof node2.type !== "string") {
|
||
throw new TypeError("Expected node, got `" + node2 + "`");
|
||
}
|
||
}
|
||
function assertDone(name, asyncName, complete) {
|
||
if (!complete) {
|
||
throw new Error(
|
||
"`" + name + "` finished async. Use `" + asyncName + "` instead"
|
||
);
|
||
}
|
||
}
|
||
function vfile(value) {
|
||
return looksLikeAVFile(value) ? value : new VFile(value);
|
||
}
|
||
function looksLikeAVFile(value) {
|
||
return Boolean(
|
||
value && typeof value === "object" && "message" in value && "messages" in value
|
||
);
|
||
}
|
||
function looksLikeAValue(value) {
|
||
return typeof value === "string" || isUint8Array2(value);
|
||
}
|
||
function isUint8Array2(value) {
|
||
return Boolean(
|
||
value && typeof value === "object" && "byteLength" in value && "byteOffset" in value
|
||
);
|
||
}
|
||
|
||
// node_modules/remark/index.js
|
||
var remark = unified().use(remarkParse).use(remarkStringify).freeze();
|
||
|
||
// node_modules/github-slugger/regex.js
|
||
var regex = /[\0-\x1F!-,\.\/:-@\[-\^`\{-\xA9\xAB-\xB4\xB6-\xB9\xBB-\xBF\xD7\xF7\u02C2-\u02C5\u02D2-\u02DF\u02E5-\u02EB\u02ED\u02EF-\u02FF\u0375\u0378\u0379\u037E\u0380-\u0385\u0387\u038B\u038D\u03A2\u03F6\u0482\u0530\u0557\u0558\u055A-\u055F\u0589-\u0590\u05BE\u05C0\u05C3\u05C6\u05C8-\u05CF\u05EB-\u05EE\u05F3-\u060F\u061B-\u061F\u066A-\u066D\u06D4\u06DD\u06DE\u06E9\u06FD\u06FE\u0700-\u070F\u074B\u074C\u07B2-\u07BF\u07F6-\u07F9\u07FB\u07FC\u07FE\u07FF\u082E-\u083F\u085C-\u085F\u086B-\u089F\u08B5\u08C8-\u08D2\u08E2\u0964\u0965\u0970\u0984\u098D\u098E\u0991\u0992\u09A9\u09B1\u09B3-\u09B5\u09BA\u09BB\u09C5\u09C6\u09C9\u09CA\u09CF-\u09D6\u09D8-\u09DB\u09DE\u09E4\u09E5\u09F2-\u09FB\u09FD\u09FF\u0A00\u0A04\u0A0B-\u0A0E\u0A11\u0A12\u0A29\u0A31\u0A34\u0A37\u0A3A\u0A3B\u0A3D\u0A43-\u0A46\u0A49\u0A4A\u0A4E-\u0A50\u0A52-\u0A58\u0A5D\u0A5F-\u0A65\u0A76-\u0A80\u0A84\u0A8E\u0A92\u0AA9\u0AB1\u0AB4\u0ABA\u0ABB\u0AC6\u0ACA\u0ACE\u0ACF\u0AD1-\u0ADF\u0AE4\u0AE5\u0AF0-\u0AF8\u0B00\u0B04\u0B0D\u0B0E\u0B11\u0B12\u0B29\u0B31\u0B34\u0B3A\u0B3B\u0B45\u0B46\u0B49\u0B4A\u0B4E-\u0B54\u0B58-\u0B5B\u0B5E\u0B64\u0B65\u0B70\u0B72-\u0B81\u0B84\u0B8B-\u0B8D\u0B91\u0B96-\u0B98\u0B9B\u0B9D\u0BA0-\u0BA2\u0BA5-\u0BA7\u0BAB-\u0BAD\u0BBA-\u0BBD\u0BC3-\u0BC5\u0BC9\u0BCE\u0BCF\u0BD1-\u0BD6\u0BD8-\u0BE5\u0BF0-\u0BFF\u0C0D\u0C11\u0C29\u0C3A-\u0C3C\u0C45\u0C49\u0C4E-\u0C54\u0C57\u0C5B-\u0C5F\u0C64\u0C65\u0C70-\u0C7F\u0C84\u0C8D\u0C91\u0CA9\u0CB4\u0CBA\u0CBB\u0CC5\u0CC9\u0CCE-\u0CD4\u0CD7-\u0CDD\u0CDF\u0CE4\u0CE5\u0CF0\u0CF3-\u0CFF\u0D0D\u0D11\u0D45\u0D49\u0D4F-\u0D53\u0D58-\u0D5E\u0D64\u0D65\u0D70-\u0D79\u0D80\u0D84\u0D97-\u0D99\u0DB2\u0DBC\u0DBE\u0DBF\u0DC7-\u0DC9\u0DCB-\u0DCE\u0DD5\u0DD7\u0DE0-\u0DE5\u0DF0\u0DF1\u0DF4-\u0E00\u0E3B-\u0E3F\u0E4F\u0E5A-\u0E80\u0E83\u0E85\u0E8B\u0EA4\u0EA6\u0EBE\u0EBF\u0EC5\u0EC7\u0ECE\u0ECF\u0EDA\u0EDB\u0EE0-\u0EFF\u0F01-\u0F17\u0F1A-\u0F1F\u0F2A-\u0F34\u0F36\u0F38\u0F3A-\u0F3D\u0F48\u0F6D-\u0F70\u0F85\u0F98\u0FBD-\u0FC5\u0FC7-\u0FFF\u104A-\u104F\u109E\u109F\u10C6\u10C8-\u10CC\u
10CE\u10CF\u10FB\u1249\u124E\u124F\u1257\u1259\u125E\u125F\u1289\u128E\u128F\u12B1\u12B6\u12B7\u12BF\u12C1\u12C6\u12C7\u12D7\u1311\u1316\u1317\u135B\u135C\u1360-\u137F\u1390-\u139F\u13F6\u13F7\u13FE-\u1400\u166D\u166E\u1680\u169B-\u169F\u16EB-\u16ED\u16F9-\u16FF\u170D\u1715-\u171F\u1735-\u173F\u1754-\u175F\u176D\u1771\u1774-\u177F\u17D4-\u17D6\u17D8-\u17DB\u17DE\u17DF\u17EA-\u180A\u180E\u180F\u181A-\u181F\u1879-\u187F\u18AB-\u18AF\u18F6-\u18FF\u191F\u192C-\u192F\u193C-\u1945\u196E\u196F\u1975-\u197F\u19AC-\u19AF\u19CA-\u19CF\u19DA-\u19FF\u1A1C-\u1A1F\u1A5F\u1A7D\u1A7E\u1A8A-\u1A8F\u1A9A-\u1AA6\u1AA8-\u1AAF\u1AC1-\u1AFF\u1B4C-\u1B4F\u1B5A-\u1B6A\u1B74-\u1B7F\u1BF4-\u1BFF\u1C38-\u1C3F\u1C4A-\u1C4C\u1C7E\u1C7F\u1C89-\u1C8F\u1CBB\u1CBC\u1CC0-\u1CCF\u1CD3\u1CFB-\u1CFF\u1DFA\u1F16\u1F17\u1F1E\u1F1F\u1F46\u1F47\u1F4E\u1F4F\u1F58\u1F5A\u1F5C\u1F5E\u1F7E\u1F7F\u1FB5\u1FBD\u1FBF-\u1FC1\u1FC5\u1FCD-\u1FCF\u1FD4\u1FD5\u1FDC-\u1FDF\u1FED-\u1FF1\u1FF5\u1FFD-\u203E\u2041-\u2053\u2055-\u2070\u2072-\u207E\u2080-\u208F\u209D-\u20CF\u20F1-\u2101\u2103-\u2106\u2108\u2109\u2114\u2116-\u2118\u211E-\u2123\u2125\u2127\u2129\u212E\u213A\u213B\u2140-\u2144\u214A-\u214D\u214F-\u215F\u2189-\u24B5\u24EA-\u2BFF\u2C2F\u2C5F\u2CE5-\u2CEA\u2CF4-\u2CFF\u2D26\u2D28-\u2D2C\u2D2E\u2D2F\u2D68-\u2D6E\u2D70-\u2D7E\u2D97-\u2D9F\u2DA7\u2DAF\u2DB7\u2DBF\u2DC7\u2DCF\u2DD7\u2DDF\u2E00-\u2E2E\u2E30-\u3004\u3008-\u3020\u3030\u3036\u3037\u303D-\u3040\u3097\u3098\u309B\u309C\u30A0\u30FB\u3100-\u3104\u3130\u318F-\u319F\u31C0-\u31EF\u3200-\u33FF\u4DC0-\u4DFF\u9FFD-\u9FFF\uA48D-\uA4CF\uA4FE\uA4FF\uA60D-\uA60F\uA62C-\uA63F\uA673\uA67E\uA6F2-\uA716\uA720\uA721\uA789\uA78A\uA7C0\uA7C1\uA7CB-\uA7F4\uA828-\uA82B\uA82D-\uA83F\uA874-\uA87F\uA8C6-\uA8CF\uA8DA-\uA8DF\uA8F8-\uA8FA\uA8FC\uA92E\uA92F\uA954-\uA95F\uA97D-\uA97F\uA9C1-\uA9CE\uA9DA-\uA9DF\uA9FF\uAA37-\uAA3F\uAA4E\uAA4F\uAA5A-\uAA5F\uAA77-\uAA79\uAAC3-\uAADA\uAADE\uAADF\uAAF0\uAAF1\uAAF7-\uAB00\uAB07\uAB08\uAB0F\uAB10\uAB17-\uAB1F\uAB27\uAB2F\uAB5B\uAB6A-\uAB6F\uABEB
\uABEE\uABEF\uABFA-\uABFF\uD7A4-\uD7AF\uD7C7-\uD7CA\uD7FC-\uD7FF\uE000-\uF8FF\uFA6E\uFA6F\uFADA-\uFAFF\uFB07-\uFB12\uFB18-\uFB1C\uFB29\uFB37\uFB3D\uFB3F\uFB42\uFB45\uFBB2-\uFBD2\uFD3E-\uFD4F\uFD90\uFD91\uFDC8-\uFDEF\uFDFC-\uFDFF\uFE10-\uFE1F\uFE30-\uFE32\uFE35-\uFE4C\uFE50-\uFE6F\uFE75\uFEFD-\uFF0F\uFF1A-\uFF20\uFF3B-\uFF3E\uFF40\uFF5B-\uFF65\uFFBF-\uFFC1\uFFC8\uFFC9\uFFD0\uFFD1\uFFD8\uFFD9\uFFDD-\uFFFF]|\uD800[\uDC0C\uDC27\uDC3B\uDC3E\uDC4E\uDC4F\uDC5E-\uDC7F\uDCFB-\uDD3F\uDD75-\uDDFC\uDDFE-\uDE7F\uDE9D-\uDE9F\uDED1-\uDEDF\uDEE1-\uDEFF\uDF20-\uDF2C\uDF4B-\uDF4F\uDF7B-\uDF7F\uDF9E\uDF9F\uDFC4-\uDFC7\uDFD0\uDFD6-\uDFFF]|\uD801[\uDC9E\uDC9F\uDCAA-\uDCAF\uDCD4-\uDCD7\uDCFC-\uDCFF\uDD28-\uDD2F\uDD64-\uDDFF\uDF37-\uDF3F\uDF56-\uDF5F\uDF68-\uDFFF]|\uD802[\uDC06\uDC07\uDC09\uDC36\uDC39-\uDC3B\uDC3D\uDC3E\uDC56-\uDC5F\uDC77-\uDC7F\uDC9F-\uDCDF\uDCF3\uDCF6-\uDCFF\uDD16-\uDD1F\uDD3A-\uDD7F\uDDB8-\uDDBD\uDDC0-\uDDFF\uDE04\uDE07-\uDE0B\uDE14\uDE18\uDE36\uDE37\uDE3B-\uDE3E\uDE40-\uDE5F\uDE7D-\uDE7F\uDE9D-\uDEBF\uDEC8\uDEE7-\uDEFF\uDF36-\uDF3F\uDF56-\uDF5F\uDF73-\uDF7F\uDF92-\uDFFF]|\uD803[\uDC49-\uDC7F\uDCB3-\uDCBF\uDCF3-\uDCFF\uDD28-\uDD2F\uDD3A-\uDE7F\uDEAA\uDEAD-\uDEAF\uDEB2-\uDEFF\uDF1D-\uDF26\uDF28-\uDF2F\uDF51-\uDFAF\uDFC5-\uDFDF\uDFF7-\uDFFF]|\uD804[\uDC47-\uDC65\uDC70-\uDC7E\uDCBB-\uDCCF\uDCE9-\uDCEF\uDCFA-\uDCFF\uDD35\uDD40-\uDD43\uDD48-\uDD4F\uDD74\uDD75\uDD77-\uDD7F\uDDC5-\uDDC8\uDDCD\uDDDB\uDDDD-\uDDFF\uDE12\uDE38-\uDE3D\uDE3F-\uDE7F\uDE87\uDE89\uDE8E\uDE9E\uDEA9-\uDEAF\uDEEB-\uDEEF\uDEFA-\uDEFF\uDF04\uDF0D\uDF0E\uDF11\uDF12\uDF29\uDF31\uDF34\uDF3A\uDF45\uDF46\uDF49\uDF4A\uDF4E\uDF4F\uDF51-\uDF56\uDF58-\uDF5C\uDF64\uDF65\uDF6D-\uDF6F\uDF75-\uDFFF]|\uD805[\uDC4B-\uDC4F\uDC5A-\uDC5D\uDC62-\uDC7F\uDCC6\uDCC8-\uDCCF\uDCDA-\uDD7F\uDDB6\uDDB7\uDDC1-\uDDD7\uDDDE-\uDDFF\uDE41-\uDE43\uDE45-\uDE4F\uDE5A-\uDE7F\uDEB9-\uDEBF\uDECA-\uDEFF\uDF1B\uDF1C\uDF2C-\uDF2F\uDF3A-\uDFFF]|\uD806[\uDC3B-\uDC9F\uDCEA-\uDCFE\uDD07\uDD08\uDD0A\uDD0B\uDD14\uDD17\uDD36\uDD39\uDD3A\uDD44-\uDD4F\uDD
5A-\uDD9F\uDDA8\uDDA9\uDDD8\uDDD9\uDDE2\uDDE5-\uDDFF\uDE3F-\uDE46\uDE48-\uDE4F\uDE9A-\uDE9C\uDE9E-\uDEBF\uDEF9-\uDFFF]|\uD807[\uDC09\uDC37\uDC41-\uDC4F\uDC5A-\uDC71\uDC90\uDC91\uDCA8\uDCB7-\uDCFF\uDD07\uDD0A\uDD37-\uDD39\uDD3B\uDD3E\uDD48-\uDD4F\uDD5A-\uDD5F\uDD66\uDD69\uDD8F\uDD92\uDD99-\uDD9F\uDDAA-\uDEDF\uDEF7-\uDFAF\uDFB1-\uDFFF]|\uD808[\uDF9A-\uDFFF]|\uD809[\uDC6F-\uDC7F\uDD44-\uDFFF]|[\uD80A\uD80B\uD80E-\uD810\uD812-\uD819\uD824-\uD82B\uD82D\uD82E\uD830-\uD833\uD837\uD839\uD83D\uD83F\uD87B-\uD87D\uD87F\uD885-\uDB3F\uDB41-\uDBFF][\uDC00-\uDFFF]|\uD80D[\uDC2F-\uDFFF]|\uD811[\uDE47-\uDFFF]|\uD81A[\uDE39-\uDE3F\uDE5F\uDE6A-\uDECF\uDEEE\uDEEF\uDEF5-\uDEFF\uDF37-\uDF3F\uDF44-\uDF4F\uDF5A-\uDF62\uDF78-\uDF7C\uDF90-\uDFFF]|\uD81B[\uDC00-\uDE3F\uDE80-\uDEFF\uDF4B-\uDF4E\uDF88-\uDF8E\uDFA0-\uDFDF\uDFE2\uDFE5-\uDFEF\uDFF2-\uDFFF]|\uD821[\uDFF8-\uDFFF]|\uD823[\uDCD6-\uDCFF\uDD09-\uDFFF]|\uD82C[\uDD1F-\uDD4F\uDD53-\uDD63\uDD68-\uDD6F\uDEFC-\uDFFF]|\uD82F[\uDC6B-\uDC6F\uDC7D-\uDC7F\uDC89-\uDC8F\uDC9A-\uDC9C\uDC9F-\uDFFF]|\uD834[\uDC00-\uDD64\uDD6A-\uDD6C\uDD73-\uDD7A\uDD83\uDD84\uDD8C-\uDDA9\uDDAE-\uDE41\uDE45-\uDFFF]|\uD835[\uDC55\uDC9D\uDCA0\uDCA1\uDCA3\uDCA4\uDCA7\uDCA8\uDCAD\uDCBA\uDCBC\uDCC4\uDD06\uDD0B\uDD0C\uDD15\uDD1D\uDD3A\uDD3F\uDD45\uDD47-\uDD49\uDD51\uDEA6\uDEA7\uDEC1\uDEDB\uDEFB\uDF15\uDF35\uDF4F\uDF6F\uDF89\uDFA9\uDFC3\uDFCC\uDFCD]|\uD836[\uDC00-\uDDFF\uDE37-\uDE3A\uDE6D-\uDE74\uDE76-\uDE83\uDE85-\uDE9A\uDEA0\uDEB0-\uDFFF]|\uD838[\uDC07\uDC19\uDC1A\uDC22\uDC25\uDC2B-\uDCFF\uDD2D-\uDD2F\uDD3E\uDD3F\uDD4A-\uDD4D\uDD4F-\uDEBF\uDEFA-\uDFFF]|\uD83A[\uDCC5-\uDCCF\uDCD7-\uDCFF\uDD4C-\uDD4F\uDD5A-\uDFFF]|\uD83B[\uDC00-\uDDFF\uDE04\uDE20\uDE23\uDE25\uDE26\uDE28\uDE33\uDE38\uDE3A\uDE3C-\uDE41\uDE43-\uDE46\uDE48\uDE4A\uDE4C\uDE50\uDE53\uDE55\uDE56\uDE58\uDE5A\uDE5C\uDE5E\uDE60\uDE63\uDE65\uDE66\uDE6B\uDE73\uDE78\uDE7D\uDE7F\uDE8A\uDE9C-\uDEA0\uDEA4\uDEAA\uDEBC-\uDFFF]|\uD83C[\uDC00-\uDD2F\uDD4A-\uDD4F\uDD6A-\uDD6F\uDD8A-\uDFFF]|\uD83E[\uDC00-\uDFEF\uDFFA-\uDFFF]|\uD869[\
uDEDE-\uDEFF]|\uD86D[\uDF35-\uDF3F]|\uD86E[\uDC1E\uDC1F]|\uD873[\uDEA2-\uDEAF]|\uD87A[\uDFE1-\uDFFF]|\uD87E[\uDE1E-\uDFFF]|\uD884[\uDF4B-\uDFFF]|\uDB40[\uDC00-\uDCFF\uDDF0-\uDFFF]/g;
|
||
|
||
// node_modules/github-slugger/index.js
|
||
var own5 = Object.hasOwnProperty;
// GitHub-style slug generator: remembers every slug it has produced and
// disambiguates repeats with "-1", "-2", … suffixes.
var BananaSlug = class {
  /**
   * Create a new slugger with an empty occurrence table.
   */
  constructor() {
    this.reset();
  }
  /**
   * Generate a unique slug.
   *
   * Tracks previously generated slugs: repeated calls with the same value
   * will result in different slugs.
   * Use the `slug` function to get same slugs.
   *
   * @param {string} value
   *   String of text to slugify
   * @param {boolean} [maintainCase=false]
   *   Keep the current case, otherwise make all lowercase
   * @return {string}
   *   A unique slug string
   */
  slug(value, maintainCase) {
    let candidate = slug(value, maintainCase === true);
    const base = candidate;
    // Bump the counter for the base slug until we land on an unseen name.
    while (own5.call(this.occurrences, candidate)) {
      this.occurrences[base]++;
      candidate = base + "-" + this.occurrences[base];
    }
    this.occurrences[candidate] = 0;
    return candidate;
  }
  /**
   * Reset - Forget all previous slugs
   *
   * @return void
   */
  reset() {
    // Null-prototype map so slug text can never collide with Object keys.
    this.occurrences = /* @__PURE__ */ Object.create(null);
  }
};
|
||
/**
 * Stateless slugify: strip characters disallowed by the GitHub slug
 * `regex`, replace spaces with hyphens, and (optionally) lowercase.
 *
 * @param {string} value - Text to slugify; non-strings yield "".
 * @param {boolean} maintainCase - Keep original casing when true.
 * @returns {string}
 */
function slug(value, maintainCase) {
  if (typeof value !== "string") {
    return "";
  }
  const cased = maintainCase ? value : value.toLowerCase();
  return cased.replace(regex, "").replace(/ /g, "-");
}
|
||
|
||
// node_modules/mdast-util-toc/lib/to-expression.js
|
||
/**
 * Build a case-insensitive, fully-anchored RegExp from a user-supplied
 * pattern body (already regex syntax).
 *
 * @param {string} value - Pattern body.
 * @returns {RegExp} `/^(value)$/i`
 */
function toExpression(value) {
  const pattern = "^(" + value + ")$";
  return new RegExp(pattern, "i");
}
|
||
|
||
// node_modules/mdast-util-toc/lib/search.js
|
||
// Module-level slugger: reset per `search2` call so slugs are unique
// within one document but stable across documents.
var slugs = new BananaSlug();
// Collect headings for a table of contents.
// Walks `root2` visiting every heading; when `expression` is given, finds
// the opening "Contents" heading and the heading that closes its section,
// and records every qualifying heading after the opener. Returns
// `{index, endIndex, map}` where `index`/`endIndex` are child offsets into
// `root2.children` (-1 when no opener matched) and `map` is the list of
// collected `{depth, children, id}` entries.
function search2(root2, expression, settings) {
  const max = "children" in root2 ? root2.children.length : 0;
  const skip = settings.skip ? toExpression(settings.skip) : void 0;
  // `parents` decides which containers' headings count; defaults to
  // direct children of the root only.
  const parents = convert(
    settings.parents || function(d) {
      return d === root2;
    }
  );
  const map4 = [];
  let index2;
  let endIndex;
  let opening;
  slugs.reset();
  visit(root2, "heading", function(node2, position2, parent) {
    const value = toString(node2, { includeImageAlt: false });
    // Prefer an explicit id (e.g. from remark-slug) over the derived one.
    // Note: the slug is computed for *every* heading, even filtered ones,
    // so counters stay in sync with slugs rendered elsewhere.
    const id = node2.data && node2.data.hProperties && node2.data.hProperties.id;
    const slug2 = slugs.slug(id || value);
    if (!parents(parent)) {
      return;
    }
    // First heading matching the TOC pattern: remember where its section
    // starts (the child right after it) and which heading opened it.
    if (position2 !== void 0 && expression && !index2 && expression.test(value)) {
      index2 = position2 + 1;
      opening = node2;
      return;
    }
    // First same-or-shallower heading after the opener closes the section.
    if (position2 !== void 0 && opening && !endIndex && node2.depth <= opening.depth) {
      endIndex = position2;
    }
    // Collect the heading when we are inside/after the section (or no
    // pattern was given) and it passes the depth/skip filters.
    if ((endIndex || !expression) && (!settings.minDepth || node2.depth >= settings.minDepth) && (!settings.maxDepth || node2.depth <= settings.maxDepth) && (!skip || !skip.test(value))) {
      map4.push({ depth: node2.depth, children: node2.children, id: slug2 });
    }
  });
  return {
    index: index2 === void 0 ? -1 : index2,
    // A found opener with no closing heading extends to the end of root.
    endIndex: index2 === void 0 ? -1 : endIndex || max,
    map: map4
  };
}
|
||
|
||
// node_modules/@ungap/structured-clone/esm/types.js
|
||
// Record-type tags shared by the structured-clone serializer and
// deserializer below. Serialized records are `[tag, payload]` pairs.
var VOID = -1;
var PRIMITIVE = 0;
var ARRAY = 1;
var OBJECT = 2;
var DATE = 3;
var REGEXP = 4;
var MAP = 5;
var SET = 6;
var ERROR = 7;
var BIGINT = 8;
|
||
|
||
// node_modules/@ungap/structured-clone/esm/deserialize.js
|
||
// Global object used to look up constructors (errors, typed arrays) by
// name during revival: `self` in browsers/workers, `globalThis` elsewhere.
var env = typeof self === "object" ? self : globalThis;
// Build a revival function over `_`, the flat record list produced by
// `serialize`. `$` memoizes record-index -> revived value so shared
// references and cycles are reconstructed correctly.
var deserializer = ($, _) => {
  // Cache `out` under `index2`, then return it.
  const as = (out, index2) => {
    $.set(index2, out);
    return out;
  };
  // Revive the record stored at `index2`.
  const unpair = (index2) => {
    if ($.has(index2))
      return $.get(index2);
    const [type, value] = _[index2];
    switch (type) {
      case PRIMITIVE:
      case VOID:
        return as(value, index2);
      case ARRAY: {
        // Register the container *before* filling it so cyclic
        // references back to it resolve to the same array.
        const arr = as([], index2);
        for (const index3 of value)
          arr.push(unpair(index3));
        return arr;
      }
      case OBJECT: {
        const object = as({}, index2);
        for (const [key, index3] of value)
          object[unpair(key)] = unpair(index3);
        return object;
      }
      case DATE:
        return as(new Date(value), index2);
      case REGEXP: {
        const { source, flags } = value;
        return as(new RegExp(source, flags), index2);
      }
      case MAP: {
        const map4 = as(/* @__PURE__ */ new Map(), index2);
        for (const [key, index3] of value)
          map4.set(unpair(key), unpair(index3));
        return map4;
      }
      case SET: {
        const set = as(/* @__PURE__ */ new Set(), index2);
        for (const index3 of value)
          set.add(unpair(index3));
        return set;
      }
      case ERROR: {
        // Rebuild via the matching global error constructor by name.
        const { name, message } = value;
        return as(new env[name](message), index2);
      }
      case BIGINT:
        return as(BigInt(value), index2);
      case "BigInt":
        // Boxed BigInt object wrapper.
        return as(Object(BigInt(value)), index2);
      case "ArrayBuffer":
        // NOTE(review): this branch caches under `value` (the byte list)
        // instead of `index2`, unlike every other branch — verify against
        // upstream @ungap/structured-clone before changing.
        return as(new Uint8Array(value).buffer, value);
      case "DataView": {
        // NOTE(review): same `value`-vs-`index2` caching oddity as above.
        const { buffer } = new Uint8Array(value);
        return as(new DataView(buffer), value);
      }
    }
    // Remaining string tags (typed arrays, etc.): construct via the
    // matching global constructor with the serialized payload.
    return as(new env[type](value), index2);
  };
  return unpair;
};
// Revive a full serialized structure; record 0 is always the root value.
var deserialize = (serialized) => deserializer(/* @__PURE__ */ new Map(), serialized)(0);
|
||
|
||
// node_modules/@ungap/structured-clone/esm/serialize.js
|
||
// Shared helpers for the serializer below.
var EMPTY = "";
// Borrow Object.prototype.toString to read [[Class]] tags like "[object Map]".
var { toString: toString2 } = {};
var { keys } = Object;
|
||
/**
 * Classify a value for serialization as a `[tag, subtype]` pair.
 * Primitives report their `typeof`; objects are classified by their
 * `Object.prototype.toString` class name. Typed arrays and DataView keep
 * their class name as the subtype; plain arrays/objects use EMPTY.
 */
var typeOf = (value) => {
  const primitiveType = typeof value;
  if (primitiveType !== "object" || !value) {
    return [PRIMITIVE, primitiveType];
  }
  // "[object Xyz]" -> "Xyz"
  const className = toString2.call(value).slice(8, -1);
  // DataView serializes like a typed array but keeps its own tag.
  if (className === "DataView") {
    return [ARRAY, className];
  }
  const simple = new Map([
    ["Array", ARRAY],
    ["Object", OBJECT],
    ["Date", DATE],
    ["RegExp", REGEXP],
    ["Map", MAP],
    ["Set", SET]
  ]);
  const tag = simple.get(className);
  if (tag !== undefined) {
    return [tag, EMPTY];
  }
  if (className.includes("Array")) {
    return [ARRAY, className];
  }
  if (className.includes("Error")) {
    return [ERROR, className];
  }
  // Anything else (boxed primitives, exotic objects) is treated as an
  // object with its class name as subtype.
  return [OBJECT, className];
};
|
||
// In lossy mode, function/symbol values are silently dropped.
var shouldSkip = ([TYPE, type]) => TYPE === PRIMITIVE && (type === "function" || type === "symbol");
// Build a function that flattens a value into `_` as `[tag, payload]`
// records, returning the record index. `$` memoizes value -> index so
// shared references and cycles serialize exactly once.
// `strict`: throw on functions/symbols instead of nulling them out.
// `json`: mimic JSON.stringify by honoring `toJSON`.
var serializer = (strict, json, $, _) => {
  // Append a record and remember which value produced it.
  const as = (out, value) => {
    const index2 = _.push(out) - 1;
    $.set(value, index2);
    return index2;
  };
  const pair = (value) => {
    if ($.has(value))
      return $.get(value);
    let [TYPE, type] = typeOf(value);
    switch (TYPE) {
      case PRIMITIVE: {
        let entry = value;
        switch (type) {
          case "bigint":
            TYPE = BIGINT;
            entry = value.toString();
            break;
          case "function":
          case "symbol":
            if (strict)
              throw new TypeError("unable to serialize " + type);
            // Lossy mode: serialize as null.
            entry = null;
            break;
          case "undefined":
            return as([VOID], value);
        }
        return as([TYPE, entry], value);
      }
      case ARRAY: {
        if (type) {
          // Typed array / ArrayBuffer / DataView: store raw bytes under
          // the class name so the deserializer can reconstruct it.
          let spread = value;
          if (type === "DataView") {
            spread = new Uint8Array(value.buffer);
          } else if (type === "ArrayBuffer") {
            spread = new Uint8Array(value);
          }
          return as([type, [...spread]], value);
        }
        // Register the record before recursing so cycles terminate.
        const arr = [];
        const index2 = as([TYPE, arr], value);
        for (const entry of value)
          arr.push(pair(entry));
        return index2;
      }
      case OBJECT: {
        if (type) {
          // Boxed primitives keep their class name as the tag.
          switch (type) {
            case "BigInt":
              return as([type, value.toString()], value);
            case "Boolean":
            case "Number":
            case "String":
              return as([type, value.valueOf()], value);
          }
        }
        if (json && "toJSON" in value)
          return pair(value.toJSON());
        const entries = [];
        const index2 = as([TYPE, entries], value);
        for (const key of keys(value)) {
          // In lossy mode drop function/symbol-valued properties.
          if (strict || !shouldSkip(typeOf(value[key])))
            entries.push([pair(key), pair(value[key])]);
        }
        return index2;
      }
      case DATE:
        return as([TYPE, value.toISOString()], value);
      case REGEXP: {
        const { source, flags } = value;
        return as([TYPE, { source, flags }], value);
      }
      case MAP: {
        const entries = [];
        const index2 = as([TYPE, entries], value);
        for (const [key, entry] of value) {
          if (strict || !(shouldSkip(typeOf(key)) || shouldSkip(typeOf(entry))))
            entries.push([pair(key), pair(entry)]);
        }
        return index2;
      }
      case SET: {
        const entries = [];
        const index2 = as([TYPE, entries], value);
        for (const entry of value) {
          if (strict || !shouldSkip(typeOf(entry)))
            entries.push(pair(entry));
        }
        return index2;
      }
    }
    // ERROR (and anything classified as one): keep name + message only.
    const { message } = value;
    return as([TYPE, { name: type, message }], value);
  };
  return pair;
};
// Serialize `value` into a flat record list. `json`/`lossy` relax strict
// mode (functions/symbols become null instead of throwing).
var serialize = (value, { json, lossy } = {}) => {
  const _ = [];
  return serializer(!(json || lossy), !!json, /* @__PURE__ */ new Map(), _)(value), _;
};
|
||
|
||
// node_modules/@ungap/structured-clone/esm/index.js
|
||
// Structured clone entry point. Prefer the native `structuredClone` when
// available; fall back to the serialize/deserialize round-trip. Even with
// native support the round-trip is used when `json`/`lossy` options are
// passed, since the native API has no equivalent of those modes.
var esm_default = typeof structuredClone === "function" ? (
  /* c8 ignore start */
  (any, options2) => options2 && ("json" in options2 || "lossy" in options2) ? deserialize(serialize(any, options2)) : structuredClone(any)
) : (any, options2) => deserialize(serialize(any, options2));
|
||
|
||
// node_modules/mdast-util-toc/lib/contents.js
|
||
/**
 * Build the TOC list tree from the headings collected by `search2`.
 * Normalizes entry depths (shallowest becomes 1, mutating the entries in
 * place) and inserts each entry into a root mdast `list` node.
 *
 * @param {Array<{depth: number, children: Array, id: string}>} map4
 * @param {Object} settings - `ordered`, `tight`, `prefix` options.
 * @returns {Object} mdast `list` node.
 */
function contents(map4, settings) {
  const { ordered = false, tight = false, prefix } = settings;
  const table = { type: "list", ordered, spread: false, children: [] };
  // Find the shallowest heading depth present.
  let minDepth = Number.POSITIVE_INFINITY;
  for (const entry of map4) {
    if (entry.depth < minDepth) {
      minDepth = entry.depth;
    }
  }
  // Shift all depths so the shallowest becomes 1.
  for (const entry of map4) {
    entry.depth -= minDepth - 1;
  }
  // Insert entries in document order.
  for (const entry of map4) {
    insert(entry, table, { ordered, tight, prefix });
  }
  return table;
}
|
||
// Recursively place one TOC `entry` into the (possibly nested) list tree.
// Entries with depth 1 become list items at the current level; deeper
// entries descend into (or create) the tail item/list, decrementing
// `entry.depth` on each descent. Also recomputes `spread` afterwards.
function insert(entry, parent, settings) {
  let index2 = -1;
  const tail = parent.children[parent.children.length - 1];
  if (parent.type === "list") {
    if (entry.depth === 1) {
      // Right level: emit listItem > paragraph > link(#id) with the
      // heading's (cloned) inline content.
      parent.children.push({
        type: "listItem",
        spread: false,
        children: [
          {
            type: "paragraph",
            children: [
              {
                type: "link",
                title: null,
                url: "#" + (settings.prefix || "") + entry.id,
                children: all2(entry.children)
              }
            ]
          }
        ]
      });
    } else if (parent.children.length > 0) {
      // Deeper entry: descend into the last item of this list.
      const tail2 = parent.children[parent.children.length - 1];
      insert(entry, tail2, settings);
    } else {
      // Deeper entry but the list is empty (document skipped a heading
      // level): create an empty item to descend through.
      const item = { type: "listItem", spread: false, children: [] };
      parent.children.push(item);
      insert(entry, item, settings);
    }
  } else if (tail && tail.type === "list") {
    // Inside a listItem whose last child is already a sublist: descend.
    entry.depth--;
    insert(entry, tail, settings);
  } else {
    // Inside a listItem without a sublist yet: create one and descend.
    const item = {
      type: "list",
      ordered: settings.ordered,
      spread: false,
      children: []
    };
    parent.children.push(item);
    entry.depth--;
    insert(entry, item, settings);
  }
  // Recompute `spread` on the way out. Loose lists are spread when any
  // item has more than one child; tight lists are never spread.
  if (parent.type === "list" && !settings.tight) {
    parent.spread = false;
    while (++index2 < parent.children.length) {
      if (parent.children[index2].children.length > 1) {
        parent.spread = true;
        break;
      }
    }
  } else {
    parent.spread = !settings.tight;
  }
}
|
||
/**
 * Clone a list of inline nodes via `one2`, flattening the array results
 * that `one2` returns for unwrapped/dropped nodes.
 *
 * @param {Array} nodes - Inline mdast nodes.
 * @returns {Array} Cloned nodes.
 */
function all2(nodes) {
  const results = [];
  for (const node2 of nodes) {
    const mapped = one2(node2);
    if (Array.isArray(mapped)) {
      results.push(...mapped);
    } else {
      results.push(mapped);
    }
  }
  return results;
}
|
||
// Clone a single inline node for use inside a TOC link.
// Footnote references are dropped entirely; links/linkReferences are
// unwrapped to their children (links can't nest); everything else is
// deep-cloned with its `position` info stripped.
function one2(node2) {
  if (node2.type === "footnoteReference") {
    return [];
  }
  if (node2.type === "link" || node2.type === "linkReference") {
    return all2(node2.children);
  }
  if ("children" in node2) {
    // Clone everything except `children`/`position`, then re-attach
    // freshly cloned children.
    const _a = node2, { children, position: position3 } = _a, copy2 = __objRest(_a, ["children", "position"]);
    return Object.assign(esm_default(copy2), {
      children: all2(node2.children)
    });
  }
  // Leaf node: clone without `position`.
  const _b = node2, { position: position2 } = _b, copy = __objRest(_b, ["position"]);
  return esm_default(copy);
}
|
||
|
||
// node_modules/mdast-util-toc/lib/index.js
|
||
// Generate a table of contents for `tree`.
// Returns `{index, endIndex, map}`: where an existing TOC section starts
// and ends among the root's children (only meaningful when a heading
// pattern is configured, otherwise undefined) and the generated list tree
// (undefined when no headings qualified).
function toc(tree, options2) {
  const settings = options2 || {};
  const heading2 = settings.heading ? toExpression(settings.heading) : void 0;
  const result = search2(tree, heading2, settings);
  return {
    index: heading2 ? result.index : void 0,
    endIndex: heading2 ? result.endIndex : void 0,
    map: result.map.length > 0 ? contents(result.map, settings) : void 0
  };
}
|
||
|
||
// node_modules/remark-toc/lib/index.js
|
||
// remark plugin: replace the section under a "Table of Contents"-style
// heading with a generated list of links to the document's headings.
function remarkToc(options2) {
  // Defaults: match common TOC heading spellings; tight list unless the
  // caller explicitly passes a boolean `tight`.
  const settings = __spreadProps(__spreadValues({}, options2), {
    heading: options2 && options2.heading || "(table[ -]of[ -])?contents?|toc",
    tight: options2 && typeof options2.tight === "boolean" ? options2.tight : true
  });
  return function(tree) {
    const result = toc(tree, settings);
    // Bail unless a TOC heading was found and at least one heading was
    // collected for the list.
    if (result.endIndex === void 0 || result.endIndex === -1 || result.index === void 0 || result.index === -1 || !result.map) {
      return;
    }
    // Splice the generated list in place of the old TOC section body.
    tree.children = [
      ...tree.children.slice(0, result.index),
      result.map,
      ...tree.children.slice(result.endIndex)
    ];
  };
}
|
||
|
||
// node_modules/universal-user-agent/index.js
|
||
/**
 * Best-effort user-agent string for the current runtime.
 * Browsers (and Node >= 21) report `navigator.userAgent`; older Node
 * builds a "Node.js/<version> (<platform>; <arch>)" string; anything
 * else gets a sentinel value.
 *
 * @returns {string}
 */
function getUserAgent() {
  const hasNavigatorAgent = typeof navigator === "object" && "userAgent" in navigator;
  if (hasNavigatorAgent) {
    return navigator.userAgent;
  }
  const hasProcessVersion = typeof process === "object" && process.version !== void 0;
  if (hasProcessVersion) {
    // process.version is "vX.Y.Z"; drop the leading "v".
    const nodeVersion = process.version.substr(1);
    return `Node.js/${nodeVersion} (${process.platform}; ${process.arch})`;
  }
  return "<environment undetectable>";
}
|
||
|
||
// node_modules/before-after-hook/lib/register.js
|
||
// Run `method(options2)` through every hook registered under `name`.
// Always returns a promise. When `name` is an array, the names are
// reduced right-to-left so hooks apply in the listed order.
function register(state, name, method, options2) {
  if (typeof method !== "function") {
    throw new Error("method for before hook must be a function");
  }
  if (!options2) {
    options2 = {};
  }
  if (Array.isArray(name)) {
    // Nest: register(name[0], register(name[1], ... method)).
    return name.reverse().reduce((callback, name2) => {
      return register.bind(null, state, name2, callback, options2);
    }, method)();
  }
  return Promise.resolve().then(() => {
    if (!state.registry[name]) {
      return method(options2);
    }
    // Wrap `method` in each registered hook (innermost = original method),
    // then invoke the fully wrapped chain.
    return state.registry[name].reduce((method2, registered) => {
      return registered.hook.bind(null, method2, options2);
    }, method)();
  });
}
|
||
|
||
// node_modules/before-after-hook/lib/add.js
|
||
// Register `hook2` under `name`. For the sugar kinds ("before", "after",
// "error") the user callback is wrapped into a generic wrap-style hook
// `(method, options) => ...`; "wrap" hooks are stored as-is. The original
// callback is kept alongside (`orig`) so removeHook can find it.
function addHook(state, kind, name, hook2) {
  const orig = hook2;
  if (!state.registry[name]) {
    state.registry[name] = [];
  }
  if (kind === "before") {
    // Run the callback first, then the wrapped method.
    hook2 = (method, options2) => {
      return Promise.resolve().then(orig.bind(null, options2)).then(method.bind(null, options2));
    };
  }
  if (kind === "after") {
    // Run the method, hand its result to the callback, but resolve with
    // the method's result regardless of what the callback returns.
    hook2 = (method, options2) => {
      let result;
      return Promise.resolve().then(method.bind(null, options2)).then((result_) => {
        result = result_;
        return orig(result, options2);
      }).then(() => {
        return result;
      });
    };
  }
  if (kind === "error") {
    // Only invoked when the method rejects; the callback may recover by
    // returning a value or rethrow.
    hook2 = (method, options2) => {
      return Promise.resolve().then(method.bind(null, options2)).catch((error) => {
        return orig(error, options2);
      });
    };
  }
  state.registry[name].push({
    hook: hook2,
    orig
  });
}
|
||
|
||
// node_modules/before-after-hook/lib/remove.js
|
||
/**
 * Unregister a hook previously added under `name`, matched by the
 * original (unwrapped) callback. No-op when the name or callback is
 * unknown.
 *
 * @param {{registry: Object}} state - Hook registry state.
 * @param {string|symbol} name - Hook name.
 * @param {Function} method - Original callback passed at registration.
 */
function removeHook(state, name, method) {
  const registered = state.registry[name];
  if (!registered) {
    return;
  }
  const index2 = registered.findIndex((entry) => entry.orig === method);
  if (index2 === -1) {
    return;
  }
  registered.splice(index2, 1);
}
|
||
|
||
// node_modules/before-after-hook/index.js
|
||
var bind = Function.bind;
// bindable(fn, thisArg) -> a function that partially applies further
// arguments to `fn` (double-bind trick).
var bindable = bind.bind(bind);
// Attach the public API (remove + before/error/after/wrap registration)
// to a hook function, pre-bound to its state (and, for singular hooks,
// its fixed name).
function bindApi(hook2, state, name) {
  const removeHookRef = bindable(removeHook, null).apply(
    null,
    name ? [state, name] : [state]
  );
  hook2.api = { remove: removeHookRef };
  hook2.remove = removeHookRef;
  ["before", "error", "after", "wrap"].forEach((kind) => {
    const args = name ? [state, kind, name] : [state, kind];
    hook2[kind] = hook2.api[kind] = bindable(addHook, null).apply(null, args);
  });
}
|
||
// Create a single anonymous hook; internally it is a one-entry registry
// keyed by a unique symbol so the name never needs to be supplied.
function Singular() {
  const singularHookName = Symbol("Singular");
  const singularHookState = {
    registry: {}
  };
  const singularHook = register.bind(null, singularHookState, singularHookName);
  bindApi(singularHook, singularHookState, singularHookName);
  return singularHook;
}
|
||
// Create a named hook collection: `hook(name, method)` runs a method
// through the hooks registered for `name`; `hook.before/error/after/wrap`
// register new hooks per name.
function Collection() {
  const state = {
    registry: {}
  };
  const hook2 = register.bind(null, state);
  bindApi(hook2, state);
  return hook2;
}
var before_after_hook_default = { Singular, Collection };
|
||
|
||
// node_modules/@octokit/endpoint/dist-bundle/index.js
|
||
// Placeholder replaced by semantic-release in upstream builds.
var VERSION = "0.0.0-development";
var userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`;
// Default request options merged into every endpoint definition.
var DEFAULTS = {
  method: "GET",
  baseUrl: "https://api.github.com",
  headers: {
    accept: "application/vnd.github.v3+json",
    "user-agent": userAgent
  },
  mediaType: {
    format: ""
  }
};
|
||
/**
 * Return a copy of `object` with every key lowercased (header names are
 * case-insensitive). Nullish input yields an empty object.
 *
 * @param {Object|null|undefined} object
 * @returns {Object}
 */
function lowercaseKeys(object) {
  if (!object) {
    return {};
  }
  const newObj = {};
  for (const key of Object.keys(object)) {
    newObj[key.toLowerCase()] = object[key];
  }
  return newObj;
}
|
||
// Check for a "plain" object: an object literal, `Object.create(null)`,
// or an instance of a plain `Object`-like constructor.
function isPlainObject2(value) {
  if (typeof value !== "object" || value === null) return false;
  if (Object.prototype.toString.call(value) !== "[object Object]") return false;
  const proto = Object.getPrototypeOf(value);
  if (proto === null) return true;
  const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
  // NOTE(review): the final comparison invokes `Function.prototype` with
  // unusual receivers (both sides evaluate to undefined for ordinary
  // constructors, making the check pass). This is the exact shape shipped
  // in the vendored bundle — verify against upstream @octokit/endpoint
  // before changing it.
  return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
}
|
||
/**
 * Recursively merge `options2` into a shallow copy of `defaults`.
 * Plain-object values already present in `defaults` are merged deeply;
 * everything else (including arrays) is overwritten wholesale.
 *
 * @param {Object} defaults - Base values (not mutated).
 * @param {Object} options2 - Overriding values.
 * @returns {Object} New merged object.
 */
function mergeDeep(defaults, options2) {
  const result = Object.assign({}, defaults);
  for (const key of Object.keys(options2)) {
    const incoming = options2[key];
    if (isPlainObject2(incoming) && key in defaults) {
      result[key] = mergeDeep(defaults[key], incoming);
    } else {
      Object.assign(result, { [key]: incoming });
    }
  }
  return result;
}
|
||
/**
 * Delete every key whose value is `undefined`, mutating `obj` in place
 * (callers rely on the mutation). Returns the same object for chaining.
 *
 * @param {Object} obj - Object to clean up.
 * @returns {Object} The same `obj`.
 */
function removeUndefinedProperties(obj) {
  for (const key in obj) {
    if (obj[key] !== void 0) continue;
    delete obj[key];
  }
  return obj;
}
|
||
// Merge endpoint defaults, a route ("GET /path" or an options object),
// and extra options into one normalized options object.
function merge(defaults, route, options2) {
  var _a;
  if (typeof route === "string") {
    // "METHOD /url" or a bare "/url".
    let [method, url] = route.split(" ");
    options2 = Object.assign(url ? { method, url } : { url: method }, options2);
  } else {
    options2 = Object.assign({}, route);
  }
  // Header names are case-insensitive: normalize before merging, and drop
  // keys explicitly set to undefined so they don't clobber defaults.
  options2.headers = lowercaseKeys(options2.headers);
  removeUndefinedProperties(options2);
  removeUndefinedProperties(options2.headers);
  const mergedOptions = mergeDeep(defaults || {}, options2);
  // GraphQL special-case: keep preview names from defaults (deduped
  // against the request's own), then strip the "-preview" suffix.
  if (options2.url === "/graphql") {
    if (defaults && ((_a = defaults.mediaType.previews) == null ? void 0 : _a.length)) {
      mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(
        (preview) => !mergedOptions.mediaType.previews.includes(preview)
      ).concat(mergedOptions.mediaType.previews);
    }
    mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, ""));
  }
  return mergedOptions;
}
|
||
/**
 * Append `parameters` to `url` as a query string, using "?" or "&"
 * depending on whether the URL already has a query. The GitHub search
 * parameter `q` is special-cased: its "+" separators are preserved while
 * each segment is percent-encoded.
 *
 * @param {string} url - Base URL (may already contain a query).
 * @param {Object} parameters - Query parameters to append.
 * @returns {string}
 */
function addQueryParameters(url, parameters) {
  const separator = /\?/.test(url) ? "&" : "?";
  const names = Object.keys(parameters);
  if (names.length === 0) {
    return url;
  }
  const query = names.map((name) => {
    if (name === "q") {
      return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
    }
    return `${name}=${encodeURIComponent(parameters[name])}`;
  }).join("&");
  return url + separator + query;
}
|
||
// Matches URI-template expressions such as "{owner}" or "{+path,id}".
var urlVariableRegex = /\{[^{}}]+\}/g;
/**
 * Strip the braces/operators around a matched template expression and
 * split it into its comma-separated variable names.
 *
 * @param {string} variableName - A match like "{+path}" or "{a,b}".
 * @returns {string[]} Variable names.
 */
function removeNonChars(variableName) {
  const trimmed = variableName.replace(new RegExp("(?:^\\W+)|(?:(?<!\\W)\\W+$)", "g"), "");
  return trimmed.split(/,/);
}
|
||
/**
 * Collect every variable name used in a URI template's `{...}`
 * expressions, in order of appearance.
 *
 * @param {string} url - URI template.
 * @returns {string[]} Variable names (empty when none).
 */
function extractUrlVariableNames(url) {
  const matches = url.match(urlVariableRegex);
  if (!matches) {
    return [];
  }
  const names = [];
  for (const match of matches) {
    names.push(...removeNonChars(match));
  }
  return names;
}
|
||
/**
 * Copy `object`'s own enumerable keys except those in `keysToOmit`.
 * The result is prototype-less (`__proto__: null`) to avoid prototype
 * pollution via attacker-controlled keys.
 *
 * @param {Object} object - Source object.
 * @param {string[]} keysToOmit - Keys to leave out.
 * @returns {Object} New prototype-less object.
 */
function omit(object, keysToOmit) {
  const result = { __proto__: null };
  for (const key of Object.keys(object)) {
    if (!keysToOmit.includes(key)) {
      result[key] = object[key];
    }
  }
  return result;
}
|
||
/**
 * Percent-encode a string for "+"/"#" (reserved) URI-template expansion:
 * already-encoded "%XX" triplets are passed through, everything else
 * goes through `encodeURI` with "[" / "]" restored afterwards.
 *
 * @param {string} str2 - Raw value.
 * @returns {string} Encoded value.
 */
function encodeReserved(str2) {
  const parts = str2.split(/(%[0-9A-Fa-f]{2})/g);
  const encoded = parts.map((part) => {
    if (/%[0-9A-Fa-f]/.test(part)) {
      return part;
    }
    return encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
  });
  return encoded.join("");
}
|
||
/**
 * Strict RFC 3986 percent-encoding: like `encodeURIComponent`, but also
 * encodes the characters ! ' ( ) * which that function leaves bare.
 *
 * @param {string} str2 - Raw value.
 * @returns {string} Encoded value.
 */
function encodeUnreserved(str2) {
  return encodeURIComponent(str2).replace(/[!'()*]/g, (c) => {
    return "%" + c.charCodeAt(0).toString(16).toUpperCase();
  });
}
|
||
/**
 * Encode one expansion value per its operator ("+"/"#" allow reserved
 * characters; everything else is strictly encoded) and optionally prefix
 * it with "key=".
 *
 * @param {string} operator - URI-template operator ("" when none).
 * @param {string} value - Raw value.
 * @param {string} [key] - Key name for key-style operators.
 * @returns {string}
 */
function encodeValue(operator, value, key) {
  const allowReserved = operator === "+" || operator === "#";
  const encoded = allowReserved ? encodeReserved(value) : encodeUnreserved(value);
  if (key) {
    return encodeUnreserved(key) + "=" + encoded;
  }
  return encoded;
}
|
||
/**
 * True when `value` is neither `undefined` nor `null`.
 *
 * @param {unknown} value
 * @returns {boolean}
 */
function isDefined(value) {
  const isMissing = value === void 0 || value === null;
  return !isMissing;
}
|
||
/**
 * True for URI-template operators whose expansions include "key=" pairs
 * (";" path-style, "&" and "?" query-style).
 *
 * @param {string} operator
 * @returns {boolean}
 */
function isKeyOperator(operator) {
  return [";", "&", "?"].includes(operator);
}
|
||
// Expand one URI-template variable (RFC 6570): look up `key` in
// `context` and produce the encoded string fragments for it, honoring
// the operator, the ":n" prefix modifier, and the "*" explode modifier.
function getValues(context, operator, key, modifier) {
  var value = context[key], result = [];
  if (isDefined(value) && value !== "") {
    if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
      value = value.toString();
      // ":n" modifier truncates scalars to n characters.
      if (modifier && modifier !== "*") {
        value = value.substring(0, parseInt(modifier, 10));
      }
      result.push(
        encodeValue(operator, value, isKeyOperator(operator) ? key : "")
      );
    } else {
      if (modifier === "*") {
        // Explode: each array element / object entry becomes its own
        // fragment (objects use their own keys as names).
        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function(value2) {
            result.push(
              encodeValue(operator, value2, isKeyOperator(operator) ? key : "")
            );
          });
        } else {
          Object.keys(value).forEach(function(k) {
            if (isDefined(value[k])) {
              result.push(encodeValue(operator, value[k], k));
            }
          });
        }
      } else {
        // Non-exploded composite: join elements (or k,v pairs) with ","
        // into a single fragment.
        const tmp = [];
        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function(value2) {
            tmp.push(encodeValue(operator, value2));
          });
        } else {
          Object.keys(value).forEach(function(k) {
            if (isDefined(value[k])) {
              tmp.push(encodeUnreserved(k));
              tmp.push(encodeValue(operator, value[k].toString()));
            }
          });
        }
        if (isKeyOperator(operator)) {
          result.push(encodeUnreserved(key) + "=" + tmp.join(","));
        } else if (tmp.length !== 0) {
          result.push(tmp.join(","));
        }
      }
    }
  } else {
    // Undefined/empty values: per-operator handling of empties.
    if (operator === ";") {
      if (isDefined(value)) {
        result.push(encodeUnreserved(key));
      }
    } else if (value === "" && (operator === "&" || operator === "?")) {
      result.push(encodeUnreserved(key) + "=");
    } else if (value === "") {
      result.push("");
    }
  }
  return result;
}
|
||
// Wraps a URI-template string in an object exposing `expand(context)`.
function parseUrl(template) {
  const expandWithTemplate = expand.bind(null, template);
  return { expand: expandWithTemplate };
}
|
||
// Expands a URI template against `context`: replaces each `{expression}`
// with its expanded value and percent-encodes the literal text between
// expressions. Handles the operator prefixes (+ # . / ; ? &) and the
// ":n" / "*" variable modifiers. A trailing "/" is stripped unless the
// whole result is "/".
function expand(template, context) {
  var operators = ["+", "#", ".", "/", ";", "?", "&"];
  template = template.replace(
    /\{([^\{\}]+)\}|([^\{\}]+)/g,
    function(_, expression, literal) {
      if (expression) {
        let operator = "";
        const values = [];
        // A leading operator character selects encoding and separator rules.
        if (operators.indexOf(expression.charAt(0)) !== -1) {
          operator = expression.charAt(0);
          expression = expression.substr(1);
        }
        expression.split(/,/g).forEach(function(variable) {
          // tmp[1] = name, tmp[2] = ":n" prefix length, tmp[3] = "*" explode.
          var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
          values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
        });
        if (operator && operator !== "+") {
          var separator = ",";
          if (operator === "?") {
            separator = "&";
          } else if (operator !== "#") {
            separator = operator;
          }
          return (values.length !== 0 ? operator : "") + values.join(separator);
        } else {
          return values.join(",");
        }
      } else {
        // Literal text outside expressions: encode but keep reserved chars.
        return encodeReserved(literal);
      }
    }
  );
  if (template === "/") {
    return template;
  } else {
    return template.replace(/\/$/, "");
  }
}
|
||
// Turns merged endpoint options into a concrete request object
// `{ method, url, headers[, body][, request] }`:
// expands the URL template, moves leftover parameters into the query string
// (GET/HEAD) or body (other methods), and rewrites the accept header for
// media-type formats and previews.
function parse3(options2) {
  var _a;
  let method = options2.method.toUpperCase();
  // Convert Express-style ":param" placeholders into "{param}" templates.
  let url = (options2.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
  let headers = Object.assign({}, options2.headers);
  let body;
  // Everything that is not a known option is treated as a request parameter.
  let parameters = omit(options2, [
    "method",
    "baseUrl",
    "url",
    "headers",
    "request",
    "mediaType"
  ]);
  const urlVariableNames = extractUrlVariableNames(url);
  url = parseUrl(url).expand(parameters);
  if (!/^http/.test(url)) {
    url = options2.baseUrl + url;
  }
  // Parameters consumed by URL expansion (plus baseUrl) must not be re-sent.
  const omittedParameters = Object.keys(options2).filter((option) => urlVariableNames.includes(option)).concat("baseUrl");
  const remainingParameters = omit(parameters, omittedParameters);
  const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
  if (!isBinaryRequest) {
    if (options2.mediaType.format) {
      // Rewrite each accept entry to the requested media-type format.
      headers.accept = headers.accept.split(/,/).map(
        (format) => format.replace(
          /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/,
          `application/vnd$1$2.${options2.mediaType.format}`
        )
      ).join(",");
    }
    if (url.endsWith("/graphql")) {
      if ((_a = options2.mediaType.previews) == null ? void 0 : _a.length) {
        // Merge previews already present in the accept header with the
        // explicitly requested ones.
        const previewsFromAcceptHeader = headers.accept.match(new RegExp("(?<![\\w-])[\\w-]+(?=-preview)", "g")) || [];
        headers.accept = previewsFromAcceptHeader.concat(options2.mediaType.previews).map((preview) => {
          const format = options2.mediaType.format ? `.${options2.mediaType.format}` : "+json";
          return `application/vnd.github.${preview}-preview${format}`;
        }).join(",");
      }
    }
  }
  if (["GET", "HEAD"].includes(method)) {
    url = addQueryParameters(url, remainingParameters);
  } else {
    if ("data" in remainingParameters) {
      body = remainingParameters.data;
    } else {
      if (Object.keys(remainingParameters).length) {
        body = remainingParameters;
      }
    }
  }
  if (!headers["content-type"] && typeof body !== "undefined") {
    headers["content-type"] = "application/json; charset=utf-8";
  }
  // PATCH/PUT require a body; send an empty string when none was given.
  if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
    body = "";
  }
  return Object.assign(
    { method, url, headers },
    typeof body !== "undefined" ? { body } : null,
    options2.request ? { request: options2.request } : null
  );
}
|
||
// Merges `route`/`options2` over `defaults` and parses the result into a
// concrete request object.
function endpointWithDefaults(defaults, route, options2) {
  const mergedOptions = merge(defaults, route, options2);
  return parse3(mergedOptions);
}
|
||
// Builds a new endpoint API whose defaults are `oldDefaults` merged with
// `newDefaults`; the returned function also carries DEFAULTS/defaults/
// merge/parse helpers bound to the merged defaults.
function withDefaults(oldDefaults, newDefaults) {
  const mergedDefaults = merge(oldDefaults, newDefaults);
  const api = endpointWithDefaults.bind(null, mergedDefaults);
  api.DEFAULTS = mergedDefaults;
  api.defaults = withDefaults.bind(null, mergedDefaults);
  api.merge = merge.bind(null, mergedDefaults);
  api.parse = parse3;
  return api;
}
|
||
var endpoint = withDefaults(null, DEFAULTS);
|
||
|
||
// node_modules/@octokit/request/dist-bundle/index.js
|
||
var import_fast_content_type_parse = __toESM(require_fast_content_type_parse(), 1);
|
||
|
||
// node_modules/@octokit/request-error/dist-src/index.js
|
||
// Error thrown for failed Octokit HTTP requests. Exposes the numeric
// `status`, a credential-redacted copy of the `request` options, and the
// `response` when one was received.
var RequestError = class extends Error {
  constructor(message, statusCode, options2) {
    super(message);
    __publicField(this, "name");
    /**
     * http status code
     */
    __publicField(this, "status");
    /**
     * Request options that lead to the error.
     */
    __publicField(this, "request");
    /**
     * Response object if a response was received
     */
    __publicField(this, "response");
    this.name = "HttpError";
    // Explicit radix avoids surprises for string input; non-numeric → 0.
    this.status = Number.parseInt(statusCode, 10);
    if (Number.isNaN(this.status)) {
      this.status = 0;
    }
    if ("response" in options2) {
      this.response = options2.response;
    }
    // Copy the request and redact secrets before storing it on the error.
    const requestCopy = Object.assign({}, options2.request);
    if (options2.request.headers.authorization) {
      requestCopy.headers = Object.assign({}, options2.request.headers, {
        authorization: options2.request.headers.authorization.replace(
          new RegExp("(?<! ) .*$"),
          " [REDACTED]"
        )
      });
    }
    requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
    this.request = requestCopy;
  }
};
|
||
|
||
// node_modules/@octokit/request/dist-bundle/index.js
|
||
// Version placeholder; substituted at publish time.
var VERSION2 = "0.0.0-development";
// Default request options: identify this client via the User-Agent header.
var defaults_default = {
  headers: {
    "user-agent": `octokit-request.js/${VERSION2} ${getUserAgent()}`
  }
};
|
||
// Returns true only for plain objects: object literals and objects created
// with a null prototype. Arrays, class instances, and built-ins are rejected.
function isPlainObject3(value) {
  if (value === null || typeof value !== "object") {
    return false;
  }
  if (Object.prototype.toString.call(value) !== "[object Object]") {
    return false;
  }
  const proto = Object.getPrototypeOf(value);
  if (proto === null) {
    return true;
  }
  const ownsCtor = Object.prototype.hasOwnProperty.call(proto, "constructor");
  const Ctor = ownsCtor && proto.constructor;
  if (typeof Ctor !== "function") {
    return false;
  }
  return Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
}
|
||
// Executes a parsed request via fetch and adapts the result to the Octokit
// response shape `{ url, status, headers, data }`. Network failures and
// HTTP error statuses are converted to RequestError; 204/205 return an
// empty response, 304 throws "Not modified".
function fetchWrapper(requestOptions) {
  return __async(this, null, function* () {
    var _a, _b, _c, _d, _e;
    // Allow callers to inject a custom fetch; otherwise use the global one.
    const fetch = ((_a = requestOptions.request) == null ? void 0 : _a.fetch) || globalThis.fetch;
    if (!fetch) {
      throw new Error(
        "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing"
      );
    }
    const log = ((_b = requestOptions.request) == null ? void 0 : _b.log) || console;
    const parseSuccessResponseBody = ((_c = requestOptions.request) == null ? void 0 : _c.parseSuccessResponseBody) !== false;
    // Plain objects and arrays are serialized; anything else is passed through.
    const body = isPlainObject3(requestOptions.body) || Array.isArray(requestOptions.body) ? JSON.stringify(requestOptions.body) : requestOptions.body;
    // fetch requires string header values.
    const requestHeaders = Object.fromEntries(
      Object.entries(requestOptions.headers).map(([name, value]) => [
        name,
        String(value)
      ])
    );
    let fetchResponse;
    try {
      fetchResponse = yield fetch(requestOptions.url, __spreadValues({
        method: requestOptions.method,
        body,
        redirect: (_d = requestOptions.request) == null ? void 0 : _d.redirect,
        headers: requestHeaders,
        signal: (_e = requestOptions.request) == null ? void 0 : _e.signal
      }, requestOptions.body && { duplex: "half" }));
    } catch (error) {
      // Normalize network-level failures into a RequestError with status 500.
      let message = "Unknown Error";
      if (error instanceof Error) {
        if (error.name === "AbortError") {
          error.status = 500;
          throw error;
        }
        message = error.message;
        // TypeError from fetch often wraps the real cause.
        if (error.name === "TypeError" && "cause" in error) {
          if (error.cause instanceof Error) {
            message = error.cause.message;
          } else if (typeof error.cause === "string") {
            message = error.cause;
          }
        }
      }
      const requestError = new RequestError(message, 500, {
        request: requestOptions
      });
      requestError.cause = error;
      throw requestError;
    }
    const status = fetchResponse.status;
    const url = fetchResponse.url;
    // Flatten the Headers iterable into a plain object.
    const responseHeaders = {};
    for (const [key, value] of fetchResponse.headers) {
      responseHeaders[key] = value;
    }
    const octokitResponse = {
      url,
      status,
      headers: responseHeaders,
      data: ""
    };
    if ("deprecation" in responseHeaders) {
      const matches = responseHeaders.link && responseHeaders.link.match(/<([^<>]+)>; rel="deprecation"/);
      const deprecationLink = matches && matches.pop();
      log.warn(
        `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${responseHeaders.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`
      );
    }
    // 204 No Content / 205 Reset Content: nothing to parse.
    if (status === 204 || status === 205) {
      return octokitResponse;
    }
    if (requestOptions.method === "HEAD") {
      if (status < 400) {
        return octokitResponse;
      }
      throw new RequestError(fetchResponse.statusText, status, {
        response: octokitResponse,
        request: requestOptions
      });
    }
    if (status === 304) {
      octokitResponse.data = yield getResponseData(fetchResponse);
      throw new RequestError("Not modified", status, {
        response: octokitResponse,
        request: requestOptions
      });
    }
    if (status >= 400) {
      octokitResponse.data = yield getResponseData(fetchResponse);
      throw new RequestError(toErrorMessage(octokitResponse.data), status, {
        response: octokitResponse,
        request: requestOptions
      });
    }
    // Optionally skip body parsing and hand back the raw stream.
    octokitResponse.data = parseSuccessResponseBody ? yield getResponseData(fetchResponse) : fetchResponse.body;
    return octokitResponse;
  });
}
|
||
// Parses a fetch Response body according to its content-type:
// JSON types → parsed object (falls back to raw text on parse failure),
// text/* or utf-8 charset → string, anything else → ArrayBuffer.
// Read failures degrade to "" / empty ArrayBuffer rather than throwing.
function getResponseData(response) {
  return __async(this, null, function* () {
    var _a;
    const contentType = response.headers.get("content-type");
    if (!contentType) {
      return response.text().catch(() => "");
    }
    const mimetype = (0, import_fast_content_type_parse.safeParse)(contentType);
    if (isJSONResponse(mimetype)) {
      let text4 = "";
      try {
        text4 = yield response.text();
        return JSON.parse(text4);
      } catch (err) {
        // Invalid JSON: return the raw text instead of failing.
        return text4;
      }
    } else if (mimetype.type.startsWith("text/") || ((_a = mimetype.parameters.charset) == null ? void 0 : _a.toLowerCase()) === "utf-8") {
      return response.text().catch(() => "");
    } else {
      return response.arrayBuffer().catch(() => new ArrayBuffer(0));
    }
  });
}
|
||
// True for media types whose bodies should be JSON-parsed.
function isJSONResponse(mimetype) {
  return ["application/json", "application/scim+json"].includes(mimetype.type);
}
|
||
// Derives a human-readable error message from a parsed response body,
// which may be a string, an ArrayBuffer, or a GitHub API error object.
function toErrorMessage(data) {
  if (typeof data === "string") return data;
  if (data instanceof ArrayBuffer) return "Unknown error";
  if (!("message" in data)) {
    return `Unknown error: ${JSON.stringify(data)}`;
  }
  const suffix = "documentation_url" in data ? ` - ${data.documentation_url}` : "";
  if (!Array.isArray(data.errors)) {
    return `${data.message}${suffix}`;
  }
  const details = data.errors.map((v) => JSON.stringify(v)).join(", ");
  return `${data.message}: ${details}${suffix}`;
}
|
||
// Builds a request API bound to `oldEndpoint` extended with `newDefaults`.
// The returned function merges per-call options, routes through the
// request hook when one is configured, and carries `endpoint`/`defaults`
// helpers for further chaining.
function withDefaults2(oldEndpoint, newDefaults) {
  const endpoint2 = oldEndpoint.defaults(newDefaults);
  const newApi = function(route, parameters) {
    const endpointOptions = endpoint2.merge(route, parameters);
    // Fast path: no hook configured, dispatch directly.
    if (!endpointOptions.request || !endpointOptions.request.hook) {
      return fetchWrapper(endpoint2.parse(endpointOptions));
    }
    // Hooked path: hand the hook a request function it can re-invoke.
    const request2 = (route2, parameters2) => {
      return fetchWrapper(
        endpoint2.parse(endpoint2.merge(route2, parameters2))
      );
    };
    Object.assign(request2, {
      endpoint: endpoint2,
      defaults: withDefaults2.bind(null, endpoint2)
    });
    return endpointOptions.request.hook(request2, endpointOptions);
  };
  return Object.assign(newApi, {
    endpoint: endpoint2,
    defaults: withDefaults2.bind(null, endpoint2)
  });
}
|
||
var request = withDefaults2(endpoint, defaults_default);
|
||
|
||
// node_modules/@octokit/graphql/dist-bundle/index.js
|
||
var VERSION3 = "0.0.0-development";
|
||
// Formats a GraphQL error response into a multi-line message, one bullet
// per error.
function _buildMessageForResponseErrors(data) {
  const bullets = data.errors.map((e) => ` - ${e.message}`);
  return "Request failed due to following response errors:\n" + bullets.join("\n");
}
|
||
// Error thrown when a GraphQL request succeeds at the HTTP level but the
// response contains an `errors` array. Exposes the originating request,
// response headers, and the partial `data` (if any).
var GraphqlResponseError = class extends Error {
  constructor(request2, headers, response) {
    super(_buildMessageForResponseErrors(response));
    __publicField(this, "name", "GraphqlResponseError");
    __publicField(this, "errors");
    __publicField(this, "data");
    this.request = request2;
    this.headers = headers;
    this.response = response;
    this.errors = response.errors;
    this.data = response.data;
    // Keep the constructor frame out of the stack trace (V8 only).
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
  }
};
|
||
// Option keys that configure the request itself and are therefore not
// forwarded as GraphQL variables.
var NON_VARIABLE_OPTIONS = [
  "method",
  "baseUrl",
  "url",
  "headers",
  "request",
  "query",
  "mediaType"
];
// Keys that must never be supplied as variable names alongside a query.
var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
// Matches a GitHub Enterprise Server REST base URL suffix ("/api/v3").
var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
|
||
// Sends a GraphQL request. `query` is either the query string (with
// variables in `options2`) or a full options object. Rejects early when a
// reserved key is used as a variable name; rewrites GHES "/api/v3" base
// URLs to "/api/graphql"; throws GraphqlResponseError on response errors.
function graphql(request2, query, options2) {
  if (options2) {
    if (typeof query === "string" && "query" in options2) {
      return Promise.reject(
        new Error(`[@octokit/graphql] "query" cannot be used as variable name`)
      );
    }
    for (const key in options2) {
      if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;
      return Promise.reject(
        new Error(
          `[@octokit/graphql] "${key}" cannot be used as variable name`
        )
      );
    }
  }
  const parsedOptions = typeof query === "string" ? Object.assign({ query }, options2) : query;
  // Split keys into request options vs. GraphQL variables.
  const requestOptions = Object.keys(
    parsedOptions
  ).reduce((result, key) => {
    if (NON_VARIABLE_OPTIONS.includes(key)) {
      result[key] = parsedOptions[key];
      return result;
    }
    if (!result.variables) {
      result.variables = {};
    }
    result.variables[key] = parsedOptions[key];
    return result;
  }, {});
  const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl;
  // GitHub Enterprise Server: the GraphQL endpoint lives next to /api/v3.
  if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
    requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
  }
  return request2(requestOptions).then((response) => {
    if (response.data.errors) {
      const headers = {};
      for (const key of Object.keys(response.headers)) {
        headers[key] = response.headers[key];
      }
      throw new GraphqlResponseError(
        requestOptions,
        headers,
        response.data
      );
    }
    return response.data.data;
  });
}
|
||
// Builds a graphql API bound to `request2` extended with `newDefaults`;
// exposes `defaults` for further chaining and the underlying `endpoint`.
function withDefaults3(request2, newDefaults) {
  const boundRequest = request2.defaults(newDefaults);
  const newApi = (query, options2) => graphql(boundRequest, query, options2);
  newApi.defaults = withDefaults3.bind(null, boundRequest);
  newApi.endpoint = boundRequest.endpoint;
  return newApi;
}
|
||
// Default graphql API: POSTs to /graphql with its own user-agent.
var graphql2 = withDefaults3(request, {
  headers: {
    "user-agent": `octokit-graphql.js/${VERSION3} ${getUserAgent()}`
  },
  method: "POST",
  url: "/graphql"
});
|
||
// Creates a graphql API on top of a caller-supplied request function.
function withCustomRequest(customRequest) {
  const graphqlEndpointDefaults = {
    method: "POST",
    url: "/graphql"
  };
  return withDefaults3(customRequest, graphqlEndpointDefaults);
}
|
||
|
||
// node_modules/@octokit/auth-token/dist-bundle/index.js
|
||
// Token-prefix patterns used to classify GitHub tokens.
var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; // legacy installation tokens
var REGEX_IS_INSTALLATION = /^ghs_/; // installation (server-to-server) tokens
var REGEX_IS_USER_TO_SERVER = /^ghu_/; // user-to-server tokens
|
||
// Classifies `token` (JWT → "app", ghs_/v1. → "installation",
// ghu_ → "user-to-server", otherwise "oauth") and returns the
// authentication object.
function auth(token) {
  return __async(this, null, function* () {
    let tokenType = "oauth";
    if (token.split(/\./).length === 3) {
      // Three dot-separated segments: a JWT used for app authentication.
      tokenType = "app";
    } else if (REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token)) {
      tokenType = "installation";
    } else if (REGEX_IS_USER_TO_SERVER.test(token)) {
      tokenType = "user-to-server";
    }
    return {
      type: "token",
      token,
      tokenType
    };
  });
}
|
||
// Prefixes `token` for the authorization header: JWTs get "bearer",
// everything else gets "token".
function withAuthorizationPrefix(token) {
  const isJWT = token.split(/\./).length === 3;
  return isJWT ? `bearer ${token}` : `token ${token}`;
}
|
||
// Request hook: injects the authorization header (with the correct
// "token"/"bearer" prefix) into the merged endpoint options before
// dispatching the request.
function hook(token, request2, route, parameters) {
  return __async(this, null, function* () {
    const endpoint2 = request2.endpoint.merge(
      route,
      parameters
    );
    endpoint2.headers.authorization = withAuthorizationPrefix(token);
    return request2(endpoint2);
  });
}
|
||
// Creates a token-based auth strategy: validates the token, strips any
// existing "token "/"bearer " prefix, and returns an auth function that
// also carries the request `hook`.
var createTokenAuth = function createTokenAuth2(token) {
  if (!token) {
    throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
  }
  if (typeof token !== "string") {
    throw new Error(
      "[@octokit/auth-token] Token passed to createTokenAuth is not a string"
    );
  }
  const bareToken = token.replace(/^(token|bearer) +/i, "");
  const authenticate = auth.bind(null, bareToken);
  authenticate.hook = hook.bind(null, bareToken);
  return authenticate;
};
|
||
|
||
// node_modules/@octokit/core/dist-src/version.js
|
||
var VERSION4 = "6.1.4"; // @octokit/core version

// node_modules/@octokit/core/dist-src/index.js
// No-op logger for levels that are silent by default (debug/info).
var noop = () => {
};
// Bind once so logging keeps working even if `console` is patched later.
var consoleWarn = console.warn.bind(console);
var consoleError = console.error.bind(console);
// Suffix appended to every user agent produced by this core version.
var userAgentTrail = `octokit-core.js/${VERSION4} ${getUserAgent()}`;
|
||
// Core Octokit client: wires together request/graphql APIs, hooks, logging,
// authentication (token-based or a custom strategy), and installs any
// registered plugins. Extended via the static `defaults` and `plugin`.
var Octokit = class {
  constructor(options2 = {}) {
    // assigned during constructor
    __publicField(this, "request");
    __publicField(this, "graphql");
    __publicField(this, "log");
    __publicField(this, "hook");
    // TODO: type `octokit.auth` based on passed options.authStrategy
    __publicField(this, "auth");
    const hook2 = new before_after_hook_default.Collection();
    const requestDefaults = {
      baseUrl: request.endpoint.DEFAULTS.baseUrl,
      headers: {},
      request: Object.assign({}, options2.request, {
        // @ts-ignore internal usage only, no need to type
        hook: hook2.bind(null, "request")
      }),
      mediaType: {
        previews: [],
        format: ""
      }
    };
    requestDefaults.headers["user-agent"] = options2.userAgent ? `${options2.userAgent} ${userAgentTrail}` : userAgentTrail;
    if (options2.baseUrl) {
      requestDefaults.baseUrl = options2.baseUrl;
    }
    if (options2.previews) {
      requestDefaults.mediaType.previews = options2.previews;
    }
    if (options2.timeZone) {
      requestDefaults.headers["time-zone"] = options2.timeZone;
    }
    this.request = request.defaults(requestDefaults);
    this.graphql = withCustomRequest(this.request).defaults(requestDefaults);
    // Caller-supplied log methods override the defaults per level.
    this.log = Object.assign(
      {
        debug: noop,
        info: noop,
        warn: consoleWarn,
        error: consoleError
      },
      options2.log
    );
    this.hook = hook2;
    if (!options2.authStrategy) {
      if (!options2.auth) {
        // No auth configured: report as unauthenticated.
        this.auth = () => __async(this, null, function* () {
          return {
            type: "unauthenticated"
          };
        });
      } else {
        // Plain token auth: hook injects the authorization header.
        const auth2 = createTokenAuth(options2.auth);
        hook2.wrap("request", auth2.hook);
        this.auth = auth2;
      }
    } else {
      const _a = options2, { authStrategy } = _a, otherOptions = __objRest(_a, ["authStrategy"]);
      const auth2 = authStrategy(
        Object.assign(
          {
            request: this.request,
            log: this.log,
            // we pass the current octokit instance as well as its constructor options
            // to allow for authentication strategies that return a new octokit instance
            // that shares the same internal state as the current one. The original
            // requirement for this was the "event-octokit" authentication strategy
            // of https://github.com/probot/octokit-auth-probot.
            octokit: this,
            octokitOptions: otherOptions
          },
          options2.auth
        )
      );
      hook2.wrap("request", auth2.hook);
      this.auth = auth2;
    }
    // Apply every registered plugin; plugins may mix new members onto `this`.
    const classConstructor = this.constructor;
    for (let i = 0; i < classConstructor.plugins.length; ++i) {
      Object.assign(this, classConstructor.plugins[i](this, options2));
    }
  }
  // Returns a subclass whose constructor options are pre-merged with
  // `defaults` (or transformed by it, when `defaults` is a function).
  static defaults(defaults) {
    const OctokitWithDefaults = class extends this {
      constructor(...args) {
        const options2 = args[0] || {};
        if (typeof defaults === "function") {
          super(defaults(options2));
          return;
        }
        super(
          Object.assign(
            {},
            defaults,
            options2,
            options2.userAgent && defaults.userAgent ? {
              userAgent: `${options2.userAgent} ${defaults.userAgent}`
            } : null
          )
        );
      }
    };
    return OctokitWithDefaults;
  }
  /**
   * Attach a plugin (or many) to your Octokit instance.
   *
   * @example
   * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
   */
  static plugin(...newPlugins) {
    var _a;
    const currentPlugins = this.plugins;
    // Subclass with the deduplicated union of current and new plugins.
    const NewOctokit = (_a = class extends this {
    }, __publicField(_a, "plugins", currentPlugins.concat(
      newPlugins.filter((plugin) => !currentPlugins.includes(plugin))
    )), _a);
    return NewOctokit;
  }
};
// Class-level metadata: library version and the plugin registry.
__publicField(Octokit, "VERSION", VERSION4);
__publicField(Octokit, "plugins", []);
|
||
|
||
// node_modules/@octokit/rest/node_modules/@octokit/plugin-request-log/dist-src/version.js
|
||
var VERSION5 = "5.3.1"; // @octokit/plugin-request-log version

// node_modules/@octokit/rest/node_modules/@octokit/plugin-request-log/dist-src/index.js
// Octokit plugin: logs each request's method, path, status, GitHub request
// id, and duration (info on success, error on failure).
function requestLog(octokit) {
  octokit.hook.wrap("request", (request2, options2) => {
    octokit.log.debug("request", options2);
    const start = Date.now();
    const requestOptions = octokit.request.endpoint.parse(options2);
    // Log the path relative to the base URL for brevity.
    const path = requestOptions.url.replace(options2.baseUrl, "");
    return request2(options2).then((response) => {
      const requestId = response.headers["x-github-request-id"];
      octokit.log.info(
        `${requestOptions.method} ${path} - ${response.status} with id ${requestId} in ${Date.now() - start}ms`
      );
      return response;
    }).catch((error) => {
      var _a;
      const requestId = ((_a = error.response) == null ? void 0 : _a.headers["x-github-request-id"]) || "UNKNOWN";
      octokit.log.error(
        `${requestOptions.method} ${path} - ${error.status} with id ${requestId} in ${Date.now() - start}ms`
      );
      // Re-throw so callers still see the failure.
      throw error;
    });
  });
}
requestLog.VERSION = VERSION5;
|
||
|
||
// node_modules/@octokit/plugin-paginate-rest/dist-bundle/index.js
|
||
var VERSION6 = "0.0.0-development"; // @octokit/plugin-paginate-rest version placeholder
// Normalizes search-style list responses ({ total_count, <namespace>: [...] })
// so that `response.data` is always the plain item array, re-attaching
// total_count / incomplete_results / repository_selection onto that array.
// Responses that are already plain lists are returned unchanged.
function normalizePaginatedListResponse(response) {
  if (!response.data) {
    return __spreadProps(__spreadValues({}, response), {
      data: []
    });
  }
  // Only namespaced responses carry total_count without a url property.
  const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
  if (!responseNeedsNormalization) return response;
  // Save the metadata, then strip it so the namespace key is the only one left.
  const incompleteResults = response.data.incomplete_results;
  const repositorySelection = response.data.repository_selection;
  const totalCount = response.data.total_count;
  delete response.data.incomplete_results;
  delete response.data.repository_selection;
  delete response.data.total_count;
  const namespaceKey = Object.keys(response.data)[0];
  const data = response.data[namespaceKey];
  response.data = data;
  if (typeof incompleteResults !== "undefined") {
    response.data.incomplete_results = incompleteResults;
  }
  if (typeof repositorySelection !== "undefined") {
    response.data.repository_selection = repositorySelection;
  }
  response.data.total_count = totalCount;
  return response;
}
|
||
// Returns an async-iterable over all pages of a paginated endpoint.
// Each iteration fetches one page, normalizes it, and advances `url` to
// the `rel="next"` link; iteration ends when no next link exists.
// A 409 response (e.g. empty repository) yields one empty page instead of
// throwing.
function iterator(octokit, route, parameters) {
  const options2 = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
  const requestMethod = typeof route === "function" ? route : octokit.request;
  const method = options2.method;
  const headers = options2.headers;
  let url = options2.url;
  return {
    [Symbol.asyncIterator]: () => ({
      next() {
        return __async(this, null, function* () {
          if (!url) return { done: true };
          try {
            const response = yield requestMethod({ method, url, headers });
            const normalizedResponse = normalizePaginatedListResponse(response);
            // Follow the Link header; undefined ends iteration next time.
            url = ((normalizedResponse.headers.link || "").match(
              /<([^<>]+)>;\s*rel="next"/
            ) || [])[1];
            return { value: normalizedResponse };
          } catch (error) {
            if (error.status !== 409) throw error;
            url = "";
            return {
              value: {
                status: 200,
                headers: {},
                data: []
              }
            };
          }
        });
      }
    })
  };
}
|
||
// Fetches all pages of a paginated endpoint and returns the concatenated
// results. `mapFn(response, done)` may transform each page and call
// `done()` to stop early.
function paginate(octokit, route, parameters, mapFn) {
  if (typeof parameters === "function") {
    // Called as paginate(octokit, route, mapFn): shift the arguments.
    mapFn = parameters;
    parameters = void 0;
  }
  const pageIterator = iterator(octokit, route, parameters)[Symbol.asyncIterator]();
  return gather(octokit, [], pageIterator, mapFn);
}
|
||
// Recursively drains `iterator2`, appending each page's items to `results`.
// When `mapFn` is given it transforms each page and may call `done()` to
// stop after the current page.
function gather(octokit, results, iterator2, mapFn) {
  return iterator2.next().then((step) => {
    if (step.done) {
      return results;
    }
    let stopRequested = false;
    const done = () => {
      stopRequested = true;
    };
    const pageItems = mapFn ? mapFn(step.value, done) : step.value.data;
    const collected = results.concat(pageItems);
    if (stopRequested) {
      return collected;
    }
    return gather(octokit, collected, iterator2, mapFn);
  });
}
|
||
// `paginate` with its `iterator` attached, usable without plugin installation.
var composePaginateRest = Object.assign(paginate, {
  iterator
});
// Octokit plugin: adds `octokit.paginate` (and `octokit.paginate.iterator`)
// bound to the instance.
function paginateRest(octokit) {
  return {
    paginate: Object.assign(paginate.bind(null, octokit), {
      iterator: iterator.bind(null, octokit)
    })
  };
}
paginateRest.VERSION = VERSION6;
|
||
|
||
// node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js
|
||
var VERSION7 = "13.3.0";
|
||
|
||
// node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js
|
||
var Endpoints = {
|
||
actions: {
|
||
addCustomLabelsToSelfHostedRunnerForOrg: [
|
||
"POST /orgs/{org}/actions/runners/{runner_id}/labels"
|
||
],
|
||
addCustomLabelsToSelfHostedRunnerForRepo: [
|
||
"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
|
||
],
|
||
addRepoAccessToSelfHostedRunnerGroupInOrg: [
|
||
"PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}"
|
||
],
|
||
addSelectedRepoToOrgSecret: [
|
||
"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
|
||
],
|
||
addSelectedRepoToOrgVariable: [
|
||
"PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
|
||
],
|
||
approveWorkflowRun: [
|
||
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"
|
||
],
|
||
cancelWorkflowRun: [
|
||
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"
|
||
],
|
||
createEnvironmentVariable: [
|
||
"POST /repos/{owner}/{repo}/environments/{environment_name}/variables"
|
||
],
|
||
createOrUpdateEnvironmentSecret: [
|
||
"PUT /repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}"
|
||
],
|
||
createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
|
||
createOrUpdateRepoSecret: [
|
||
"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"
|
||
],
|
||
createOrgVariable: ["POST /orgs/{org}/actions/variables"],
|
||
createRegistrationTokenForOrg: [
|
||
"POST /orgs/{org}/actions/runners/registration-token"
|
||
],
|
||
createRegistrationTokenForRepo: [
|
||
"POST /repos/{owner}/{repo}/actions/runners/registration-token"
|
||
],
|
||
createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
|
||
createRemoveTokenForRepo: [
|
||
"POST /repos/{owner}/{repo}/actions/runners/remove-token"
|
||
],
|
||
createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"],
|
||
createWorkflowDispatch: [
|
||
"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"
|
||
],
|
||
deleteActionsCacheById: [
|
||
"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"
|
||
],
|
||
deleteActionsCacheByKey: [
|
||
"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"
|
||
],
|
||
deleteArtifact: [
|
||
"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"
|
||
],
|
||
deleteEnvironmentSecret: [
|
||
"DELETE /repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}"
|
||
],
|
||
deleteEnvironmentVariable: [
|
||
"DELETE /repos/{owner}/{repo}/environments/{environment_name}/variables/{name}"
|
||
],
|
||
deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
|
||
deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"],
|
||
deleteRepoSecret: [
|
||
"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"
|
||
],
|
||
deleteRepoVariable: [
|
||
"DELETE /repos/{owner}/{repo}/actions/variables/{name}"
|
||
],
|
||
deleteSelfHostedRunnerFromOrg: [
|
||
"DELETE /orgs/{org}/actions/runners/{runner_id}"
|
||
],
|
||
deleteSelfHostedRunnerFromRepo: [
|
||
"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"
|
||
],
|
||
deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"],
|
||
deleteWorkflowRunLogs: [
|
||
"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
|
||
],
|
||
disableSelectedRepositoryGithubActionsOrganization: [
|
||
"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"
|
||
],
|
||
disableWorkflow: [
|
||
"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"
|
||
],
|
||
downloadArtifact: [
|
||
"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"
|
||
],
|
||
downloadJobLogsForWorkflowRun: [
|
||
"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"
|
||
],
|
||
downloadWorkflowRunAttemptLogs: [
|
||
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"
|
||
],
|
||
downloadWorkflowRunLogs: [
|
||
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
|
||
],
|
||
enableSelectedRepositoryGithubActionsOrganization: [
|
||
"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"
|
||
],
|
||
enableWorkflow: [
|
||
"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"
|
||
],
|
||
forceCancelWorkflowRun: [
|
||
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel"
|
||
],
|
||
generateRunnerJitconfigForOrg: [
|
||
"POST /orgs/{org}/actions/runners/generate-jitconfig"
|
||
],
|
||
generateRunnerJitconfigForRepo: [
|
||
"POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig"
|
||
],
|
||
getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"],
|
||
getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"],
|
||
getActionsCacheUsageByRepoForOrg: [
|
||
"GET /orgs/{org}/actions/cache/usage-by-repository"
|
||
],
|
||
getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"],
|
||
getAllowedActionsOrganization: [
|
||
"GET /orgs/{org}/actions/permissions/selected-actions"
|
||
],
|
||
getAllowedActionsRepository: [
|
||
"GET /repos/{owner}/{repo}/actions/permissions/selected-actions"
|
||
],
|
||
getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
|
||
getCustomOidcSubClaimForRepo: [
|
||
"GET /repos/{owner}/{repo}/actions/oidc/customization/sub"
|
||
],
|
||
getEnvironmentPublicKey: [
|
||
"GET /repos/{owner}/{repo}/environments/{environment_name}/secrets/public-key"
|
||
],
|
||
getEnvironmentSecret: [
|
||
"GET /repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}"
|
||
],
|
||
getEnvironmentVariable: [
|
||
"GET /repos/{owner}/{repo}/environments/{environment_name}/variables/{name}"
|
||
],
|
||
getGithubActionsDefaultWorkflowPermissionsOrganization: [
|
||
"GET /orgs/{org}/actions/permissions/workflow"
|
||
],
|
||
getGithubActionsDefaultWorkflowPermissionsRepository: [
|
||
"GET /repos/{owner}/{repo}/actions/permissions/workflow"
|
||
],
|
||
getGithubActionsPermissionsOrganization: [
|
||
"GET /orgs/{org}/actions/permissions"
|
||
],
|
||
getGithubActionsPermissionsRepository: [
|
||
"GET /repos/{owner}/{repo}/actions/permissions"
|
||
],
|
||
getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
|
||
getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"],
|
||
getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"],
|
||
getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"],
|
||
getPendingDeploymentsForRun: [
|
||
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
|
||
],
|
||
getRepoPermissions: [
|
||
"GET /repos/{owner}/{repo}/actions/permissions",
|
||
{},
|
||
{ renamed: ["actions", "getGithubActionsPermissionsRepository"] }
|
||
],
|
||
getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"],
|
||
getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
|
||
getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"],
|
||
getReviewsForRun: [
|
||
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"
|
||
],
|
||
getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
|
||
getSelfHostedRunnerForRepo: [
|
||
"GET /repos/{owner}/{repo}/actions/runners/{runner_id}"
|
||
],
|
||
getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
|
||
getWorkflowAccessToRepository: [
|
||
"GET /repos/{owner}/{repo}/actions/permissions/access"
|
||
],
|
||
getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
|
||
getWorkflowRunAttempt: [
|
||
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"
|
||
],
|
||
getWorkflowRunUsage: [
|
||
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"
|
||
],
|
||
getWorkflowUsage: [
|
||
"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"
|
||
],
|
||
listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"],
|
||
listEnvironmentSecrets: [
|
||
"GET /repos/{owner}/{repo}/environments/{environment_name}/secrets"
|
||
],
|
||
listEnvironmentVariables: [
|
||
"GET /repos/{owner}/{repo}/environments/{environment_name}/variables"
|
||
],
|
||
listJobsForWorkflowRun: [
|
||
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"
|
||
],
|
||
listJobsForWorkflowRunAttempt: [
|
||
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"
|
||
],
|
||
listLabelsForSelfHostedRunnerForOrg: [
|
||
"GET /orgs/{org}/actions/runners/{runner_id}/labels"
|
||
],
|
||
listLabelsForSelfHostedRunnerForRepo: [
|
||
"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
|
||
],
|
||
listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
|
||
listOrgVariables: ["GET /orgs/{org}/actions/variables"],
|
||
listRepoOrganizationSecrets: [
|
||
"GET /repos/{owner}/{repo}/actions/organization-secrets"
|
||
],
|
||
listRepoOrganizationVariables: [
|
||
"GET /repos/{owner}/{repo}/actions/organization-variables"
|
||
],
|
||
listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
|
||
listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"],
|
||
listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
|
||
listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"],
|
||
listRunnerApplicationsForRepo: [
|
||
"GET /repos/{owner}/{repo}/actions/runners/downloads"
|
||
],
|
||
listSelectedReposForOrgSecret: [
|
||
"GET /orgs/{org}/actions/secrets/{secret_name}/repositories"
|
||
],
|
||
listSelectedReposForOrgVariable: [
|
||
"GET /orgs/{org}/actions/variables/{name}/repositories"
|
||
],
|
||
listSelectedRepositoriesEnabledGithubActionsOrganization: [
|
||
"GET /orgs/{org}/actions/permissions/repositories"
|
||
],
|
||
listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"],
|
||
listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"],
|
||
listWorkflowRunArtifacts: [
|
||
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"
|
||
],
|
||
listWorkflowRuns: [
|
||
"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"
|
||
],
|
||
listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
|
||
reRunJobForWorkflowRun: [
|
||
"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"
|
||
],
|
||
reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
|
||
reRunWorkflowFailedJobs: [
|
||
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"
|
||
],
|
||
removeAllCustomLabelsFromSelfHostedRunnerForOrg: [
|
||
"DELETE /orgs/{org}/actions/runners/{runner_id}/labels"
|
||
],
|
||
removeAllCustomLabelsFromSelfHostedRunnerForRepo: [
|
||
"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
|
||
],
|
||
removeCustomLabelFromSelfHostedRunnerForOrg: [
|
||
"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"
|
||
],
|
||
removeCustomLabelFromSelfHostedRunnerForRepo: [
|
||
"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"
|
||
],
|
||
removeSelectedRepoFromOrgSecret: [
|
||
"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
|
||
],
|
||
removeSelectedRepoFromOrgVariable: [
|
||
"DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
|
||
],
|
||
reviewCustomGatesForRun: [
|
||
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule"
|
||
],
|
||
reviewPendingDeploymentsForRun: [
|
||
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
|
||
],
|
||
setAllowedActionsOrganization: [
|
||
"PUT /orgs/{org}/actions/permissions/selected-actions"
|
||
],
|
||
setAllowedActionsRepository: [
|
||
"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"
|
||
],
|
||
setCustomLabelsForSelfHostedRunnerForOrg: [
|
||
"PUT /orgs/{org}/actions/runners/{runner_id}/labels"
|
||
],
|
||
setCustomLabelsForSelfHostedRunnerForRepo: [
|
||
"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
|
||
],
|
||
setCustomOidcSubClaimForRepo: [
|
||
"PUT /repos/{owner}/{repo}/actions/oidc/customization/sub"
|
||
],
|
||
setGithubActionsDefaultWorkflowPermissionsOrganization: [
|
||
"PUT /orgs/{org}/actions/permissions/workflow"
|
||
],
|
||
setGithubActionsDefaultWorkflowPermissionsRepository: [
|
||
"PUT /repos/{owner}/{repo}/actions/permissions/workflow"
|
||
],
|
||
setGithubActionsPermissionsOrganization: [
|
||
"PUT /orgs/{org}/actions/permissions"
|
||
],
|
||
setGithubActionsPermissionsRepository: [
|
||
"PUT /repos/{owner}/{repo}/actions/permissions"
|
||
],
|
||
setSelectedReposForOrgSecret: [
|
||
"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"
|
||
],
|
||
setSelectedReposForOrgVariable: [
|
||
"PUT /orgs/{org}/actions/variables/{name}/repositories"
|
||
],
|
||
setSelectedRepositoriesEnabledGithubActionsOrganization: [
|
||
"PUT /orgs/{org}/actions/permissions/repositories"
|
||
],
|
||
setWorkflowAccessToRepository: [
|
||
"PUT /repos/{owner}/{repo}/actions/permissions/access"
|
||
],
|
||
updateEnvironmentVariable: [
|
||
"PATCH /repos/{owner}/{repo}/environments/{environment_name}/variables/{name}"
|
||
],
|
||
updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"],
|
||
updateRepoVariable: [
|
||
"PATCH /repos/{owner}/{repo}/actions/variables/{name}"
|
||
]
|
||
},
|
||
activity: {
|
||
checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
|
||
deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"],
|
||
deleteThreadSubscription: [
|
||
"DELETE /notifications/threads/{thread_id}/subscription"
|
||
],
|
||
getFeeds: ["GET /feeds"],
|
||
getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"],
|
||
getThread: ["GET /notifications/threads/{thread_id}"],
|
||
getThreadSubscriptionForAuthenticatedUser: [
|
||
"GET /notifications/threads/{thread_id}/subscription"
|
||
],
|
||
listEventsForAuthenticatedUser: ["GET /users/{username}/events"],
|
||
listNotificationsForAuthenticatedUser: ["GET /notifications"],
|
||
listOrgEventsForAuthenticatedUser: [
|
||
"GET /users/{username}/events/orgs/{org}"
|
||
],
|
||
listPublicEvents: ["GET /events"],
|
||
listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"],
|
||
listPublicEventsForUser: ["GET /users/{username}/events/public"],
|
||
listPublicOrgEvents: ["GET /orgs/{org}/events"],
|
||
listReceivedEventsForUser: ["GET /users/{username}/received_events"],
|
||
listReceivedPublicEventsForUser: [
|
||
"GET /users/{username}/received_events/public"
|
||
],
|
||
listRepoEvents: ["GET /repos/{owner}/{repo}/events"],
|
||
listRepoNotificationsForAuthenticatedUser: [
|
||
"GET /repos/{owner}/{repo}/notifications"
|
||
],
|
||
listReposStarredByAuthenticatedUser: ["GET /user/starred"],
|
||
listReposStarredByUser: ["GET /users/{username}/starred"],
|
||
listReposWatchedByUser: ["GET /users/{username}/subscriptions"],
|
||
listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"],
|
||
listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"],
|
||
listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"],
|
||
markNotificationsAsRead: ["PUT /notifications"],
|
||
markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"],
|
||
markThreadAsDone: ["DELETE /notifications/threads/{thread_id}"],
|
||
markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"],
|
||
setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"],
|
||
setThreadSubscription: [
|
||
"PUT /notifications/threads/{thread_id}/subscription"
|
||
],
|
||
starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"],
|
||
unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"]
|
||
},
|
||
apps: {
|
||
addRepoToInstallation: [
|
||
"PUT /user/installations/{installation_id}/repositories/{repository_id}",
|
||
{},
|
||
{ renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] }
|
||
],
|
||
addRepoToInstallationForAuthenticatedUser: [
|
||
"PUT /user/installations/{installation_id}/repositories/{repository_id}"
|
||
],
|
||
checkToken: ["POST /applications/{client_id}/token"],
|
||
createFromManifest: ["POST /app-manifests/{code}/conversions"],
|
||
createInstallationAccessToken: [
|
||
"POST /app/installations/{installation_id}/access_tokens"
|
||
],
|
||
deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
|
||
deleteInstallation: ["DELETE /app/installations/{installation_id}"],
|
||
deleteToken: ["DELETE /applications/{client_id}/token"],
|
||
getAuthenticated: ["GET /app"],
|
||
getBySlug: ["GET /apps/{app_slug}"],
|
||
getInstallation: ["GET /app/installations/{installation_id}"],
|
||
getOrgInstallation: ["GET /orgs/{org}/installation"],
|
||
getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"],
|
||
getSubscriptionPlanForAccount: [
|
||
"GET /marketplace_listing/accounts/{account_id}"
|
||
],
|
||
getSubscriptionPlanForAccountStubbed: [
|
||
"GET /marketplace_listing/stubbed/accounts/{account_id}"
|
||
],
|
||
getUserInstallation: ["GET /users/{username}/installation"],
|
||
getWebhookConfigForApp: ["GET /app/hook/config"],
|
||
getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"],
|
||
listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"],
|
||
listAccountsForPlanStubbed: [
|
||
"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"
|
||
],
|
||
listInstallationReposForAuthenticatedUser: [
|
||
"GET /user/installations/{installation_id}/repositories"
|
||
],
|
||
listInstallationRequestsForAuthenticatedApp: [
|
||
"GET /app/installation-requests"
|
||
],
|
||
listInstallations: ["GET /app/installations"],
|
||
listInstallationsForAuthenticatedUser: ["GET /user/installations"],
|
||
listPlans: ["GET /marketplace_listing/plans"],
|
||
listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"],
|
||
listReposAccessibleToInstallation: ["GET /installation/repositories"],
|
||
listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"],
|
||
listSubscriptionsForAuthenticatedUserStubbed: [
|
||
"GET /user/marketplace_purchases/stubbed"
|
||
],
|
||
listWebhookDeliveries: ["GET /app/hook/deliveries"],
|
||
redeliverWebhookDelivery: [
|
||
"POST /app/hook/deliveries/{delivery_id}/attempts"
|
||
],
|
||
removeRepoFromInstallation: [
|
||
"DELETE /user/installations/{installation_id}/repositories/{repository_id}",
|
||
{},
|
||
{ renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] }
|
||
],
|
||
removeRepoFromInstallationForAuthenticatedUser: [
|
||
"DELETE /user/installations/{installation_id}/repositories/{repository_id}"
|
||
],
|
||
resetToken: ["PATCH /applications/{client_id}/token"],
|
||
revokeInstallationAccessToken: ["DELETE /installation/token"],
|
||
scopeToken: ["POST /applications/{client_id}/token/scoped"],
|
||
suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"],
|
||
unsuspendInstallation: [
|
||
"DELETE /app/installations/{installation_id}/suspended"
|
||
],
|
||
updateWebhookConfigForApp: ["PATCH /app/hook/config"]
|
||
},
|
||
billing: {
|
||
getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
|
||
getGithubActionsBillingUser: [
|
||
"GET /users/{username}/settings/billing/actions"
|
||
],
|
||
getGithubBillingUsageReportOrg: [
|
||
"GET /organizations/{org}/settings/billing/usage"
|
||
],
|
||
getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
|
||
getGithubPackagesBillingUser: [
|
||
"GET /users/{username}/settings/billing/packages"
|
||
],
|
||
getSharedStorageBillingOrg: [
|
||
"GET /orgs/{org}/settings/billing/shared-storage"
|
||
],
|
||
getSharedStorageBillingUser: [
|
||
"GET /users/{username}/settings/billing/shared-storage"
|
||
]
|
||
},
|
||
checks: {
|
||
create: ["POST /repos/{owner}/{repo}/check-runs"],
|
||
createSuite: ["POST /repos/{owner}/{repo}/check-suites"],
|
||
get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"],
|
||
getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"],
|
||
listAnnotations: [
|
||
"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"
|
||
],
|
||
listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"],
|
||
listForSuite: [
|
||
"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"
|
||
],
|
||
listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"],
|
||
rerequestRun: [
|
||
"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"
|
||
],
|
||
rerequestSuite: [
|
||
"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"
|
||
],
|
||
setSuitesPreferences: [
|
||
"PATCH /repos/{owner}/{repo}/check-suites/preferences"
|
||
],
|
||
update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"]
|
||
},
|
||
codeScanning: {
|
||
commitAutofix: [
|
||
"POST /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/autofix/commits"
|
||
],
|
||
createAutofix: [
|
||
"POST /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/autofix"
|
||
],
|
||
createVariantAnalysis: [
|
||
"POST /repos/{owner}/{repo}/code-scanning/codeql/variant-analyses"
|
||
],
|
||
deleteAnalysis: [
|
||
"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"
|
||
],
|
||
deleteCodeqlDatabase: [
|
||
"DELETE /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}"
|
||
],
|
||
getAlert: [
|
||
"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}",
|
||
{},
|
||
{ renamedParameters: { alert_id: "alert_number" } }
|
||
],
|
||
getAnalysis: [
|
||
"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"
|
||
],
|
||
getAutofix: [
|
||
"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/autofix"
|
||
],
|
||
getCodeqlDatabase: [
|
||
"GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}"
|
||
],
|
||
getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"],
|
||
getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"],
|
||
getVariantAnalysis: [
|
||
"GET /repos/{owner}/{repo}/code-scanning/codeql/variant-analyses/{codeql_variant_analysis_id}"
|
||
],
|
||
getVariantAnalysisRepoTask: [
|
||
"GET /repos/{owner}/{repo}/code-scanning/codeql/variant-analyses/{codeql_variant_analysis_id}/repos/{repo_owner}/{repo_name}"
|
||
],
|
||
listAlertInstances: [
|
||
"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"
|
||
],
|
||
listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"],
|
||
listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
|
||
listAlertsInstances: [
|
||
"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances",
|
||
{},
|
||
{ renamed: ["codeScanning", "listAlertInstances"] }
|
||
],
|
||
listCodeqlDatabases: [
|
||
"GET /repos/{owner}/{repo}/code-scanning/codeql/databases"
|
||
],
|
||
listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"],
|
||
updateAlert: [
|
||
"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"
|
||
],
|
||
updateDefaultSetup: [
|
||
"PATCH /repos/{owner}/{repo}/code-scanning/default-setup"
|
||
],
|
||
uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"]
|
||
},
|
||
codeSecurity: {
|
||
attachConfiguration: [
|
||
"POST /orgs/{org}/code-security/configurations/{configuration_id}/attach"
|
||
],
|
||
attachEnterpriseConfiguration: [
|
||
"POST /enterprises/{enterprise}/code-security/configurations/{configuration_id}/attach"
|
||
],
|
||
createConfiguration: ["POST /orgs/{org}/code-security/configurations"],
|
||
createConfigurationForEnterprise: [
|
||
"POST /enterprises/{enterprise}/code-security/configurations"
|
||
],
|
||
deleteConfiguration: [
|
||
"DELETE /orgs/{org}/code-security/configurations/{configuration_id}"
|
||
],
|
||
deleteConfigurationForEnterprise: [
|
||
"DELETE /enterprises/{enterprise}/code-security/configurations/{configuration_id}"
|
||
],
|
||
detachConfiguration: [
|
||
"DELETE /orgs/{org}/code-security/configurations/detach"
|
||
],
|
||
getConfiguration: [
|
||
"GET /orgs/{org}/code-security/configurations/{configuration_id}"
|
||
],
|
||
getConfigurationForRepository: [
|
||
"GET /repos/{owner}/{repo}/code-security-configuration"
|
||
],
|
||
getConfigurationsForEnterprise: [
|
||
"GET /enterprises/{enterprise}/code-security/configurations"
|
||
],
|
||
getConfigurationsForOrg: ["GET /orgs/{org}/code-security/configurations"],
|
||
getDefaultConfigurations: [
|
||
"GET /orgs/{org}/code-security/configurations/defaults"
|
||
],
|
||
getDefaultConfigurationsForEnterprise: [
|
||
"GET /enterprises/{enterprise}/code-security/configurations/defaults"
|
||
],
|
||
getRepositoriesForConfiguration: [
|
||
"GET /orgs/{org}/code-security/configurations/{configuration_id}/repositories"
|
||
],
|
||
getRepositoriesForEnterpriseConfiguration: [
|
||
"GET /enterprises/{enterprise}/code-security/configurations/{configuration_id}/repositories"
|
||
],
|
||
getSingleConfigurationForEnterprise: [
|
||
"GET /enterprises/{enterprise}/code-security/configurations/{configuration_id}"
|
||
],
|
||
setConfigurationAsDefault: [
|
||
"PUT /orgs/{org}/code-security/configurations/{configuration_id}/defaults"
|
||
],
|
||
setConfigurationAsDefaultForEnterprise: [
|
||
"PUT /enterprises/{enterprise}/code-security/configurations/{configuration_id}/defaults"
|
||
],
|
||
updateConfiguration: [
|
||
"PATCH /orgs/{org}/code-security/configurations/{configuration_id}"
|
||
],
|
||
updateEnterpriseConfiguration: [
|
||
"PATCH /enterprises/{enterprise}/code-security/configurations/{configuration_id}"
|
||
]
|
||
},
|
||
codesOfConduct: {
|
||
getAllCodesOfConduct: ["GET /codes_of_conduct"],
|
||
getConductCode: ["GET /codes_of_conduct/{key}"]
|
||
},
|
||
codespaces: {
|
||
addRepositoryForSecretForAuthenticatedUser: [
|
||
"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
|
||
],
|
||
addSelectedRepoToOrgSecret: [
|
||
"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
|
||
],
|
||
checkPermissionsForDevcontainer: [
|
||
"GET /repos/{owner}/{repo}/codespaces/permissions_check"
|
||
],
|
||
codespaceMachinesForAuthenticatedUser: [
|
||
"GET /user/codespaces/{codespace_name}/machines"
|
||
],
|
||
createForAuthenticatedUser: ["POST /user/codespaces"],
|
||
createOrUpdateOrgSecret: [
|
||
"PUT /orgs/{org}/codespaces/secrets/{secret_name}"
|
||
],
|
||
createOrUpdateRepoSecret: [
|
||
"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
|
||
],
|
||
createOrUpdateSecretForAuthenticatedUser: [
|
||
"PUT /user/codespaces/secrets/{secret_name}"
|
||
],
|
||
createWithPrForAuthenticatedUser: [
|
||
"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"
|
||
],
|
||
createWithRepoForAuthenticatedUser: [
|
||
"POST /repos/{owner}/{repo}/codespaces"
|
||
],
|
||
deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"],
|
||
deleteFromOrganization: [
|
||
"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"
|
||
],
|
||
deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"],
|
||
deleteRepoSecret: [
|
||
"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
|
||
],
|
||
deleteSecretForAuthenticatedUser: [
|
||
"DELETE /user/codespaces/secrets/{secret_name}"
|
||
],
|
||
exportForAuthenticatedUser: [
|
||
"POST /user/codespaces/{codespace_name}/exports"
|
||
],
|
||
getCodespacesForUserInOrg: [
|
||
"GET /orgs/{org}/members/{username}/codespaces"
|
||
],
|
||
getExportDetailsForAuthenticatedUser: [
|
||
"GET /user/codespaces/{codespace_name}/exports/{export_id}"
|
||
],
|
||
getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"],
|
||
getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"],
|
||
getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"],
|
||
getPublicKeyForAuthenticatedUser: [
|
||
"GET /user/codespaces/secrets/public-key"
|
||
],
|
||
getRepoPublicKey: [
|
||
"GET /repos/{owner}/{repo}/codespaces/secrets/public-key"
|
||
],
|
||
getRepoSecret: [
|
||
"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
|
||
],
|
||
getSecretForAuthenticatedUser: [
|
||
"GET /user/codespaces/secrets/{secret_name}"
|
||
],
|
||
listDevcontainersInRepositoryForAuthenticatedUser: [
|
||
"GET /repos/{owner}/{repo}/codespaces/devcontainers"
|
||
],
|
||
listForAuthenticatedUser: ["GET /user/codespaces"],
|
||
listInOrganization: [
|
||
"GET /orgs/{org}/codespaces",
|
||
{},
|
||
{ renamedParameters: { org_id: "org" } }
|
||
],
|
||
listInRepositoryForAuthenticatedUser: [
|
||
"GET /repos/{owner}/{repo}/codespaces"
|
||
],
|
||
listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"],
|
||
listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"],
|
||
listRepositoriesForSecretForAuthenticatedUser: [
|
||
"GET /user/codespaces/secrets/{secret_name}/repositories"
|
||
],
|
||
listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"],
|
||
listSelectedReposForOrgSecret: [
|
||
"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
|
||
],
|
||
preFlightWithRepoForAuthenticatedUser: [
|
||
"GET /repos/{owner}/{repo}/codespaces/new"
|
||
],
|
||
publishForAuthenticatedUser: [
|
||
"POST /user/codespaces/{codespace_name}/publish"
|
||
],
|
||
removeRepositoryForSecretForAuthenticatedUser: [
|
||
"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
|
||
],
|
||
removeSelectedRepoFromOrgSecret: [
|
||
"DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
|
||
],
|
||
repoMachinesForAuthenticatedUser: [
|
||
"GET /repos/{owner}/{repo}/codespaces/machines"
|
||
],
|
||
setRepositoriesForSecretForAuthenticatedUser: [
|
||
"PUT /user/codespaces/secrets/{secret_name}/repositories"
|
||
],
|
||
setSelectedReposForOrgSecret: [
|
||
"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
|
||
],
|
||
startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"],
|
||
stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"],
|
||
stopInOrganization: [
|
||
"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"
|
||
],
|
||
updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"]
|
||
},
|
||
copilot: {
|
||
addCopilotSeatsForTeams: [
|
||
"POST /orgs/{org}/copilot/billing/selected_teams"
|
||
],
|
||
addCopilotSeatsForUsers: [
|
||
"POST /orgs/{org}/copilot/billing/selected_users"
|
||
],
|
||
cancelCopilotSeatAssignmentForTeams: [
|
||
"DELETE /orgs/{org}/copilot/billing/selected_teams"
|
||
],
|
||
cancelCopilotSeatAssignmentForUsers: [
|
||
"DELETE /orgs/{org}/copilot/billing/selected_users"
|
||
],
|
||
copilotMetricsForOrganization: ["GET /orgs/{org}/copilot/metrics"],
|
||
copilotMetricsForTeam: ["GET /orgs/{org}/team/{team_slug}/copilot/metrics"],
|
||
getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"],
|
||
getCopilotSeatDetailsForUser: [
|
||
"GET /orgs/{org}/members/{username}/copilot"
|
||
],
|
||
listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"],
|
||
usageMetricsForOrg: ["GET /orgs/{org}/copilot/usage"],
|
||
usageMetricsForTeam: ["GET /orgs/{org}/team/{team_slug}/copilot/usage"]
|
||
},
|
||
dependabot: {
|
||
addSelectedRepoToOrgSecret: [
|
||
"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
|
||
],
|
||
createOrUpdateOrgSecret: [
|
||
"PUT /orgs/{org}/dependabot/secrets/{secret_name}"
|
||
],
|
||
createOrUpdateRepoSecret: [
|
||
"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
|
||
],
|
||
deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"],
|
||
deleteRepoSecret: [
|
||
"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
|
||
],
|
||
getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"],
|
||
getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"],
|
||
getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"],
|
||
getRepoPublicKey: [
|
||
"GET /repos/{owner}/{repo}/dependabot/secrets/public-key"
|
||
],
|
||
getRepoSecret: [
|
||
"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
|
||
],
|
||
listAlertsForEnterprise: [
|
||
"GET /enterprises/{enterprise}/dependabot/alerts"
|
||
],
|
||
listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"],
|
||
listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"],
|
||
listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"],
|
||
listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"],
|
||
listSelectedReposForOrgSecret: [
|
||
"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
|
||
],
|
||
removeSelectedRepoFromOrgSecret: [
|
||
"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
|
||
],
|
||
setSelectedReposForOrgSecret: [
|
||
"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
|
||
],
|
||
updateAlert: [
|
||
"PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"
|
||
]
|
||
},
|
||
dependencyGraph: {
|
||
createRepositorySnapshot: [
|
||
"POST /repos/{owner}/{repo}/dependency-graph/snapshots"
|
||
],
|
||
diffRange: [
|
||
"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"
|
||
],
|
||
exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"]
|
||
},
|
||
emojis: { get: ["GET /emojis"] },
|
||
gists: {
|
||
checkIsStarred: ["GET /gists/{gist_id}/star"],
|
||
create: ["POST /gists"],
|
||
createComment: ["POST /gists/{gist_id}/comments"],
|
||
delete: ["DELETE /gists/{gist_id}"],
|
||
deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"],
|
||
fork: ["POST /gists/{gist_id}/forks"],
|
||
get: ["GET /gists/{gist_id}"],
|
||
getComment: ["GET /gists/{gist_id}/comments/{comment_id}"],
|
||
getRevision: ["GET /gists/{gist_id}/{sha}"],
|
||
list: ["GET /gists"],
|
||
listComments: ["GET /gists/{gist_id}/comments"],
|
||
listCommits: ["GET /gists/{gist_id}/commits"],
|
||
listForUser: ["GET /users/{username}/gists"],
|
||
listForks: ["GET /gists/{gist_id}/forks"],
|
||
listPublic: ["GET /gists/public"],
|
||
listStarred: ["GET /gists/starred"],
|
||
star: ["PUT /gists/{gist_id}/star"],
|
||
unstar: ["DELETE /gists/{gist_id}/star"],
|
||
update: ["PATCH /gists/{gist_id}"],
|
||
updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"]
|
||
},
|
||
git: {
|
||
createBlob: ["POST /repos/{owner}/{repo}/git/blobs"],
|
||
createCommit: ["POST /repos/{owner}/{repo}/git/commits"],
|
||
createRef: ["POST /repos/{owner}/{repo}/git/refs"],
|
||
createTag: ["POST /repos/{owner}/{repo}/git/tags"],
|
||
createTree: ["POST /repos/{owner}/{repo}/git/trees"],
|
||
deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"],
|
||
getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"],
|
||
getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"],
|
||
getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"],
|
||
getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"],
|
||
getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"],
|
||
listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"],
|
||
updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]
|
||
},
|
||
gitignore: {
|
||
getAllTemplates: ["GET /gitignore/templates"],
|
||
getTemplate: ["GET /gitignore/templates/{name}"]
|
||
},
|
||
interactions: {
|
||
getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"],
|
||
getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"],
|
||
getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"],
|
||
getRestrictionsForYourPublicRepos: [
|
||
"GET /user/interaction-limits",
|
||
{},
|
||
{ renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] }
|
||
],
|
||
removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"],
|
||
removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"],
|
||
removeRestrictionsForRepo: [
|
||
"DELETE /repos/{owner}/{repo}/interaction-limits"
|
||
],
|
||
removeRestrictionsForYourPublicRepos: [
|
||
"DELETE /user/interaction-limits",
|
||
{},
|
||
{ renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] }
|
||
],
|
||
setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"],
|
||
setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"],
|
||
setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"],
|
||
setRestrictionsForYourPublicRepos: [
|
||
"PUT /user/interaction-limits",
|
||
{},
|
||
{ renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] }
|
||
]
|
||
},
|
||
issues: {
|
||
addAssignees: [
|
||
"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"
|
||
],
|
||
addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
||
addSubIssue: [
|
||
"POST /repos/{owner}/{repo}/issues/{issue_number}/sub_issues"
|
||
],
|
||
checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"],
|
||
checkUserCanBeAssignedToIssue: [
|
||
"GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}"
|
||
],
|
||
create: ["POST /repos/{owner}/{repo}/issues"],
|
||
createComment: [
|
||
"POST /repos/{owner}/{repo}/issues/{issue_number}/comments"
|
||
],
|
||
createLabel: ["POST /repos/{owner}/{repo}/labels"],
|
||
createMilestone: ["POST /repos/{owner}/{repo}/milestones"],
|
||
deleteComment: [
|
||
"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"
|
||
],
|
||
deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"],
|
||
deleteMilestone: [
|
||
"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"
|
||
],
|
||
get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"],
|
||
getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"],
|
||
getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"],
|
||
getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"],
|
||
getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"],
|
||
list: ["GET /issues"],
|
||
listAssignees: ["GET /repos/{owner}/{repo}/assignees"],
|
||
listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"],
|
||
listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"],
|
||
listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"],
|
||
listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"],
|
||
listEventsForTimeline: [
|
||
"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"
|
||
],
|
||
listForAuthenticatedUser: ["GET /user/issues"],
|
||
listForOrg: ["GET /orgs/{org}/issues"],
|
||
listForRepo: ["GET /repos/{owner}/{repo}/issues"],
|
||
listLabelsForMilestone: [
|
||
"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"
|
||
],
|
||
listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"],
|
||
listLabelsOnIssue: [
|
||
"GET /repos/{owner}/{repo}/issues/{issue_number}/labels"
|
||
],
|
||
listMilestones: ["GET /repos/{owner}/{repo}/milestones"],
|
||
listSubIssues: [
|
||
"GET /repos/{owner}/{repo}/issues/{issue_number}/sub_issues"
|
||
],
|
||
lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"],
|
||
removeAllLabels: [
|
||
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"
|
||
],
|
||
removeAssignees: [
|
||
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"
|
||
],
|
||
removeLabel: [
|
||
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"
|
||
],
|
||
removeSubIssue: [
|
||
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/sub_issue"
|
||
],
|
||
reprioritizeSubIssue: [
|
||
"PATCH /repos/{owner}/{repo}/issues/{issue_number}/sub_issues/priority"
|
||
],
|
||
setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
||
unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"],
|
||
update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"],
|
||
updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"],
|
||
updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"],
|
||
updateMilestone: [
|
||
"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"
|
||
]
|
||
},
|
||
licenses: {
|
||
get: ["GET /licenses/{license}"],
|
||
getAllCommonlyUsed: ["GET /licenses"],
|
||
getForRepo: ["GET /repos/{owner}/{repo}/license"]
|
||
},
|
||
markdown: {
|
||
render: ["POST /markdown"],
|
||
renderRaw: [
|
||
"POST /markdown/raw",
|
||
{ headers: { "content-type": "text/plain; charset=utf-8" } }
|
||
]
|
||
},
|
||
meta: {
|
||
get: ["GET /meta"],
|
||
getAllVersions: ["GET /versions"],
|
||
getOctocat: ["GET /octocat"],
|
||
getZen: ["GET /zen"],
|
||
root: ["GET /"]
|
||
},
|
||
migrations: {
|
||
deleteArchiveForAuthenticatedUser: [
|
||
"DELETE /user/migrations/{migration_id}/archive"
|
||
],
|
||
deleteArchiveForOrg: [
|
||
"DELETE /orgs/{org}/migrations/{migration_id}/archive"
|
||
],
|
||
downloadArchiveForOrg: [
|
||
"GET /orgs/{org}/migrations/{migration_id}/archive"
|
||
],
|
||
getArchiveForAuthenticatedUser: [
|
||
"GET /user/migrations/{migration_id}/archive"
|
||
],
|
||
getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"],
|
||
getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"],
|
||
listForAuthenticatedUser: ["GET /user/migrations"],
|
||
listForOrg: ["GET /orgs/{org}/migrations"],
|
||
listReposForAuthenticatedUser: [
|
||
"GET /user/migrations/{migration_id}/repositories"
|
||
],
|
||
listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"],
|
||
listReposForUser: [
|
||
"GET /user/migrations/{migration_id}/repositories",
|
||
{},
|
||
{ renamed: ["migrations", "listReposForAuthenticatedUser"] }
|
||
],
|
||
startForAuthenticatedUser: ["POST /user/migrations"],
|
||
startForOrg: ["POST /orgs/{org}/migrations"],
|
||
unlockRepoForAuthenticatedUser: [
|
||
"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"
|
||
],
|
||
unlockRepoForOrg: [
|
||
"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"
|
||
]
|
||
},
|
||
oidc: {
|
||
getOidcCustomSubTemplateForOrg: [
|
||
"GET /orgs/{org}/actions/oidc/customization/sub"
|
||
],
|
||
updateOidcCustomSubTemplateForOrg: [
|
||
"PUT /orgs/{org}/actions/oidc/customization/sub"
|
||
]
|
||
},
|
||
orgs: {
|
||
addSecurityManagerTeam: [
|
||
"PUT /orgs/{org}/security-managers/teams/{team_slug}",
|
||
{},
|
||
{
|
||
deprecated: "octokit.rest.orgs.addSecurityManagerTeam() is deprecated, see https://docs.github.com/rest/orgs/security-managers#add-a-security-manager-team"
|
||
}
|
||
],
|
||
assignTeamToOrgRole: [
|
||
"PUT /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}"
|
||
],
|
||
assignUserToOrgRole: [
|
||
"PUT /orgs/{org}/organization-roles/users/{username}/{role_id}"
|
||
],
|
||
blockUser: ["PUT /orgs/{org}/blocks/{username}"],
|
||
cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"],
|
||
checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"],
|
||
checkMembershipForUser: ["GET /orgs/{org}/members/{username}"],
|
||
checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"],
|
||
convertMemberToOutsideCollaborator: [
|
||
"PUT /orgs/{org}/outside_collaborators/{username}"
|
||
],
|
||
createInvitation: ["POST /orgs/{org}/invitations"],
|
||
createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"],
|
||
createOrUpdateCustomPropertiesValuesForRepos: [
|
||
"PATCH /orgs/{org}/properties/values"
|
||
],
|
||
createOrUpdateCustomProperty: [
|
||
"PUT /orgs/{org}/properties/schema/{custom_property_name}"
|
||
],
|
||
createWebhook: ["POST /orgs/{org}/hooks"],
|
||
delete: ["DELETE /orgs/{org}"],
|
||
deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"],
|
||
enableOrDisableSecurityProductOnAllOrgRepos: [
|
||
"POST /orgs/{org}/{security_product}/{enablement}",
|
||
{},
|
||
{
|
||
deprecated: "octokit.rest.orgs.enableOrDisableSecurityProductOnAllOrgRepos() is deprecated, see https://docs.github.com/rest/orgs/orgs#enable-or-disable-a-security-feature-for-an-organization"
|
||
}
|
||
],
|
||
get: ["GET /orgs/{org}"],
|
||
getAllCustomProperties: ["GET /orgs/{org}/properties/schema"],
|
||
getCustomProperty: [
|
||
"GET /orgs/{org}/properties/schema/{custom_property_name}"
|
||
],
|
||
getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"],
|
||
getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"],
|
||
getOrgRole: ["GET /orgs/{org}/organization-roles/{role_id}"],
|
||
getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"],
|
||
getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"],
|
||
getWebhookDelivery: [
|
||
"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"
|
||
],
|
||
list: ["GET /organizations"],
|
||
listAppInstallations: ["GET /orgs/{org}/installations"],
|
||
listAttestations: ["GET /orgs/{org}/attestations/{subject_digest}"],
|
||
listBlockedUsers: ["GET /orgs/{org}/blocks"],
|
||
listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"],
|
||
listFailedInvitations: ["GET /orgs/{org}/failed_invitations"],
|
||
listForAuthenticatedUser: ["GET /user/orgs"],
|
||
listForUser: ["GET /users/{username}/orgs"],
|
||
listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"],
|
||
listMembers: ["GET /orgs/{org}/members"],
|
||
listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"],
|
||
listOrgRoleTeams: ["GET /orgs/{org}/organization-roles/{role_id}/teams"],
|
||
listOrgRoleUsers: ["GET /orgs/{org}/organization-roles/{role_id}/users"],
|
||
listOrgRoles: ["GET /orgs/{org}/organization-roles"],
|
||
listOrganizationFineGrainedPermissions: [
|
||
"GET /orgs/{org}/organization-fine-grained-permissions"
|
||
],
|
||
listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"],
|
||
listPatGrantRepositories: [
|
||
"GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories"
|
||
],
|
||
listPatGrantRequestRepositories: [
|
||
"GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories"
|
||
],
|
||
listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"],
|
||
listPatGrants: ["GET /orgs/{org}/personal-access-tokens"],
|
||
listPendingInvitations: ["GET /orgs/{org}/invitations"],
|
||
listPublicMembers: ["GET /orgs/{org}/public_members"],
|
||
listSecurityManagerTeams: [
|
||
"GET /orgs/{org}/security-managers",
|
||
{},
|
||
{
|
||
deprecated: "octokit.rest.orgs.listSecurityManagerTeams() is deprecated, see https://docs.github.com/rest/orgs/security-managers#list-security-manager-teams"
|
||
}
|
||
],
|
||
listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"],
|
||
listWebhooks: ["GET /orgs/{org}/hooks"],
|
||
pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"],
|
||
redeliverWebhookDelivery: [
|
||
"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
|
||
],
|
||
removeCustomProperty: [
|
||
"DELETE /orgs/{org}/properties/schema/{custom_property_name}"
|
||
],
|
||
removeMember: ["DELETE /orgs/{org}/members/{username}"],
|
||
removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"],
|
||
removeOutsideCollaborator: [
|
||
"DELETE /orgs/{org}/outside_collaborators/{username}"
|
||
],
|
||
removePublicMembershipForAuthenticatedUser: [
|
||
"DELETE /orgs/{org}/public_members/{username}"
|
||
],
|
||
removeSecurityManagerTeam: [
|
||
"DELETE /orgs/{org}/security-managers/teams/{team_slug}",
|
||
{},
|
||
{
|
||
deprecated: "octokit.rest.orgs.removeSecurityManagerTeam() is deprecated, see https://docs.github.com/rest/orgs/security-managers#remove-a-security-manager-team"
|
||
}
|
||
],
|
||
reviewPatGrantRequest: [
|
||
"POST /orgs/{org}/personal-access-token-requests/{pat_request_id}"
|
||
],
|
||
reviewPatGrantRequestsInBulk: [
|
||
"POST /orgs/{org}/personal-access-token-requests"
|
||
],
|
||
revokeAllOrgRolesTeam: [
|
||
"DELETE /orgs/{org}/organization-roles/teams/{team_slug}"
|
||
],
|
||
revokeAllOrgRolesUser: [
|
||
"DELETE /orgs/{org}/organization-roles/users/{username}"
|
||
],
|
||
revokeOrgRoleTeam: [
|
||
"DELETE /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}"
|
||
],
|
||
revokeOrgRoleUser: [
|
||
"DELETE /orgs/{org}/organization-roles/users/{username}/{role_id}"
|
||
],
|
||
setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"],
|
||
setPublicMembershipForAuthenticatedUser: [
|
||
"PUT /orgs/{org}/public_members/{username}"
|
||
],
|
||
unblockUser: ["DELETE /orgs/{org}/blocks/{username}"],
|
||
update: ["PATCH /orgs/{org}"],
|
||
updateMembershipForAuthenticatedUser: [
|
||
"PATCH /user/memberships/orgs/{org}"
|
||
],
|
||
updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"],
|
||
updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"],
|
||
updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"],
|
||
updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"]
|
||
},
|
||
packages: {
|
||
deletePackageForAuthenticatedUser: [
|
||
"DELETE /user/packages/{package_type}/{package_name}"
|
||
],
|
||
deletePackageForOrg: [
|
||
"DELETE /orgs/{org}/packages/{package_type}/{package_name}"
|
||
],
|
||
deletePackageForUser: [
|
||
"DELETE /users/{username}/packages/{package_type}/{package_name}"
|
||
],
|
||
deletePackageVersionForAuthenticatedUser: [
|
||
"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
|
||
],
|
||
deletePackageVersionForOrg: [
|
||
"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
|
||
],
|
||
deletePackageVersionForUser: [
|
||
"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
|
||
],
|
||
getAllPackageVersionsForAPackageOwnedByAnOrg: [
|
||
"GET /orgs/{org}/packages/{package_type}/{package_name}/versions",
|
||
{},
|
||
{ renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] }
|
||
],
|
||
getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [
|
||
"GET /user/packages/{package_type}/{package_name}/versions",
|
||
{},
|
||
{
|
||
renamed: [
|
||
"packages",
|
||
"getAllPackageVersionsForPackageOwnedByAuthenticatedUser"
|
||
]
|
||
}
|
||
],
|
||
getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [
|
||
"GET /user/packages/{package_type}/{package_name}/versions"
|
||
],
|
||
getAllPackageVersionsForPackageOwnedByOrg: [
|
||
"GET /orgs/{org}/packages/{package_type}/{package_name}/versions"
|
||
],
|
||
getAllPackageVersionsForPackageOwnedByUser: [
|
||
"GET /users/{username}/packages/{package_type}/{package_name}/versions"
|
||
],
|
||
getPackageForAuthenticatedUser: [
|
||
"GET /user/packages/{package_type}/{package_name}"
|
||
],
|
||
getPackageForOrganization: [
|
||
"GET /orgs/{org}/packages/{package_type}/{package_name}"
|
||
],
|
||
getPackageForUser: [
|
||
"GET /users/{username}/packages/{package_type}/{package_name}"
|
||
],
|
||
getPackageVersionForAuthenticatedUser: [
|
||
"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
|
||
],
|
||
getPackageVersionForOrganization: [
|
||
"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
|
||
],
|
||
getPackageVersionForUser: [
|
||
"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
|
||
],
|
||
listDockerMigrationConflictingPackagesForAuthenticatedUser: [
|
||
"GET /user/docker/conflicts"
|
||
],
|
||
listDockerMigrationConflictingPackagesForOrganization: [
|
||
"GET /orgs/{org}/docker/conflicts"
|
||
],
|
||
listDockerMigrationConflictingPackagesForUser: [
|
||
"GET /users/{username}/docker/conflicts"
|
||
],
|
||
listPackagesForAuthenticatedUser: ["GET /user/packages"],
|
||
listPackagesForOrganization: ["GET /orgs/{org}/packages"],
|
||
listPackagesForUser: ["GET /users/{username}/packages"],
|
||
restorePackageForAuthenticatedUser: [
|
||
"POST /user/packages/{package_type}/{package_name}/restore{?token}"
|
||
],
|
||
restorePackageForOrg: [
|
||
"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"
|
||
],
|
||
restorePackageForUser: [
|
||
"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"
|
||
],
|
||
restorePackageVersionForAuthenticatedUser: [
|
||
"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
|
||
],
|
||
restorePackageVersionForOrg: [
|
||
"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
|
||
],
|
||
restorePackageVersionForUser: [
|
||
"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
|
||
]
|
||
},
|
||
privateRegistries: {
|
||
createOrgPrivateRegistry: ["POST /orgs/{org}/private-registries"],
|
||
deleteOrgPrivateRegistry: [
|
||
"DELETE /orgs/{org}/private-registries/{secret_name}"
|
||
],
|
||
getOrgPrivateRegistry: ["GET /orgs/{org}/private-registries/{secret_name}"],
|
||
getOrgPublicKey: ["GET /orgs/{org}/private-registries/public-key"],
|
||
listOrgPrivateRegistries: ["GET /orgs/{org}/private-registries"],
|
||
updateOrgPrivateRegistry: [
|
||
"PATCH /orgs/{org}/private-registries/{secret_name}"
|
||
]
|
||
},
|
||
projects: {
|
||
addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"],
|
||
createCard: ["POST /projects/columns/{column_id}/cards"],
|
||
createColumn: ["POST /projects/{project_id}/columns"],
|
||
createForAuthenticatedUser: ["POST /user/projects"],
|
||
createForOrg: ["POST /orgs/{org}/projects"],
|
||
createForRepo: ["POST /repos/{owner}/{repo}/projects"],
|
||
delete: ["DELETE /projects/{project_id}"],
|
||
deleteCard: ["DELETE /projects/columns/cards/{card_id}"],
|
||
deleteColumn: ["DELETE /projects/columns/{column_id}"],
|
||
get: ["GET /projects/{project_id}"],
|
||
getCard: ["GET /projects/columns/cards/{card_id}"],
|
||
getColumn: ["GET /projects/columns/{column_id}"],
|
||
getPermissionForUser: [
|
||
"GET /projects/{project_id}/collaborators/{username}/permission"
|
||
],
|
||
listCards: ["GET /projects/columns/{column_id}/cards"],
|
||
listCollaborators: ["GET /projects/{project_id}/collaborators"],
|
||
listColumns: ["GET /projects/{project_id}/columns"],
|
||
listForOrg: ["GET /orgs/{org}/projects"],
|
||
listForRepo: ["GET /repos/{owner}/{repo}/projects"],
|
||
listForUser: ["GET /users/{username}/projects"],
|
||
moveCard: ["POST /projects/columns/cards/{card_id}/moves"],
|
||
moveColumn: ["POST /projects/columns/{column_id}/moves"],
|
||
removeCollaborator: [
|
||
"DELETE /projects/{project_id}/collaborators/{username}"
|
||
],
|
||
update: ["PATCH /projects/{project_id}"],
|
||
updateCard: ["PATCH /projects/columns/cards/{card_id}"],
|
||
updateColumn: ["PATCH /projects/columns/{column_id}"]
|
||
},
|
||
pulls: {
|
||
checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
|
||
create: ["POST /repos/{owner}/{repo}/pulls"],
|
||
createReplyForReviewComment: [
|
||
"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"
|
||
],
|
||
createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
|
||
createReviewComment: [
|
||
"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"
|
||
],
|
||
deletePendingReview: [
|
||
"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
|
||
],
|
||
deleteReviewComment: [
|
||
"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"
|
||
],
|
||
dismissReview: [
|
||
"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"
|
||
],
|
||
get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"],
|
||
getReview: [
|
||
"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
|
||
],
|
||
getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
|
||
list: ["GET /repos/{owner}/{repo}/pulls"],
|
||
listCommentsForReview: [
|
||
"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"
|
||
],
|
||
listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"],
|
||
listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"],
|
||
listRequestedReviewers: [
|
||
"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
|
||
],
|
||
listReviewComments: [
|
||
"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"
|
||
],
|
||
listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"],
|
||
listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
|
||
merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
|
||
removeRequestedReviewers: [
|
||
"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
|
||
],
|
||
requestReviewers: [
|
||
"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
|
||
],
|
||
submitReview: [
|
||
"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"
|
||
],
|
||
update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"],
|
||
updateBranch: [
|
||
"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"
|
||
],
|
||
updateReview: [
|
||
"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
|
||
],
|
||
updateReviewComment: [
|
||
"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"
|
||
]
|
||
},
|
||
rateLimit: { get: ["GET /rate_limit"] },
|
||
reactions: {
|
||
createForCommitComment: [
|
||
"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"
|
||
],
|
||
createForIssue: [
|
||
"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"
|
||
],
|
||
createForIssueComment: [
|
||
"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
|
||
],
|
||
createForPullRequestReviewComment: [
|
||
"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
|
||
],
|
||
createForRelease: [
|
||
"POST /repos/{owner}/{repo}/releases/{release_id}/reactions"
|
||
],
|
||
createForTeamDiscussionCommentInOrg: [
|
||
"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
|
||
],
|
||
createForTeamDiscussionInOrg: [
|
||
"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
|
||
],
|
||
deleteForCommitComment: [
|
||
"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"
|
||
],
|
||
deleteForIssue: [
|
||
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"
|
||
],
|
||
deleteForIssueComment: [
|
||
"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"
|
||
],
|
||
deleteForPullRequestComment: [
|
||
"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"
|
||
],
|
||
deleteForRelease: [
|
||
"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"
|
||
],
|
||
deleteForTeamDiscussion: [
|
||
"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"
|
||
],
|
||
deleteForTeamDiscussionComment: [
|
||
"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"
|
||
],
|
||
listForCommitComment: [
|
||
"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"
|
||
],
|
||
listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
|
||
listForIssueComment: [
|
||
"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
|
||
],
|
||
listForPullRequestReviewComment: [
|
||
"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
|
||
],
|
||
listForRelease: [
|
||
"GET /repos/{owner}/{repo}/releases/{release_id}/reactions"
|
||
],
|
||
listForTeamDiscussionCommentInOrg: [
|
||
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
|
||
],
|
||
listForTeamDiscussionInOrg: [
|
||
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
|
||
]
|
||
},
|
||
repos: {
|
||
acceptInvitation: [
|
||
"PATCH /user/repository_invitations/{invitation_id}",
|
||
{},
|
||
{ renamed: ["repos", "acceptInvitationForAuthenticatedUser"] }
|
||
],
|
||
acceptInvitationForAuthenticatedUser: [
|
||
"PATCH /user/repository_invitations/{invitation_id}"
|
||
],
|
||
addAppAccessRestrictions: [
|
||
"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
|
||
{},
|
||
{ mapToData: "apps" }
|
||
],
|
||
addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"],
|
||
addStatusCheckContexts: [
|
||
"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
|
||
{},
|
||
{ mapToData: "contexts" }
|
||
],
|
||
addTeamAccessRestrictions: [
|
||
"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
|
||
{},
|
||
{ mapToData: "teams" }
|
||
],
|
||
addUserAccessRestrictions: [
|
||
"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
|
||
{},
|
||
{ mapToData: "users" }
|
||
],
|
||
cancelPagesDeployment: [
|
||
"POST /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel"
|
||
],
|
||
checkAutomatedSecurityFixes: [
|
||
"GET /repos/{owner}/{repo}/automated-security-fixes"
|
||
],
|
||
checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"],
|
||
checkPrivateVulnerabilityReporting: [
|
||
"GET /repos/{owner}/{repo}/private-vulnerability-reporting"
|
||
],
|
||
checkVulnerabilityAlerts: [
|
||
"GET /repos/{owner}/{repo}/vulnerability-alerts"
|
||
],
|
||
codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"],
|
||
compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
|
||
compareCommitsWithBasehead: [
|
||
"GET /repos/{owner}/{repo}/compare/{basehead}"
|
||
],
|
||
createAttestation: ["POST /repos/{owner}/{repo}/attestations"],
|
||
createAutolink: ["POST /repos/{owner}/{repo}/autolinks"],
|
||
createCommitComment: [
|
||
"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"
|
||
],
|
||
createCommitSignatureProtection: [
|
||
"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
|
||
],
|
||
createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"],
|
||
createDeployKey: ["POST /repos/{owner}/{repo}/keys"],
|
||
createDeployment: ["POST /repos/{owner}/{repo}/deployments"],
|
||
createDeploymentBranchPolicy: [
|
||
"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
|
||
],
|
||
createDeploymentProtectionRule: [
|
||
"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
|
||
],
|
||
createDeploymentStatus: [
|
||
"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
|
||
],
|
||
createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"],
|
||
createForAuthenticatedUser: ["POST /user/repos"],
|
||
createFork: ["POST /repos/{owner}/{repo}/forks"],
|
||
createInOrg: ["POST /orgs/{org}/repos"],
|
||
createOrUpdateCustomPropertiesValues: [
|
||
"PATCH /repos/{owner}/{repo}/properties/values"
|
||
],
|
||
createOrUpdateEnvironment: [
|
||
"PUT /repos/{owner}/{repo}/environments/{environment_name}"
|
||
],
|
||
createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"],
|
||
createOrgRuleset: ["POST /orgs/{org}/rulesets"],
|
||
createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployments"],
|
||
createPagesSite: ["POST /repos/{owner}/{repo}/pages"],
|
||
createRelease: ["POST /repos/{owner}/{repo}/releases"],
|
||
createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"],
|
||
createUsingTemplate: [
|
||
"POST /repos/{template_owner}/{template_repo}/generate"
|
||
],
|
||
createWebhook: ["POST /repos/{owner}/{repo}/hooks"],
|
||
declineInvitation: [
|
||
"DELETE /user/repository_invitations/{invitation_id}",
|
||
{},
|
||
{ renamed: ["repos", "declineInvitationForAuthenticatedUser"] }
|
||
],
|
||
declineInvitationForAuthenticatedUser: [
|
||
"DELETE /user/repository_invitations/{invitation_id}"
|
||
],
|
||
delete: ["DELETE /repos/{owner}/{repo}"],
|
||
deleteAccessRestrictions: [
|
||
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
|
||
],
|
||
deleteAdminBranchProtection: [
|
||
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
|
||
],
|
||
deleteAnEnvironment: [
|
||
"DELETE /repos/{owner}/{repo}/environments/{environment_name}"
|
||
],
|
||
deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"],
|
||
deleteBranchProtection: [
|
||
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection"
|
||
],
|
||
deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"],
|
||
deleteCommitSignatureProtection: [
|
||
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
|
||
],
|
||
deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"],
|
||
deleteDeployment: [
|
||
"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"
|
||
],
|
||
deleteDeploymentBranchPolicy: [
|
||
"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
|
||
],
|
||
deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"],
|
||
deleteInvitation: [
|
||
"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"
|
||
],
|
||
deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"],
|
||
deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"],
|
||
deletePullRequestReviewProtection: [
|
||
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
|
||
],
|
||
deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"],
|
||
deleteReleaseAsset: [
|
||
"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"
|
||
],
|
||
deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
|
||
deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"],
|
||
disableAutomatedSecurityFixes: [
|
||
"DELETE /repos/{owner}/{repo}/automated-security-fixes"
|
||
],
|
||
disableDeploymentProtectionRule: [
|
||
"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
|
||
],
|
||
disablePrivateVulnerabilityReporting: [
|
||
"DELETE /repos/{owner}/{repo}/private-vulnerability-reporting"
|
||
],
|
||
disableVulnerabilityAlerts: [
|
||
"DELETE /repos/{owner}/{repo}/vulnerability-alerts"
|
||
],
|
||
downloadArchive: [
|
||
"GET /repos/{owner}/{repo}/zipball/{ref}",
|
||
{},
|
||
{ renamed: ["repos", "downloadZipballArchive"] }
|
||
],
|
||
downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"],
|
||
downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"],
|
||
enableAutomatedSecurityFixes: [
|
||
"PUT /repos/{owner}/{repo}/automated-security-fixes"
|
||
],
|
||
enablePrivateVulnerabilityReporting: [
|
||
"PUT /repos/{owner}/{repo}/private-vulnerability-reporting"
|
||
],
|
||
enableVulnerabilityAlerts: [
|
||
"PUT /repos/{owner}/{repo}/vulnerability-alerts"
|
||
],
|
||
generateReleaseNotes: [
|
||
"POST /repos/{owner}/{repo}/releases/generate-notes"
|
||
],
|
||
get: ["GET /repos/{owner}/{repo}"],
|
||
getAccessRestrictions: [
|
||
"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
|
||
],
|
||
getAdminBranchProtection: [
|
||
"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
|
||
],
|
||
getAllDeploymentProtectionRules: [
|
||
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
|
||
],
|
||
getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"],
|
||
getAllStatusCheckContexts: [
|
||
"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"
|
||
],
|
||
getAllTopics: ["GET /repos/{owner}/{repo}/topics"],
|
||
getAppsWithAccessToProtectedBranch: [
|
||
"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"
|
||
],
|
||
getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"],
|
||
getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"],
|
||
getBranchProtection: [
|
||
"GET /repos/{owner}/{repo}/branches/{branch}/protection"
|
||
],
|
||
getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"],
|
||
getClones: ["GET /repos/{owner}/{repo}/traffic/clones"],
|
||
getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"],
|
||
getCollaboratorPermissionLevel: [
|
||
"GET /repos/{owner}/{repo}/collaborators/{username}/permission"
|
||
],
|
||
getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"],
|
||
getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"],
|
||
getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"],
|
||
getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"],
|
||
getCommitSignatureProtection: [
|
||
"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
|
||
],
|
||
getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"],
|
||
getContent: ["GET /repos/{owner}/{repo}/contents/{path}"],
|
||
getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"],
|
||
getCustomDeploymentProtectionRule: [
|
||
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
|
||
],
|
||
getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"],
|
||
getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"],
|
||
getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"],
|
||
getDeploymentBranchPolicy: [
|
||
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
|
||
],
|
||
getDeploymentStatus: [
|
||
"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"
|
||
],
|
||
getEnvironment: [
|
||
"GET /repos/{owner}/{repo}/environments/{environment_name}"
|
||
],
|
||
getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"],
|
||
getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"],
|
||
getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"],
|
||
getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"],
|
||
getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"],
|
||
getOrgRulesets: ["GET /orgs/{org}/rulesets"],
|
||
getPages: ["GET /repos/{owner}/{repo}/pages"],
|
||
getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"],
|
||
getPagesDeployment: [
|
||
"GET /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}"
|
||
],
|
||
getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"],
|
||
getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"],
|
||
getPullRequestReviewProtection: [
|
||
"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
|
||
],
|
||
getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"],
|
||
getReadme: ["GET /repos/{owner}/{repo}/readme"],
|
||
getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"],
|
||
getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"],
|
||
getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"],
|
||
getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"],
|
||
getRepoRuleSuite: [
|
||
"GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}"
|
||
],
|
||
getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"],
|
||
getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
|
||
getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"],
|
||
getStatusChecksProtection: [
|
||
"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
|
||
],
|
||
getTeamsWithAccessToProtectedBranch: [
|
||
"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"
|
||
],
|
||
getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"],
|
||
getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"],
|
||
getUsersWithAccessToProtectedBranch: [
|
||
"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"
|
||
],
|
||
getViews: ["GET /repos/{owner}/{repo}/traffic/views"],
|
||
getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"],
|
||
getWebhookConfigForRepo: [
|
||
"GET /repos/{owner}/{repo}/hooks/{hook_id}/config"
|
||
],
|
||
getWebhookDelivery: [
|
||
"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"
|
||
],
|
||
listActivities: ["GET /repos/{owner}/{repo}/activity"],
|
||
listAttestations: [
|
||
"GET /repos/{owner}/{repo}/attestations/{subject_digest}"
|
||
],
|
||
listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"],
|
||
listBranches: ["GET /repos/{owner}/{repo}/branches"],
|
||
listBranchesForHeadCommit: [
|
||
"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"
|
||
],
|
||
listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"],
|
||
listCommentsForCommit: [
|
||
"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"
|
||
],
|
||
listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"],
|
||
listCommitStatusesForRef: [
|
||
"GET /repos/{owner}/{repo}/commits/{ref}/statuses"
|
||
],
|
||
listCommits: ["GET /repos/{owner}/{repo}/commits"],
|
||
listContributors: ["GET /repos/{owner}/{repo}/contributors"],
|
||
listCustomDeploymentRuleIntegrations: [
|
||
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps"
|
||
],
|
||
listDeployKeys: ["GET /repos/{owner}/{repo}/keys"],
|
||
listDeploymentBranchPolicies: [
|
||
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
|
||
],
|
||
listDeploymentStatuses: [
|
||
"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
|
||
],
|
||
listDeployments: ["GET /repos/{owner}/{repo}/deployments"],
|
||
listForAuthenticatedUser: ["GET /user/repos"],
|
||
listForOrg: ["GET /orgs/{org}/repos"],
|
||
listForUser: ["GET /users/{username}/repos"],
|
||
listForks: ["GET /repos/{owner}/{repo}/forks"],
|
||
listInvitations: ["GET /repos/{owner}/{repo}/invitations"],
|
||
listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"],
|
||
listLanguages: ["GET /repos/{owner}/{repo}/languages"],
|
||
listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"],
|
||
listPublic: ["GET /repositories"],
|
||
listPullRequestsAssociatedWithCommit: [
|
||
"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"
|
||
],
|
||
listReleaseAssets: [
|
||
"GET /repos/{owner}/{repo}/releases/{release_id}/assets"
|
||
],
|
||
listReleases: ["GET /repos/{owner}/{repo}/releases"],
|
||
listTags: ["GET /repos/{owner}/{repo}/tags"],
|
||
listTeams: ["GET /repos/{owner}/{repo}/teams"],
|
||
listWebhookDeliveries: [
|
||
"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"
|
||
],
|
||
listWebhooks: ["GET /repos/{owner}/{repo}/hooks"],
|
||
merge: ["POST /repos/{owner}/{repo}/merges"],
|
||
mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"],
|
||
pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"],
|
||
redeliverWebhookDelivery: [
|
||
"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
|
||
],
|
||
removeAppAccessRestrictions: [
|
||
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
|
||
{},
|
||
{ mapToData: "apps" }
|
||
],
|
||
removeCollaborator: [
|
||
"DELETE /repos/{owner}/{repo}/collaborators/{username}"
|
||
],
|
||
removeStatusCheckContexts: [
|
||
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
|
||
{},
|
||
{ mapToData: "contexts" }
|
||
],
|
||
removeStatusCheckProtection: [
|
||
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
|
||
],
|
||
removeTeamAccessRestrictions: [
|
||
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
|
||
{},
|
||
{ mapToData: "teams" }
|
||
],
|
||
removeUserAccessRestrictions: [
|
||
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
|
||
{},
|
||
{ mapToData: "users" }
|
||
],
|
||
renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
|
||
replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"],
|
||
requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
|
||
setAdminBranchProtection: [
|
||
"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
|
||
],
|
||
setAppAccessRestrictions: [
|
||
"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
|
||
{},
|
||
{ mapToData: "apps" }
|
||
],
|
||
setStatusCheckContexts: [
|
||
"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
|
||
{},
|
||
{ mapToData: "contexts" }
|
||
],
|
||
setTeamAccessRestrictions: [
|
||
"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
|
||
{},
|
||
{ mapToData: "teams" }
|
||
],
|
||
setUserAccessRestrictions: [
|
||
"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
|
||
{},
|
||
{ mapToData: "users" }
|
||
],
|
||
testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"],
|
||
transfer: ["POST /repos/{owner}/{repo}/transfer"],
|
||
update: ["PATCH /repos/{owner}/{repo}"],
|
||
updateBranchProtection: [
|
||
"PUT /repos/{owner}/{repo}/branches/{branch}/protection"
|
||
],
|
||
updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"],
|
||
updateDeploymentBranchPolicy: [
|
||
"PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
|
||
],
|
||
updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"],
|
||
updateInvitation: [
|
||
"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"
|
||
],
|
||
updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"],
|
||
updatePullRequestReviewProtection: [
|
||
"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
|
||
],
|
||
updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"],
|
||
updateReleaseAsset: [
|
||
"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"
|
||
],
|
||
updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
|
||
updateStatusCheckPotection: [
|
||
"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks",
|
||
{},
|
||
{ renamed: ["repos", "updateStatusCheckProtection"] }
|
||
],
|
||
updateStatusCheckProtection: [
|
||
"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
|
||
],
|
||
updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"],
|
||
updateWebhookConfigForRepo: [
|
||
"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"
|
||
],
|
||
uploadReleaseAsset: [
|
||
"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}",
|
||
{ baseUrl: "https://uploads.github.com" }
|
||
]
|
||
},
|
||
search: {
|
||
code: ["GET /search/code"],
|
||
commits: ["GET /search/commits"],
|
||
issuesAndPullRequests: ["GET /search/issues"],
|
||
labels: ["GET /search/labels"],
|
||
repos: ["GET /search/repositories"],
|
||
topics: ["GET /search/topics"],
|
||
users: ["GET /search/users"]
|
||
},
|
||
secretScanning: {
|
||
createPushProtectionBypass: [
|
||
"POST /repos/{owner}/{repo}/secret-scanning/push-protection-bypasses"
|
||
],
|
||
getAlert: [
|
||
"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
|
||
],
|
||
getScanHistory: ["GET /repos/{owner}/{repo}/secret-scanning/scan-history"],
|
||
listAlertsForEnterprise: [
|
||
"GET /enterprises/{enterprise}/secret-scanning/alerts"
|
||
],
|
||
listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"],
|
||
listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
|
||
listLocationsForAlert: [
|
||
"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"
|
||
],
|
||
updateAlert: [
|
||
"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
|
||
]
|
||
},
|
||
securityAdvisories: {
|
||
createFork: [
|
||
"POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks"
|
||
],
|
||
createPrivateVulnerabilityReport: [
|
||
"POST /repos/{owner}/{repo}/security-advisories/reports"
|
||
],
|
||
createRepositoryAdvisory: [
|
||
"POST /repos/{owner}/{repo}/security-advisories"
|
||
],
|
||
createRepositoryAdvisoryCveRequest: [
|
||
"POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve"
|
||
],
|
||
getGlobalAdvisory: ["GET /advisories/{ghsa_id}"],
|
||
getRepositoryAdvisory: [
|
||
"GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
|
||
],
|
||
listGlobalAdvisories: ["GET /advisories"],
|
||
listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"],
|
||
listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"],
|
||
updateRepositoryAdvisory: [
|
||
"PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
|
||
]
|
||
},
|
||
teams: {
|
||
addOrUpdateMembershipForUserInOrg: [
|
||
"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"
|
||
],
|
||
addOrUpdateProjectPermissionsInOrg: [
|
||
"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"
|
||
],
|
||
addOrUpdateRepoPermissionsInOrg: [
|
||
"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
|
||
],
|
||
checkPermissionsForProjectInOrg: [
|
||
"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"
|
||
],
|
||
checkPermissionsForRepoInOrg: [
|
||
"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
|
||
],
|
||
create: ["POST /orgs/{org}/teams"],
|
||
createDiscussionCommentInOrg: [
|
||
"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
|
||
],
|
||
createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"],
|
||
deleteDiscussionCommentInOrg: [
|
||
"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
|
||
],
|
||
deleteDiscussionInOrg: [
|
||
"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
|
||
],
|
||
deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"],
|
||
getByName: ["GET /orgs/{org}/teams/{team_slug}"],
|
||
getDiscussionCommentInOrg: [
|
||
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
|
||
],
|
||
getDiscussionInOrg: [
|
||
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
|
||
],
|
||
getMembershipForUserInOrg: [
|
||
"GET /orgs/{org}/teams/{team_slug}/memberships/{username}"
|
||
],
|
||
list: ["GET /orgs/{org}/teams"],
|
||
listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"],
|
||
listDiscussionCommentsInOrg: [
|
||
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
|
||
],
|
||
listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"],
|
||
listForAuthenticatedUser: ["GET /user/teams"],
|
||
listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"],
|
||
listPendingInvitationsInOrg: [
|
||
"GET /orgs/{org}/teams/{team_slug}/invitations"
|
||
],
|
||
listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"],
|
||
listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"],
|
||
removeMembershipForUserInOrg: [
|
||
"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"
|
||
],
|
||
removeProjectInOrg: [
|
||
"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"
|
||
],
|
||
removeRepoInOrg: [
|
||
"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
|
||
],
|
||
updateDiscussionCommentInOrg: [
|
||
"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
|
||
],
|
||
updateDiscussionInOrg: [
|
||
"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
|
||
],
|
||
updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"]
|
||
},
|
||
users: {
|
||
addEmailForAuthenticated: [
|
||
"POST /user/emails",
|
||
{},
|
||
{ renamed: ["users", "addEmailForAuthenticatedUser"] }
|
||
],
|
||
addEmailForAuthenticatedUser: ["POST /user/emails"],
|
||
addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"],
|
||
block: ["PUT /user/blocks/{username}"],
|
||
checkBlocked: ["GET /user/blocks/{username}"],
|
||
checkFollowingForUser: ["GET /users/{username}/following/{target_user}"],
|
||
checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"],
|
||
createGpgKeyForAuthenticated: [
|
||
"POST /user/gpg_keys",
|
||
{},
|
||
{ renamed: ["users", "createGpgKeyForAuthenticatedUser"] }
|
||
],
|
||
createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"],
|
||
createPublicSshKeyForAuthenticated: [
|
||
"POST /user/keys",
|
||
{},
|
||
{ renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] }
|
||
],
|
||
createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"],
|
||
createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"],
|
||
deleteEmailForAuthenticated: [
|
||
"DELETE /user/emails",
|
||
{},
|
||
{ renamed: ["users", "deleteEmailForAuthenticatedUser"] }
|
||
],
|
||
deleteEmailForAuthenticatedUser: ["DELETE /user/emails"],
|
||
deleteGpgKeyForAuthenticated: [
|
||
"DELETE /user/gpg_keys/{gpg_key_id}",
|
||
{},
|
||
{ renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] }
|
||
],
|
||
deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"],
|
||
deletePublicSshKeyForAuthenticated: [
|
||
"DELETE /user/keys/{key_id}",
|
||
{},
|
||
{ renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] }
|
||
],
|
||
deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"],
|
||
deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"],
|
||
deleteSshSigningKeyForAuthenticatedUser: [
|
||
"DELETE /user/ssh_signing_keys/{ssh_signing_key_id}"
|
||
],
|
||
follow: ["PUT /user/following/{username}"],
|
||
getAuthenticated: ["GET /user"],
|
||
getById: ["GET /user/{account_id}"],
|
||
getByUsername: ["GET /users/{username}"],
|
||
getContextForUser: ["GET /users/{username}/hovercard"],
|
||
getGpgKeyForAuthenticated: [
|
||
"GET /user/gpg_keys/{gpg_key_id}",
|
||
{},
|
||
{ renamed: ["users", "getGpgKeyForAuthenticatedUser"] }
|
||
],
|
||
getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"],
|
||
getPublicSshKeyForAuthenticated: [
|
||
"GET /user/keys/{key_id}",
|
||
{},
|
||
{ renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] }
|
||
],
|
||
getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"],
|
||
getSshSigningKeyForAuthenticatedUser: [
|
||
"GET /user/ssh_signing_keys/{ssh_signing_key_id}"
|
||
],
|
||
list: ["GET /users"],
|
||
listAttestations: ["GET /users/{username}/attestations/{subject_digest}"],
|
||
listBlockedByAuthenticated: [
|
||
"GET /user/blocks",
|
||
{},
|
||
{ renamed: ["users", "listBlockedByAuthenticatedUser"] }
|
||
],
|
||
listBlockedByAuthenticatedUser: ["GET /user/blocks"],
|
||
listEmailsForAuthenticated: [
|
||
"GET /user/emails",
|
||
{},
|
||
{ renamed: ["users", "listEmailsForAuthenticatedUser"] }
|
||
],
|
||
listEmailsForAuthenticatedUser: ["GET /user/emails"],
|
||
listFollowedByAuthenticated: [
|
||
"GET /user/following",
|
||
{},
|
||
{ renamed: ["users", "listFollowedByAuthenticatedUser"] }
|
||
],
|
||
listFollowedByAuthenticatedUser: ["GET /user/following"],
|
||
listFollowersForAuthenticatedUser: ["GET /user/followers"],
|
||
listFollowersForUser: ["GET /users/{username}/followers"],
|
||
listFollowingForUser: ["GET /users/{username}/following"],
|
||
listGpgKeysForAuthenticated: [
|
||
"GET /user/gpg_keys",
|
||
{},
|
||
{ renamed: ["users", "listGpgKeysForAuthenticatedUser"] }
|
||
],
|
||
listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"],
|
||
listGpgKeysForUser: ["GET /users/{username}/gpg_keys"],
|
||
listPublicEmailsForAuthenticated: [
|
||
"GET /user/public_emails",
|
||
{},
|
||
{ renamed: ["users", "listPublicEmailsForAuthenticatedUser"] }
|
||
],
|
||
listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"],
|
||
listPublicKeysForUser: ["GET /users/{username}/keys"],
|
||
listPublicSshKeysForAuthenticated: [
|
||
"GET /user/keys",
|
||
{},
|
||
{ renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] }
|
||
],
|
||
listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"],
|
||
listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"],
|
||
listSocialAccountsForUser: ["GET /users/{username}/social_accounts"],
|
||
listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"],
|
||
listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"],
|
||
setPrimaryEmailVisibilityForAuthenticated: [
|
||
"PATCH /user/email/visibility",
|
||
{},
|
||
{ renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] }
|
||
],
|
||
setPrimaryEmailVisibilityForAuthenticatedUser: [
|
||
"PATCH /user/email/visibility"
|
||
],
|
||
unblock: ["DELETE /user/blocks/{username}"],
|
||
unfollow: ["DELETE /user/following/{username}"],
|
||
updateAuthenticated: ["PATCH /user"]
|
||
}
|
||
};
|
||
var endpoints_default = Endpoints;
|
||
|
||
// node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js
|
||
// Index every generated endpoint definition as scope -> methodName -> record,
// so the per-scope proxies below can build request methods lazily.
var endpointMethodsMap = /* @__PURE__ */ new Map();
for (const [scope, scopedEndpoints] of Object.entries(endpoints_default)) {
  // One inner map per scope; created once instead of re-checked per method.
  let scopeMap = endpointMethodsMap.get(scope);
  if (!scopeMap) {
    scopeMap = /* @__PURE__ */ new Map();
    endpointMethodsMap.set(scope, scopeMap);
  }
  for (const [methodName, definition] of Object.entries(scopedEndpoints)) {
    // Each definition is [route, optional defaults, optional decorations].
    const [route, defaults, decorations] = definition;
    const [method, url] = route.split(" ");
    const endpointDefaults = Object.assign(
      {
        method,
        url
      },
      defaults
    );
    scopeMap.set(methodName, {
      scope,
      methodName,
      endpointDefaults,
      decorations
    });
  }
}
|
||
// Proxy traps backing each `octokit.<scope>` namespace. The proxy target is
// `{ octokit, scope, cache }`; request methods are built lazily from
// `endpointMethodsMap` on first access and memoized in `cache`.
var handler = {
  // A method "exists" iff it is registered for this scope.
  has({ scope }, methodName) {
    return endpointMethodsMap.get(scope).has(methodName);
  },
  // Describe methods as plain writable data properties so spread,
  // `Object.keys`, and descriptor-based tooling see ordinary functions.
  getOwnPropertyDescriptor(target, methodName) {
    return {
      value: this.get(target, methodName),
      // ensures method is in the cache
      configurable: true,
      writable: true,
      enumerable: true
    };
  },
  // Defines/deletes/assignments are redirected to the cache object so the
  // proxy target itself is never mutated.
  defineProperty(target, methodName, descriptor) {
    Object.defineProperty(target.cache, methodName, descriptor);
    return true;
  },
  deleteProperty(target, methodName) {
    delete target.cache[methodName];
    return true;
  },
  // Enumerate every method name registered for this scope.
  ownKeys({ scope }) {
    return [...endpointMethodsMap.get(scope).keys()];
  },
  set(target, methodName, value) {
    return target.cache[methodName] = value;
  },
  // Lazily build the request method on first access, then memoize it.
  get({ octokit, scope, cache }, methodName) {
    if (cache[methodName]) {
      return cache[methodName];
    }
    const method = endpointMethodsMap.get(scope).get(methodName);
    if (!method) {
      return void 0;
    }
    const { endpointDefaults, decorations } = method;
    if (decorations) {
      // Renamed/deprecated/mapToData endpoints need a warning/rewriting
      // wrapper around the plain request function; see `decorate`.
      cache[methodName] = decorate(
        octokit,
        scope,
        methodName,
        endpointDefaults,
        decorations
      );
    } else {
      cache[methodName] = octokit.request.defaults(endpointDefaults);
    }
    return cache[methodName];
  }
};
|
||
// Build the `{ scope: proxy }` API surface: one lazy Proxy per scope,
// each backed by the shared `handler` traps and its own method cache.
function endpointsToMethods(octokit) {
  return Object.fromEntries(
    [...endpointMethodsMap.keys()].map((scope) => [
      scope,
      new Proxy({ octokit, scope, cache: {} }, handler)
    ])
  );
}
|
||
// Wrap a request method whose endpoint definition carries `decorations`
// (deprecation notices, renames, parameter renames, or a `mapToData` body
// mapping). The wrapper behaves like `octokit.request.defaults(defaults)`
// but applies/warns about the decorations on every call.
function decorate(octokit, scope, methodName, defaults, decorations) {
  const requestWithDefaults = octokit.request.defaults(defaults);
  function withDecorations(...args) {
    let options2 = requestWithDefaults.endpoint.merge(...args);
    // mapToData: move the named parameter into the request body (`data`)
    // and blank the original key before sending.
    if (decorations.mapToData) {
      options2 = Object.assign({}, options2, {
        data: options2[decorations.mapToData],
        [decorations.mapToData]: void 0
      });
      return requestWithDefaults(options2);
    }
    // renamed: the endpoint still works, but warn callers toward the new name.
    if (decorations.renamed) {
      const [newScope, newMethodName] = decorations.renamed;
      octokit.log.warn(
        `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`
      );
    }
    if (decorations.deprecated) {
      octokit.log.warn(decorations.deprecated);
    }
    if (decorations.renamedParameters) {
      // Merge again to get a fresh options object to rewrite in place.
      const options22 = requestWithDefaults.endpoint.merge(...args);
      for (const [name, alias] of Object.entries(
        decorations.renamedParameters
      )) {
        if (name in options22) {
          octokit.log.warn(
            `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`
          );
          // Copy the old parameter to its new name unless the caller already
          // supplied the new one, then drop the old key.
          if (!(alias in options22)) {
            options22[alias] = options22[name];
          }
          delete options22[name];
        }
      }
      return requestWithDefaults(options22);
    }
    return requestWithDefaults(...args);
  }
  // Expose `.endpoint`, `.defaults`, etc. from the underlying request method.
  return Object.assign(withDecorations, requestWithDefaults);
}
|
||
|
||
// node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js
|
||
// Octokit plugin entry point: expose the generated endpoint methods under
// the `.rest` namespace only.
function restEndpointMethods(octokit) {
  return {
    rest: endpointsToMethods(octokit)
  };
}
restEndpointMethods.VERSION = VERSION7;
|
||
// Legacy plugin entry point: the endpoint methods are exposed both at the
// top level (deprecated flat layout) and under the `.rest` namespace.
function legacyRestEndpointMethods(octokit) {
  const api = endpointsToMethods(octokit);
  return Object.assign({}, api, { rest: api });
}
legacyRestEndpointMethods.VERSION = VERSION7;
|
||
|
||
// node_modules/@octokit/rest/dist-src/version.js
// Bundled @octokit/rest version; only used for the User-Agent string below.
var VERSION8 = "21.1.1";

// node_modules/@octokit/rest/dist-src/index.js
// Octokit core extended with request logging, the REST endpoint methods
// (legacy flat layout plus `.rest`), and pagination support.
var Octokit2 = Octokit.plugin(requestLog, legacyRestEndpointMethods, paginateRest).defaults(
  {
    userAgent: `octokit-rest.js/${VERSION8}`
  }
);
|
||
|
||
// src/shared-gists.ts
|
||
var import_gray_matter = __toESM(require_gray_matter());
|
||
// Resolve the API base URL a shared gist was created against, defaulting
// to github.com when the record carries no explicit baseUrl.
var getBaseUrlForSharedGist = function (sharedGist) {
  if (sharedGist.baseUrl) {
    return sharedGist.baseUrl;
  }
  return DOTCOM_BASE_URL;
};
// Classify a shared gist as targeting github.com or a GHES instance.
var getTargetForSharedGist = function (sharedGist) {
  const isDotcom = getBaseUrlForSharedGist(sharedGist) === DOTCOM_BASE_URL;
  return isDotcom ? "dotcom" /* Dotcom */ : "github_enterprise_server" /* GitHubEnterpriseServer */;
};
|
||
// Read the `gists` list from a note's front matter. With no `target` the
// full list is returned (as a fresh array); otherwise only the gists whose
// resolved target matches.
var getSharedGistsForFile = (fileContents, target) => {
  const parsed = (0, import_gray_matter.default)(fileContents);
  const allGists = parsed.data.gists || [];
  if (typeof target === "undefined") {
    return [...allGists];
  }
  return allGists.filter((gist) => getTargetForSharedGist(gist) === target);
};
|
||
// Add a shared gist to a note's front matter, or replace the entry with the
// same gist `id` if one already exists. Returns the re-serialized file text.
//
// Fix: both branches previously duplicated the identical
// `updatedData`/`stringify` tail; the common code is now shared while
// preserving the original ordering (the upserted gist always goes last).
var upsertSharedGistForFile = (sharedGist, fileContents) => {
  const { data, content: content3 } = (0, import_gray_matter.default)(fileContents);
  const existingSharedGists = data.gists || [];
  // An existing entry is matched by gist id (first match, as before).
  const matchingGist = existingSharedGists.find(
    (existingSharedGist) => existingSharedGist.id === sharedGist.id
  );
  const gists = matchingGist ? [
    ...existingSharedGists.filter(
      (existingSharedGist) => existingSharedGist !== matchingGist
    ),
    sharedGist
  ] : [...existingSharedGists, sharedGist];
  const updatedData = __spreadProps(__spreadValues({}, data), { gists });
  return import_gray_matter.default.stringify(content3, updatedData);
};
|
||
// Remove the entry with the same gist `id` from a note's front matter and
// return the re-serialized file text. When no gists remain, the `gists`
// key is dropped entirely rather than left as an empty list.
var removeSharedGistForFile = (sharedGist, fileContents) => {
  const parsed = (0, import_gray_matter.default)(fileContents);
  const body = parsed.content;
  const remaining = (parsed.data.gists || []).filter(
    (candidate) => candidate.id !== sharedGist.id
  );
  const updatedData = __spreadProps(__spreadValues({}, parsed.data), { gists: remaining });
  if (remaining.length === 0) {
    delete updatedData.gists;
  }
  return import_gray_matter.default.stringify(body, updatedData);
};
|
||
|
||
// src/storage.ts
// Credential storage helpers. All values are persisted in the browser's
// localStorage under the keys below.
// NOTE(review): access tokens are stored in plain text in localStorage —
// confirm this is the intended trade-off for this plugin.
var DOTCOM_ACCESS_TOKEN_LOCAL_STORAGE_KEY = "share_as_gist_dotcom_access_token";
var GHES_BASE_URL_LOCAL_STORAGE_KEY = "share_as_gist_ghes_base_url";
var GHES_ACCESS_TOKEN_LOCAL_STORAGE_KEY = "share_as_gist_ghes_access_token";
// Read/write the GitHub.com personal access token.
var getDotcomAccessToken = () => localStorage.getItem(DOTCOM_ACCESS_TOKEN_LOCAL_STORAGE_KEY);
var setDotcomAccessToken = (accessToken) => localStorage.setItem(DOTCOM_ACCESS_TOKEN_LOCAL_STORAGE_KEY, accessToken);
// GitHub.com sharing is available once a token has been saved.
var isDotcomEnabled = () => !!getDotcomAccessToken();
// Read/write the GitHub Enterprise Server API base URL and access token.
var getGhesBaseUrl = () => localStorage.getItem(GHES_BASE_URL_LOCAL_STORAGE_KEY);
var setGhesBaseUrl = (baseUrl) => localStorage.setItem(GHES_BASE_URL_LOCAL_STORAGE_KEY, baseUrl);
var getGhesAccessToken = () => localStorage.getItem(GHES_ACCESS_TOKEN_LOCAL_STORAGE_KEY);
var setGhesAccessToken = (accessToken) => localStorage.setItem(GHES_ACCESS_TOKEN_LOCAL_STORAGE_KEY, accessToken);
// GHES sharing requires both a base URL and a token to be configured.
var isGhesEnabled = () => !!getGhesBaseUrl() && !!getGhesAccessToken();
|
||
// Resolve a share target ("dotcom" or "github_enterprise_server") to the
// REST API base URL that should be used for it. Returns undefined for any
// unrecognized target, matching the original switch fall-through.
var getTargetBaseUrl = (target) => {
  if (target === "dotcom" /* Dotcom */) {
    return DOTCOM_BASE_URL;
  }
  if (target === "github_enterprise_server" /* GitHubEnterpriseServer */) {
    // The GHES base URL is user-configured and read from localStorage.
    return getGhesBaseUrl();
  }
};
|
||
// Pick the stored access token that matches a REST API base URL: the
// GitHub.com token for the dotcom API, otherwise the GHES token.
var getAccessTokenForBaseUrl = (baseUrl) => baseUrl === DOTCOM_BASE_URL ? getDotcomAccessToken() : getGhesAccessToken();
|
||
|
||
// src/gists.ts
// REST API root for GitHub.com (GHES instances use their own configured URL).
var DOTCOM_BASE_URL = "https://api.github.com";
|
||
// Update the contents of an existing shared gist via the GitHub API.
// opts: { sharedGist, content }. Resolves to a result object:
//   { status, sharedGist, errorMessage } — on success `sharedGist` carries the
// refreshed `updatedAt` timestamp; on failure `errorMessage` explains why.
// Never rejects: all errors are converted into a "failed" result.
var updateGist = (opts) => __async(void 0, null, function* () {
  const { sharedGist, content: content3 } = opts;
  const baseUrl = getBaseUrlForSharedGist(sharedGist);
  const accessToken = getAccessTokenForBaseUrl(baseUrl);
  if (!accessToken) {
    return {
      status: "failed" /* Failed */,
      sharedGist,
      errorMessage: `No access token found for the ${baseUrl} target.`
    };
  }
  try {
    const octokit = new Octokit2({
      auth: accessToken,
      baseUrl
    });
    const response = yield octokit.rest.gists.update({
      gist_id: sharedGist.id,
      files: {
        [sharedGist.filename]: { content: content3 }
      }
    });
    return {
      status: "succeeded" /* Succeeded */,
      sharedGist: __spreadProps(__spreadValues({}, sharedGist), { updatedAt: response.data.updated_at }),
      errorMessage: null
    };
  } catch (e) {
    // Guard against non-Error throwables (e.g. strings), which have no
    // `.message` — mirrors the handling already used by deleteGist.
    let errorMessage = "An unknown error occurred.";
    if (e instanceof Error) {
      errorMessage = e.message;
    }
    return {
      status: "failed" /* Failed */,
      sharedGist,
      errorMessage
    };
  }
});
|
||
// Delete a shared gist via the GitHub API.
// opts: { sharedGist }. Resolves to { status, errorMessage }; never rejects —
// failures (missing token, API errors) become a "failed" result.
var deleteGist = (opts) => __async(void 0, null, function* () {
  const { sharedGist } = opts;
  const baseUrl = getBaseUrlForSharedGist(sharedGist);
  const accessToken = getAccessTokenForBaseUrl(baseUrl);
  // Without a token for this gist's host we cannot call the API at all.
  if (!accessToken) {
    return {
      status: "failed" /* Failed */,
      errorMessage: `No access token found for the ${baseUrl} target.`
    };
  }
  try {
    const client = new Octokit2({ auth: accessToken, baseUrl });
    yield client.rest.gists.delete({ gist_id: sharedGist.id });
    return { status: "succeeded" /* Succeeded */, errorMessage: null };
  } catch (e) {
    // Non-Error throwables carry no `.message`; fall back to a generic text.
    const errorMessage = e instanceof Error ? e.message : "An unknown error occurred.";
    return { status: "failed" /* Failed */, errorMessage };
  }
});
|
||
// Create a brand-new gist on the chosen target (GitHub.com or GHES).
// opts: { content, description, filename, isPublic, target }.
// Resolves to { status, sharedGist, errorMessage }; on success `sharedGist`
// records id/url/timestamps/visibility/baseUrl so the note can track it.
// Never rejects: all errors are converted into a "failed" result.
var createGist = (opts) => __async(void 0, null, function* () {
  try {
    const { content: content3, description, filename, isPublic, target } = opts;
    const baseUrl = getTargetBaseUrl(target);
    const accessToken = getAccessTokenForBaseUrl(baseUrl);
    const octokit = new Octokit2({
      auth: accessToken,
      baseUrl
    });
    const response = yield octokit.rest.gists.create({
      // Gists need a description; fall back to the filename if none given.
      description: description || filename,
      public: isPublic,
      files: {
        [filename]: { content: content3 }
      }
    });
    return {
      status: "succeeded" /* Succeeded */,
      sharedGist: {
        id: response.data.id,
        url: response.data.html_url,
        createdAt: response.data.created_at,
        updatedAt: response.data.updated_at,
        filename,
        isPublic,
        baseUrl
      },
      errorMessage: null
    };
  } catch (e) {
    // Guard against non-Error throwables (e.g. strings), which have no
    // `.message` — mirrors the handling already used by deleteGist.
    let errorMessage = "An unknown error occurred.";
    if (e instanceof Error) {
      errorMessage = e.message;
    }
    return {
      status: "failed" /* Failed */,
      sharedGist: null,
      errorMessage
    };
  }
});
|
||
|
||
// main.ts
// Default plugin settings, merged with persisted data in loadSettings().
var DEFAULT_SETTINGS = {
  // Strip YAML front matter from shared content unless enabled.
  includeFrontMatter: false,
  // Track created gists in front matter so they can be updated later.
  enableUpdatingGistsAfterCreation: true,
  // Auto-push edits to linked gists (debounced) when the note changes.
  enableAutoSaving: false,
  // Show a notice each time an auto-save succeeds.
  showAutoSaveNotice: false,
  // Insert a remark-generated table of contents into the shared content.
  includeTableOfContents: false
};
|
||
// Return the plugin's settings, re-reading them from disk first so changes
// made outside this code path are picked up. loadSettings() mutates
// plugin.settings, so the read must come after the yield.
var getLatestSettings = (plugin) => __async(void 0, null, function* () {
  yield plugin.loadSettings();
  return plugin.settings;
});
|
||
// Run the markdown through remark's table-of-contents plugin and return the
// resulting text. remark-toc inserts the TOC under an existing "Contents"-
// style heading, if one is present.
var addTableOfContents = (content3) => __async(void 0, null, function* () {
  const processed = yield remark().use(remarkToc).process(content3);
  // process() yields a VFile; String() extracts its text contents.
  return String(processed);
});
|
||
// Return only the markdown body of a note, dropping any YAML front matter.
var stripFrontMatter = (content3) => {
  const parsed = (0, import_gray_matter2.default)(content3);
  return parsed.content;
};
|
||
// Run `callback` with a shared gist already linked to the active note.
// - No active markdown view: show a notice and bail.
// - No linked gists: if gist-tracking is disabled, explain how to enable it;
//   otherwise show `noGistNotice`. In both cases `callback` is NOT invoked.
// - Multiple linked gists: let the user pick one via SelectExistingGistModal
//   (the `false` argument disables the "create new gist" option).
// - Exactly one linked gist: invoke `callback` with it directly.
var withExistingSharedGist = (app, plugin, callback, noGistNotice) => __async(void 0, null, function* () {
  const view = app.workspace.getActiveViewOfType(import_obsidian.MarkdownView);
  if (!view) {
    new import_obsidian.Notice("No active file");
    return;
  }
  const editor = view.editor;
  const originalContent = editor.getValue();
  const existingSharedGists = getSharedGistsForFile(originalContent);
  if (existingSharedGists.length === 0) {
    const { enableUpdatingGistsAfterCreation } = yield getLatestSettings(plugin);
    if (!enableUpdatingGistsAfterCreation) {
      new import_obsidian.Notice(
        "You need to enable 'Update gists after creation' in Settings to use this command."
      );
      return;
    } else {
      new import_obsidian.Notice(noGistNotice);
    }
  } else if (existingSharedGists.length > 1) {
    new SelectExistingGistModal(
      app,
      existingSharedGists,
      false,
      callback
    ).open();
  } else {
    yield callback(existingSharedGists[0]);
  }
});
|
||
// Command callback: copy the URL of a gist linked to the active note to the
// clipboard. Gist resolution (no-view / none / many / one) is delegated to
// withExistingSharedGist.
var copyGistUrlEditorCallback = (opts) => () => __async(void 0, null, function* () {
  const { app, plugin } = opts;
  const copyUrl = (sharedGist) => __async(void 0, null, function* () {
    navigator.clipboard.writeText(sharedGist.url);
    new import_obsidian.Notice("Copied gist URL to clipboard.");
  });
  yield withExistingSharedGist(
    app,
    plugin,
    copyUrl,
    "You must share this note as a gist before you can copy its URL to the clipboard."
  );
});
|
||
// Command callback: open a gist linked to the active note in a new browser
// tab. Gist resolution is delegated to withExistingSharedGist.
var openGistEditorCallback = (opts) => () => __async(void 0, null, function* () {
  const { app, plugin } = opts;
  const openInBrowser = (sharedGist) => __async(void 0, null, function* () {
    window.open(sharedGist.url, "_blank");
  });
  yield withExistingSharedGist(
    app,
    plugin,
    openInBrowser,
    "You must share this note as a gist before you can open its gist."
  );
});
|
||
// Command callback: delete a gist linked to the active note, then strip its
// record from the note's front matter.
var deleteGistEditorCallback = (opts) => () => __async(void 0, null, function* () {
  const { app, plugin } = opts;
  const view = app.workspace.getActiveViewOfType(import_obsidian.MarkdownView);
  if (!view) {
    return new import_obsidian.Notice("No active file");
  }
  const editor = view.editor;
  // Captured BEFORE the (possibly user-interactive) gist selection below;
  // the front-matter rewrite is based on this snapshot.
  const originalContent = editor.getValue();
  yield withExistingSharedGist(
    app,
    plugin,
    (sharedGist) => __async(void 0, null, function* () {
      const result = yield deleteGist({ sharedGist });
      if (result.status === "succeeded" /* Succeeded */) {
        // Remove the deleted gist's entry from the note's front matter.
        const updatedContent = removeSharedGistForFile(
          sharedGist,
          originalContent
        );
        editor.setValue(updatedContent);
        new import_obsidian.Notice("Gist deleted");
      } else {
        new import_obsidian.Notice(`Error: ${result.errorMessage}`);
      }
    }),
    "There are no gists associated with this note."
  );
});
|
||
// Command callback: share the active note as a gist (public or private, on
// GitHub.com or GHES, per `opts`).
// Flow:
// - If gist-tracking is on AND the note already has gists for this
//   target/visibility, show a picker: the user can update an existing gist
//   or (via the modal's `true` flag) choose to create a new one.
// - Otherwise prompt for a description and create a brand-new gist.
// On success the gist URL is copied to the clipboard and — when tracking is
// enabled — the gist's record is upserted into the note's front matter.
var shareGistEditorCallback = (opts) => () => __async(void 0, null, function* () {
  const { isPublic, app, plugin, target } = opts;
  const {
    enableUpdatingGistsAfterCreation,
    includeFrontMatter,
    includeTableOfContents
  } = yield getLatestSettings(plugin);
  const view = app.workspace.getActiveViewOfType(import_obsidian.MarkdownView);
  if (!view) {
    return new import_obsidian.Notice("No active file");
  }
  const editor = view.editor;
  const originalContent = editor.getValue();
  const filename = view.file.name;
  // Only gists on this target with the same visibility are update candidates.
  const existingSharedGists = getSharedGistsForFile(
    originalContent,
    target
  ).filter((sharedGist) => sharedGist.isPublic === isPublic);
  // Build the content actually sent to the gist: optionally strip front
  // matter, optionally inject a table of contents.
  const baseContent = includeFrontMatter ? originalContent : stripFrontMatter(originalContent);
  const content3 = includeTableOfContents ? yield addTableOfContents(baseContent) : baseContent;
  if (enableUpdatingGistsAfterCreation && existingSharedGists.length) {
    new SelectExistingGistModal(
      app,
      existingSharedGists,
      true,
      (sharedGist) => __async(void 0, null, function* () {
        if (sharedGist) {
          // User picked an existing gist: update it in place.
          const result = yield updateGist({
            sharedGist,
            content: content3
          });
          if (result.status === "succeeded" /* Succeeded */) {
            navigator.clipboard.writeText(result.sharedGist.url);
            new import_obsidian.Notice(
              `Copied ${isPublic ? "public" : "private"} gist URL to clipboard`
            );
            const updatedContent = upsertSharedGistForFile(
              result.sharedGist,
              originalContent
            );
            editor.setValue(updatedContent);
          } else {
            new import_obsidian.Notice(`Error: ${result.errorMessage}`);
          }
        } else {
          // User picked "Create new gist" (the modal passes null).
          new SetGistDescriptionModal(app, filename, (description) => __async(void 0, null, function* () {
            const result = yield createGist({
              target,
              content: content3,
              description,
              filename,
              isPublic
            });
            if (result.status === "succeeded" /* Succeeded */) {
              navigator.clipboard.writeText(result.sharedGist.url);
              new import_obsidian.Notice(
                `Copied ${isPublic ? "public" : "private"} gist URL to clipboard`
              );
              const updatedContent = upsertSharedGistForFile(
                result.sharedGist,
                originalContent
              );
              editor.setValue(updatedContent);
            } else {
              new import_obsidian.Notice(`Error: ${result.errorMessage}`);
            }
          })).open();
        }
      })
    ).open();
  } else {
    // No update candidates (or tracking disabled): always create a new gist.
    new SetGistDescriptionModal(app, filename, (description) => __async(void 0, null, function* () {
      const result = yield createGist({
        target,
        content: content3,
        description,
        filename,
        isPublic
      });
      if (result.status === "succeeded" /* Succeeded */) {
        navigator.clipboard.writeText(result.sharedGist.url);
        new import_obsidian.Notice(
          `Copied ${isPublic ? "public" : "private"} gist URL to clipboard`
        );
        if (enableUpdatingGistsAfterCreation) {
          const updatedContent = upsertSharedGistForFile(
            result.sharedGist,
            originalContent
          );
          // NOTE: this branch persists via the vault (not editor.setValue)
          // and refreshes the editor — unlike the update branch above.
          app.vault.modify(view.file, updatedContent);
          editor.refresh();
        }
      } else {
        new import_obsidian.Notice(`GitHub API error: ${result.errorMessage}`);
      }
    })).open();
  }
});
|
||
// Auto-save hook, invoked (debounced) when a note with linked gists changes.
// Pushes the note's current content to EVERY linked gist, updating the
// note's front matter after each successful push.
// Fixes two defects in the original:
//   1. When showAutoSaveNotice was on, `return new Notice(...)` inside the
//      loop aborted after the first gist, leaving the rest stale.
//   2. Each iteration upserted into the original `rawContent`, so the second
//      write clobbered the first gist's front-matter update. We now thread
//      the latest content through the loop.
var documentChangedAutoSaveCallback = (opts) => __async(void 0, null, function* () {
  const { plugin, file, content: rawContent } = opts;
  const { includeFrontMatter, showAutoSaveNotice, includeTableOfContents } = yield getLatestSettings(plugin);
  const existingSharedGists = getSharedGistsForFile(rawContent);
  const baseContent = includeFrontMatter ? rawContent : stripFrontMatter(rawContent);
  const content3 = includeTableOfContents ? yield addTableOfContents(baseContent) : baseContent;
  if (!existingSharedGists.length) {
    return;
  }
  // Latest on-disk content; each successful upsert builds on the previous.
  let currentContent = rawContent;
  for (const sharedGist of existingSharedGists) {
    const result = yield updateGist({
      sharedGist,
      content: content3
    });
    if (result.status !== "succeeded" /* Succeeded */) {
      return new import_obsidian.Notice(`Error: ${result.errorMessage}`);
    }
    currentContent = upsertSharedGistForFile(result.sharedGist, currentContent);
    yield file.vault.adapter.write(file.path, currentContent);
  }
  if (showAutoSaveNotice) {
    return new import_obsidian.Notice("Gist updated");
  }
});
|
||
// Command-palette check: true when the editor's current note has at least
// one gist recorded in its front matter.
var hasAtLeastOneSharedGist = (editor) => getSharedGistsForFile(editor.getValue()).length > 0;
|
||
// Plugin entry point: registers commands, the auto-save vault listener, and
// the settings tab.
// Fix: the vault "modify" listener is now registered through
// this.registerEvent(...) so Obsidian detaches it when the plugin unloads;
// previously the bare vault.on(...) leaked across unload/reload cycles.
var ShareAsGistPlugin = class extends import_obsidian.Plugin {
  onload() {
    return __async(this, null, function* () {
      yield this.loadSettings();
      this.registerCommands();
      this.addModifyCallback();
      this.addSettingTab(new ShareAsGistSettingTab(this.app, this));
    });
  }
  // Register an editor command that only appears when `performCheck` passes
  // (e.g. credentials configured, note has linked gists).
  addEditorCommandWithCheck(opts) {
    const { id, name, performCheck, callback } = opts;
    this.addCommand({
      id,
      name,
      editorCheckCallback: (checking, editor, ctx) => {
        if (performCheck(editor, ctx)) {
          if (checking) {
            // Obsidian is only probing availability; report it.
            return true;
          }
          callback(editor, ctx);
        }
      }
    });
  }
  // All user-facing commands: share (public/private x dotcom/GHES), plus
  // copy/open/delete for already-shared notes.
  registerCommands() {
    this.addEditorCommandWithCheck({
      id: "share-as-private-dotcom-gist",
      name: "Share as private gist on GitHub.com",
      callback: shareGistEditorCallback({
        plugin: this,
        app: this.app,
        isPublic: false,
        target: "dotcom" /* Dotcom */
      }),
      performCheck: isDotcomEnabled
    });
    this.addEditorCommandWithCheck({
      id: "share-as-public-dotcom-gist",
      name: "Share as public gist on GitHub.com",
      callback: shareGistEditorCallback({
        plugin: this,
        app: this.app,
        isPublic: true,
        target: "dotcom" /* Dotcom */
      }),
      performCheck: isDotcomEnabled
    });
    this.addEditorCommandWithCheck({
      id: "share-as-private-ghes-gist",
      name: "Share as private gist on GitHub Enterprise Server",
      callback: shareGistEditorCallback({
        plugin: this,
        app: this.app,
        isPublic: false,
        target: "github_enterprise_server" /* GitHubEnterpriseServer */
      }),
      performCheck: isGhesEnabled
    });
    this.addEditorCommandWithCheck({
      id: "share-as-public-ghes-gist",
      name: "Share as public gist on GitHub Enterprise Server",
      callback: shareGistEditorCallback({
        plugin: this,
        app: this.app,
        isPublic: true,
        target: "github_enterprise_server" /* GitHubEnterpriseServer */
      }),
      performCheck: isGhesEnabled
    });
    this.addEditorCommandWithCheck({
      id: "copy-gist-url",
      name: "Copy gist URL",
      callback: copyGistUrlEditorCallback({
        plugin: this,
        app: this.app
      }),
      performCheck: hasAtLeastOneSharedGist
    });
    this.addEditorCommandWithCheck({
      id: "open-gist-url",
      name: "Open gist",
      callback: openGistEditorCallback({
        plugin: this,
        app: this.app
      }),
      performCheck: hasAtLeastOneSharedGist
    });
    this.addEditorCommandWithCheck({
      id: "delete-gist",
      name: "Delete gist",
      callback: deleteGistEditorCallback({
        plugin: this,
        app: this.app
      }),
      performCheck: hasAtLeastOneSharedGist
    });
  }
  // Watch for note modifications and debounce auto-saves to linked gists.
  addModifyCallback() {
    // Per-path cache of the last seen body (front matter stripped), used to
    // skip events where only front matter changed (e.g. our own upserts).
    const previousContents = {};
    // One debounced auto-save function per file path.
    const debouncedCallbacks = {};
    this.registerEvent(
      this.app.vault.on("modify", (file) => __async(this, null, function* () {
        const content3 = yield file.vault.adapter.read(file.path);
        if (stripFrontMatter(content3) === previousContents[file.path]) {
          return;
        }
        previousContents[file.path] = stripFrontMatter(content3);
        if (!debouncedCallbacks[file.path]) {
          debouncedCallbacks[file.path] = (0, import_obsidian.debounce)(
            (content4, file2) => __async(this, null, function* () {
              return yield documentChangedAutoSaveCallback({
                plugin: this,
                app: this.app,
                content: content4,
                file: file2
              });
            }),
            // 15-second trailing debounce.
            15 * 1e3,
            true
          );
        }
        const { enableAutoSaving } = yield getLatestSettings(this);
        if (enableAutoSaving) {
          yield debouncedCallbacks[file.path](content3, file);
        }
      }))
    );
  }
  // Merge persisted data over the defaults into this.settings.
  loadSettings() {
    return __async(this, null, function* () {
      this.settings = Object.assign({}, DEFAULT_SETTINGS, yield this.loadData());
    });
  }
  saveSettings() {
    return __async(this, null, function* () {
      yield this.saveData(this.settings);
    });
  }
};
|
||
// Suggest-modal listing a note's shared gists. When `allowCreatingNewGist`
// is true, a trailing `null` entry is rendered as "Create new gist" and the
// `onSubmit` callback receives null for it.
var SelectExistingGistModal = class extends import_obsidian.SuggestModal {
  constructor(app, sharedGists, allowCreatingNewGist, onSubmit) {
    super(app);
    this.sharedGists = sharedGists;
    this.allowCreatingNewGist = allowCreatingNewGist;
    this.onSubmit = onSubmit;
  }
  // NOTE(review): SuggestModal.getSuggestions receives the typed query, which
  // is ignored here — suggestions are never filtered. Confirm intentional.
  getSuggestions() {
    if (this.allowCreatingNewGist) {
      // `null` is the sentinel rendered as "Create new gist".
      return this.sharedGists.concat(null);
    } else {
      return this.sharedGists;
    }
  }
  renderSuggestion(sharedGist, el) {
    if (sharedGist === null) {
      el.createEl("div", { text: "Create new gist" });
    } else {
      // Label dotcom gists as "GitHub.com"; GHES gists by their host name.
      const targetLabel = getTargetForSharedGist(sharedGist) === "dotcom" /* Dotcom */ ? "GitHub.com" : new URL(sharedGist.baseUrl).host;
      el.createEl("div", {
        text: (sharedGist.isPublic ? "Public gist" : "Private gist") + ` on ${targetLabel}`
      });
      el.createEl("small", { text: `Created at ${sharedGist.createdAt}` });
    }
  }
  onChooseSuggestion(sharedGist) {
    // Close only after the async submit handler finishes.
    this.onSubmit(sharedGist).then(() => this.close());
  }
};
|
||
// Modal prompting for a gist description before creation. The filename is
// pre-filled as the default description; submit via the "Create gist"
// button or the Return key.
var SetGistDescriptionModal = class extends import_obsidian.Modal {
  constructor(app, filename, onSubmit) {
    super(app);
    // Default the description to the note's filename.
    this.description = null;
    this.description = filename;
    this.onSubmit = onSubmit;
  }
  onOpen() {
    const { contentEl } = this;
    contentEl.createEl("h1", { text: "Set a description for your gist" });
    contentEl.createEl("p", {
      text: "Hit the Return key to continue",
      attr: { style: "font-style: italic" }
    });
    new import_obsidian.Setting(contentEl).setName("Description").addTextArea((text4) => {
      text4.inputEl.setCssStyles({ width: "100%" });
      text4.setValue(this.description).onChange((value) => {
        this.description = value;
      });
    });
    new import_obsidian.Setting(contentEl).addButton(
      (btn) => btn.setButtonText("Create gist").setCta().onClick(() => {
        this.close();
        this.onSubmit(this.description);
      })
    );
    // Let a plain Return key submit the modal as well.
    this.scope.register([], "Enter", (evt) => {
      evt.preventDefault();
      // Don't submit while an IME composition is in progress.
      if (evt.isComposing) {
        return;
      }
      this.close();
      this.onSubmit(this.description);
    });
  }
  onClose() {
    const { contentEl } = this;
    contentEl.empty();
  }
};
|
||
// Settings tab: GitHub.com / GHES credentials (persisted via localStorage
// setters, not plugin data) plus the behavioral toggles stored in
// plugin.settings.
var ShareAsGistSettingTab = class extends import_obsidian.PluginSettingTab {
  constructor(app, plugin) {
    super(app, plugin);
    this.plugin = plugin;
  }
  display() {
    const { containerEl } = this;
    const dotcomAccessToken = getDotcomAccessToken();
    const ghesBaseUrl = getGhesBaseUrl();
    const ghesAccessToken = getGhesAccessToken();
    containerEl.empty();
    containerEl.createEl("h2", { text: "Share as Gist" });
    containerEl.createEl("h3", { text: "GitHub.com" });
    // Credential fields write straight to localStorage on every keystroke.
    new import_obsidian.Setting(containerEl).setName("Personal access token").setDesc(
      'An access token for GitHub.com with permission to write gists. You can create one from "Settings" in your GitHub account.'
    ).addText(
      (text4) => text4.setPlaceholder("Your personal access token").setValue(dotcomAccessToken).onChange((value) => __async(this, null, function* () {
        setDotcomAccessToken(value);
        yield this.plugin.saveSettings();
      }))
    );
    containerEl.createEl("h3", { text: "GitHub Enterprise Server" });
    new import_obsidian.Setting(containerEl).setName("Base URL").setDesc(
      "The base URL for the GitHub REST API on your GitHub Enterprise Server instance. This usually ends with `/api/v3`."
    ).addText(
      (text4) => text4.setPlaceholder("https://github.example.com/api/v3").setValue(ghesBaseUrl).onChange((value) => __async(this, null, function* () {
        setGhesBaseUrl(value);
        yield this.plugin.saveSettings();
      }))
    );
    new import_obsidian.Setting(containerEl).setName("Personal access token").setDesc(
      'An access token for your GitHub Enterprise Server instance with permission to write gists. You can create one from "Settings" in your GitHub account.'
    ).addText(
      (text4) => text4.setPlaceholder("Your personal access token").setValue(ghesAccessToken).onChange((value) => __async(this, null, function* () {
        setGhesAccessToken(value);
        yield this.plugin.saveSettings();
      }))
    );
    containerEl.createEl("h3", { text: "Advanced options" });
    // The toggles below mirror the keys of DEFAULT_SETTINGS one-to-one.
    new import_obsidian.Setting(containerEl).setName("Enable updating gists after creation").setDesc(
      "Whether gists should be updateable through this plugin after creation. If this is turned on, when you create a gist, you will be able to choose to update an existing gist (if one exists) or create a brand new one. To make this possible, front matter will be added to your notes to track gists that you have created. If this is turned off, a brand new gist will always be created."
    ).addToggle(
      (toggle) => toggle.setValue(this.plugin.settings.enableUpdatingGistsAfterCreation).onChange((value) => __async(this, null, function* () {
        this.plugin.settings.enableUpdatingGistsAfterCreation = value;
        yield this.plugin.saveSettings();
      }))
    );
    new import_obsidian.Setting(containerEl).setName("Include front matter in gists").setDesc(
      "Whether the front matter should be included or stripped away when a note is shared as a gist"
    ).addToggle(
      (toggle) => toggle.setValue(this.plugin.settings.includeFrontMatter).onChange((value) => __async(this, null, function* () {
        this.plugin.settings.includeFrontMatter = value;
        yield this.plugin.saveSettings();
      }))
    );
    new import_obsidian.Setting(containerEl).setName("Enable auto-saving Gists after edit").setDesc("Whether to update linked gists when the document is updated").addToggle(
      (toggle) => toggle.setValue(this.plugin.settings.enableAutoSaving).onChange((value) => __async(this, null, function* () {
        this.plugin.settings.enableAutoSaving = value;
        yield this.plugin.saveSettings();
      }))
    );
    new import_obsidian.Setting(containerEl).setName("Enable auto-save notice").setDesc("Whether to show a notice when a linked gist is auto-saved").addToggle(
      (toggle) => toggle.setValue(this.plugin.settings.showAutoSaveNotice).onChange((value) => __async(this, null, function* () {
        this.plugin.settings.showAutoSaveNotice = value;
        yield this.plugin.saveSettings();
      }))
    );
    new import_obsidian.Setting(containerEl).setName("Include table of contents").setDesc(
      "Whether to automatically generate and include a table of contents in the shared gist. The table of contents will be inserted directly after the `Contents` or `Table of Contents` heading, if one exists."
    ).addToggle(
      (toggle) => toggle.setValue(this.plugin.settings.includeTableOfContents).onChange((value) => __async(this, null, function* () {
        this.plugin.settings.includeTableOfContents = value;
        yield this.plugin.saveSettings();
      }))
    );
  }
};
|
||
/*! Bundled license information:
|
||
|
||
is-extendable/index.js:
|
||
(*!
|
||
* is-extendable <https://github.com/jonschlinkert/is-extendable>
|
||
*
|
||
* Copyright (c) 2015, Jon Schlinkert.
|
||
* Licensed under the MIT License.
|
||
*)
|
||
|
||
strip-bom-string/index.js:
|
||
(*!
|
||
* strip-bom-string <https://github.com/jonschlinkert/strip-bom-string>
|
||
*
|
||
* Copyright (c) 2015, 2017, Jon Schlinkert.
|
||
* Released under the MIT License.
|
||
*)
|
||
*/
|
||
|
||
/* nosourcemap */ |