[ci] format

parent 7461e82c81
commit 25e04a2ecb

13 changed files with 711 additions and 724 deletions
@@ -409,16 +409,14 @@ The Astro Markdoc integration handles configuring Markdoc options and capabilities

Enables writing HTML markup alongside Markdoc tags and nodes.

By default, Markdoc will not recognize HTML markup as semantic content.

To achieve a more Markdown-like experience, where HTML elements can be included alongside your content, set `allowHTML: true` as a `markdoc` integration option. This will enable HTML parsing in Markdoc markup.

> **Warning**
> When `allowHTML` is enabled, HTML markup inside Markdoc documents will be rendered as actual HTML elements (including `<script>`), making attack vectors like XSS possible.
>
> Ensure that any HTML markup comes from trusted sources.

```js {7} "allowHTML: true"
// astro.config.mjs
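// The hunk above cuts off here. The rest of this config example is a sketch of
// how it likely continues, added for clarity; `defineConfig` and the `markdoc()`
// integration are real Astro APIs, but the exact layout is an assumption.
import { defineConfig } from 'astro/config';
import markdoc from '@astrojs/markdoc';

export default defineConfig({
	// ...
	integrations: [markdoc({ allowHTML: true })],
});
```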
@@ -11,21 +11,20 @@ import { MarkdocError, isComponentConfig, isValidUrl, prependForwardSlash } from
import { emitESMImage } from 'astro/assets/utils';
import path from 'node:path';
import type * as rollup from 'rollup';
import { htmlTokenTransform } from './html/transform/html-token-transform.js';
import type { MarkdocConfigResult } from './load-config.js';
import type { MarkdocIntegrationOptions } from './options.js';
import { setupConfig } from './runtime.js';
import { getMarkdocTokenizer } from './tokenizer.js';

export async function getContentEntryType({
	markdocConfigResult,
	astroConfig,
	options,
}: {
	astroConfig: AstroConfig;
	markdocConfigResult?: MarkdocConfigResult;
	options?: MarkdocIntegrationOptions;
}): Promise<ContentEntryType> {
	return {
		extensions: ['.mdoc'],
@@ -33,12 +32,12 @@ export async function getContentEntryType({
		handlePropagation: true,
		async getRenderModule({ contents, fileUrl, viteId }) {
			const entry = getEntryInfo({ contents, fileUrl });
			const tokenizer = getMarkdocTokenizer(options);
			let tokens = tokenizer.tokenize(entry.body);

			if (options?.allowHTML) {
				tokens = htmlTokenTransform(tokenizer, tokens);
			}

			const ast = Markdoc.parse(tokens);
			const usedTags = getUsedTags(ast);
@@ -1,23 +1,24 @@
import { styleToObject } from './style-to-object.js';

export function parseInlineCSSToReactLikeObject(
	css: string | undefined | null
): React.CSSProperties | undefined {
	if (typeof css === 'string') {
		const cssObject: Record<string, string> = {};
		styleToObject(css, (originalCssDirective: string, value: string) => {
			const reactCssDirective = convertCssDirectiveNameToReactCamelCase(originalCssDirective);
			cssObject[reactCssDirective] = value;
		});
		return cssObject;
	}

	return undefined;
}

function convertCssDirectiveNameToReactCamelCase(original: string): string {
	// capture group 1 is the character to capitalize, the hyphen is omitted by virtue of being outside the capture group
	const replaced = original.replace(/-([a-z0-9])/gi, (_match, char) => {
		return char.toUpperCase();
	});
	return replaced;
}
@@ -3,15 +3,15 @@

/**
 * @license MIT
 *
 * (The MIT License)
 *
 * Copyright (c) 2012 TJ Holowaychuk <tj@vision-media.ca>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
@@ -49,223 +49,219 @@ const TYPE_DECLARATION = 'declaration';
 * @throws {Error}
 */
export function parseInlineStyles(style, options) {
	if (typeof style !== 'string') {
		throw new TypeError('First argument must be a string');
	}

	if (!style) return [];

	options = options || {};

	/**
	 * Positional.
	 */
	let lineno = 1;
	let column = 1;

	/**
	 * Update lineno and column based on `str`.
	 *
	 * @param {String} str
	 */
	function updatePosition(str) {
		let lines = str.match(NEWLINE_REGEX);
		if (lines) lineno += lines.length;
		let i = str.lastIndexOf(NEWLINE);
		column = ~i ? str.length - i : column + str.length;
	}

	/**
	 * Mark position and patch `node.position`.
	 *
	 * @return {Function}
	 */
	function position() {
		let start = { line: lineno, column: column };
		return function (node) {
			node.position = new Position(start);
			whitespace();
			return node;
		};
	}

	/**
	 * Store position information for a node.
	 *
	 * @constructor
	 * @property {Object} start
	 * @property {Object} end
	 * @property {undefined|String} source
	 */
	function Position(start) {
		this.start = start;
		this.end = { line: lineno, column: column };
		this.source = options.source;
	}

	/**
	 * Non-enumerable source string.
	 */
	Position.prototype.content = style;

	const errorsList = [];

	/**
	 * Error `msg`.
	 *
	 * @param {String} msg
	 * @throws {Error}
	 */
	function error(msg) {
		const err = new Error(options.source + ':' + lineno + ':' + column + ': ' + msg);
		err.reason = msg;
		err.filename = options.source;
		err.line = lineno;
		err.column = column;
		err.source = style;

		if (options.silent) {
			errorsList.push(err);
		} else {
			throw err;
		}
	}

	/**
	 * Match `re` and return captures.
	 *
	 * @param {RegExp} re
	 * @return {undefined|Array}
	 */
	function match(re) {
		const m = re.exec(style);
		if (!m) return;
		const str = m[0];
		updatePosition(str);
		style = style.slice(str.length);
		return m;
	}

	/**
	 * Parse whitespace.
	 */
	function whitespace() {
		match(WHITESPACE_REGEX);
	}

	/**
	 * Parse comments.
	 *
	 * @param {Object[]} [rules]
	 * @return {Object[]}
	 */
	function comments(rules) {
		let c;
		rules = rules || [];
		while ((c = comment())) {
			if (c !== false) {
				rules.push(c);
			}
		}
		return rules;
	}

	/**
	 * Parse comment.
	 *
	 * @return {Object}
	 * @throws {Error}
	 */
	function comment() {
		const pos = position();
		if (FORWARD_SLASH != style.charAt(0) || ASTERISK != style.charAt(1)) return;

		let i = 2;
		while (
			EMPTY_STRING != style.charAt(i) &&
			(ASTERISK != style.charAt(i) || FORWARD_SLASH != style.charAt(i + 1))
		) {
			++i;
		}
		i += 2;

		if (EMPTY_STRING === style.charAt(i - 1)) {
			return error('End of comment missing');
		}

		const str = style.slice(2, i - 2);
		column += 2;
		updatePosition(str);
		style = style.slice(i);
		column += 2;

		return pos({
			type: TYPE_COMMENT,
			comment: str,
		});
	}

	/**
	 * Parse declaration.
	 *
	 * @return {Object}
	 * @throws {Error}
	 */
	function declaration() {
		const pos = position();

		// prop
		const prop = match(PROPERTY_REGEX);
		if (!prop) return;
		comment();

		// :
		if (!match(COLON_REGEX)) return error("property missing ':'");

		// val
		const val = match(VALUE_REGEX);

		const ret = pos({
			type: TYPE_DECLARATION,
			property: trim(prop[0].replace(COMMENT_REGEX, EMPTY_STRING)),
			value: val ? trim(val[0].replace(COMMENT_REGEX, EMPTY_STRING)) : EMPTY_STRING,
		});

		// ;
		match(SEMICOLON_REGEX);

		return ret;
	}

	/**
	 * Parse declarations.
	 *
	 * @return {Object[]}
	 */
	function declarations() {
		const decls = [];

		comments(decls);

		// declarations
		let decl;
		while ((decl = declaration())) {
			if (decl !== false) {
				decls.push(decl);
				comments(decls);
			}
		}

		return decls;
	}

	whitespace();
	return declarations();
}

/**
 * Trim `str`.
@@ -274,5 +270,5 @@ export function parseInlineStyles(style, options) {
 * @return {String}
 */
function trim(str) {
	return str ? str.replace(TRIM_REGEX, EMPTY_STRING) : EMPTY_STRING;
}
@@ -3,11 +3,11 @@

/**
 * @license MIT
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2017 Menglin "Mark" Xu <mark@remarkablemark.org>
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
@@ -15,10 +15,10 @@
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
@@ -28,7 +28,7 @@
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

import { parseInlineStyles } from './parse-inline-styles.js';

/**
 * Parses inline style to object.
@@ -42,29 +42,29 @@ import { parseInlineStyles } from "./parse-inline-styles.js";
 * @return {null|Object}
 */
export function styleToObject(style, iterator) {
	let output = null;
	if (!style || typeof style !== 'string') {
		return output;
	}

	let declaration;
	let declarations = parseInlineStyles(style);
	let hasIterator = typeof iterator === 'function';
	let property;
	let value;

	for (let i = 0, len = declarations.length; i < len; i++) {
		declaration = declarations[i];
		property = declaration.property;
		value = declaration.value;

		if (hasIterator) {
			iterator(property, value, declaration);
		} else if (value) {
			output || (output = {});
			output[property] = value;
		}
	}

	return output;
}
@@ -1,2 +1,2 @@
export { htmlTag } from './tagdefs/html.tag';
export { htmlTokenTransform } from './transform/html-token-transform';
@@ -1,32 +1,30 @@
import type { Config, Schema } from '@markdoc/markdoc';
import Markdoc from '@markdoc/markdoc';

// local
import { parseInlineCSSToReactLikeObject } from '../css/parse-inline-css-to-react.js';

// a Markdoc tag that will render a given HTML element and its attributes, as produced by the htmlTokenTransform function
export const htmlTag: Schema<Config, never> = {
	attributes: {
		name: { type: String, required: true },
		attrs: { type: Object },
	},

	transform(node, config) {
		const { name, attrs: unsafeAttributes } = node.attributes;
		const children = node.transformChildren(config);

		// pull out any "unsafe" attributes which need additional processing
		const { style, ...safeAttributes } = unsafeAttributes as Record<string, unknown>;

		// if the inline "style" attribute is present we need to parse the HTML into a react-like React.CSSProperties object
		if (typeof style === 'string') {
			const styleObject = parseInlineCSSToReactLikeObject(style);
			safeAttributes.style = styleObject;
		}

		// create a Markdoc Tag for the given HTML node with the HTML attributes and children
		return new Markdoc.Tag(name, safeAttributes, children);
	},
};
@@ -1,256 +1,247 @@
import { Tokenizer } from '@markdoc/markdoc';
import { Parser } from 'htmlparser2';
import type * as Token from 'markdown-it/lib/token';

export function htmlTokenTransform(tokenizer: Tokenizer, tokens: Token[]): Token[] {
	const output: Token[] = [];

	// hold a lazy buffer of text and process it only when necessary
	let textBuffer = '';

	let inCDATA = false;

	const appendText = (text: string) => {
		textBuffer += text;
	};

	// process the current text buffer w/ Markdoc's Tokenizer for tokens
	const processTextBuffer = () => {
		if (textBuffer.length > 0) {
			// tokenize the text buffer to look for structural markup tokens
			const toks = tokenizer.tokenize(textBuffer);

			// when we tokenize some raw text content, it's basically treated like Markdown, and will result in a paragraph wrapper, which we don't want
			// in this scenario, we just want to generate a text token, but, we have to tokenize it in case there's other structural markup
			if (toks.length === 3) {
				const first = toks[0];
				const second = toks[1];
				const third: Token | undefined = toks.at(2);

				if (
					first.type === 'paragraph_open' &&
					second.type === 'inline' &&
					third &&
					third.type === 'paragraph_close' &&
					Array.isArray(second.children)
				) {
					for (const tok of second.children as Token[]) {
						// if the given token is a 'text' token and its trimmed content is the same as the pre-tokenized text buffer, use the original
						// text buffer instead to preserve leading/trailing whitespace that is lost during tokenization of pure text content
						if (tok.type === 'text') {
							if (tok.content.trim() == textBuffer.trim()) {
								tok.content = textBuffer;
							}
						}
						output.push(tok);
					}
				} else {
					// some other markup that happened to be 3 tokens, push tokens as-is
					for (const tok of toks) {
						output.push(tok);
					}
				}
			} else {
				// some other tokenized markup, push tokens as-is
				for (const tok of toks) {
					output.push(tok);
				}
			}

			// reset the current lazy text buffer
			textBuffer = '';
		}
	};

	// create an incremental HTML parser that tracks HTML tag open, close and text content
	const parser = new Parser(
		{
			oncdatastart() {
				inCDATA = true;
			},

			oncdataend() {
				inCDATA = false;
			},

			// when an HTML tag opens...
			onopentag(name, attrs) {
				// process any buffered text to be treated as text node before the currently opening HTML tag
				processTextBuffer();

				// push an 'html-tag' 'tag_open' Markdoc node instance for the currently opening HTML tag onto the resulting Token stack
				output.push({
					type: 'tag_open',
					nesting: 1,
					meta: {
						tag: 'html-tag',
						attributes: [
							{ type: 'attribute', name: 'name', value: name },
							{ type: 'attribute', name: 'attrs', value: attrs },
						],
					},
				} as Token);
			},

			ontext(content: string | null | undefined) {
				if (inCDATA) {
					// ignore entirely while inside CDATA
					return;
				}

				// only accumulate text into the buffer if we're not under an ignored HTML element
				if (typeof content === 'string') {
					appendText(content);
				}
			},

			// when an HTML tag closes...
			onclosetag(name) {
				// process any buffered text to be treated as a text node inside the currently closing HTML tag
				processTextBuffer();

				// push an 'html-tag' 'tag_close' Markdoc node instance for the currently closing HTML tag onto the resulting Token stack
				output.push({
					type: 'tag_close',
					nesting: -1,
					meta: {
						tag: 'html-tag',
						attributes: [{ type: 'attribute', name: 'name', value: name }],
					},
				} as Token);
			},
		},
		{
			decodeEntities: false,
			recognizeCDATA: true,
			recognizeSelfClosing: true,
		}
	);

	// for every detected token...
	for (const token of tokens) {
		// if it was an HTML token, write the HTML text into the HTML parser
		if (token.type.startsWith('html')) {
			// as the parser encounters opening/closing HTML tags, it will push Markdoc Tag nodes into the output stack
			parser.write(token.content);

			// continue loop... IMPORTANT! we're throwing away the original 'html' tokens here (raw HTML strings), since the parser is inserting new ones based on the parsed HTML
			continue;
		}

		// process any child content for HTML
		if (token.type === 'inline') {
			if (token.children) {
				token.children = htmlTokenTransform(tokenizer, token.children);
			}
		}

		// not an HTML Token, preserve it at the current stack location
		output.push(token);
	}

	// process any remaining buffered text
	processTextBuffer();

	//
	// post-process the current levels output Token[] array to un-wind this pattern:
	//
	// [
	// { type: tag_open, meta.tag: html-tag },
	// { type: paragraph_open },
	// { type: inline, children [...] },
	// { type: paragraph_close },
	// { type: tag_close, meta.tag: html-tag }
	// ]
	//
	// the paragraph_open, inline, paragraph_close triplet needs to be replaced by the children of the inline node
	//
	// this is extra, unwanted paragraph wrapping unfortunately introduced by markdown-it during processing w/ HTML enabled
	//

	mutateAndCollapseExtraParagraphsUnderHtml(output);

	return output;
}

function mutateAndCollapseExtraParagraphsUnderHtml(tokens: Token[]): void {
	let done = false;

	while (!done) {
		const idx = findExtraParagraphUnderHtml(tokens);
		if (typeof idx === 'number') {
			// mutate

			const actualChildTokens = tokens[idx + 2].children ?? [];

			tokens.splice(idx, 5, ...actualChildTokens);
		} else {
			done = true;
		}
	}
}

/**
 *
 * @param token
 * @returns
 */
function findExtraParagraphUnderHtml(tokens: Token[]): number | null {
	if (tokens.length < 5) {
		return null;
	}

	for (let i = 0; i < tokens.length; i++) {
		const last = i + 4;
		if (last > tokens.length - 1) {
			break; // early exit, no more possible 5-long slices to search
		}

		const slice = tokens.slice(i, last + 1);
		const isMatch = isExtraParagraphPatternMatch(slice);
		if (isMatch) {
			return i;
		}
	}

	return null;
}

function isExtraParagraphPatternMatch(slice: Token[]): boolean {
	const match =
		isHtmlTagOpen(slice[0]) &&
		isParagraphOpen(slice[1]) &&
		isInline(slice[2]) &&
		isParagraphClose(slice[3]) &&
		isHtmlTagClose(slice[4]);
	return match;
}

function isHtmlTagOpen(token: Token): boolean {
	return token.type === 'tag_open' && token.meta && token.meta.tag === 'html-tag';
}

function isHtmlTagClose(token: Token): boolean {
	return token.type === 'tag_close' && token.meta && token.meta.tag === 'html-tag';
}

function isParagraphOpen(token: Token): boolean {
	return token.type === 'paragraph_open';
}

function isParagraphClose(token: Token): boolean {
	return token.type === 'paragraph_close';
}

function isInline(token: Token): boolean {
	return token.type === 'inline';
}
@@ -25,7 +25,9 @@ export default function markdocIntegration(options?: MarkdocIntegrationOptions):
			markdocConfigResult = await loadMarkdocConfig(astroConfig);

			addContentEntryType(
				await getContentEntryType({ markdocConfigResult, astroConfig, options })
			);

			updateConfig({
				vite: {
@@ -1,3 +1,3 @@
export interface MarkdocIntegrationOptions {
	allowHTML?: boolean;
}
@@ -10,15 +10,18 @@ import type { AstroInstance } from 'astro';
import { createComponent, renderComponent } from 'astro/runtime/server/index.js';
import type { AstroMarkdocConfig } from './config.js';
import { setupHeadingConfig } from './heading-ids.js';
import { htmlTag } from './html/tagdefs/html.tag.js';
import type { MarkdocIntegrationOptions } from './options.js';

/**
 * Merge user config with default config and set up context (ex. heading ID slugger)
 * Called on each file's individual transform.
 * TODO: virtual module to merge configs per-build instead of per-file?
 */
export async function setupConfig(
	userConfig: AstroMarkdocConfig = {},
	options: MarkdocIntegrationOptions | undefined
): Promise<MergedConfig> {
	let defaultConfig: AstroMarkdocConfig = setupHeadingConfig();

	if (userConfig.extends) {
@@ -33,24 +36,27 @@ export async function setupConfig(userConfig: AstroMarkdocConfig = {}, options:
	let merged = mergeConfig(defaultConfig, userConfig);

	if (options?.allowHTML) {
		merged = mergeConfig(merged, HTML_CONFIG);
	}

	return merged;
}

/** Used for synchronous `getHeadings()` function */
export function setupConfigSync(
	userConfig: AstroMarkdocConfig = {},
	options: MarkdocIntegrationOptions | undefined
): MergedConfig {
	const defaultConfig: AstroMarkdocConfig = setupHeadingConfig();

	let merged = mergeConfig(defaultConfig, userConfig);

	if (options?.allowHTML) {
		merged = mergeConfig(merged, HTML_CONFIG);
	}

	return merged;
}

type MergedConfig = Required<Omit<AstroMarkdocConfig, 'extends'>>;
@@ -160,7 +166,11 @@ export function collectHeadings(
	}
}

export function createGetHeadings(
	stringifiedAst: string,
	userConfig: AstroMarkdocConfig,
	options: MarkdocIntegrationOptions | undefined
) {
	return function getHeadings() {
		/* Yes, we are transforming twice (once from `getHeadings()` and again from <Content /> in case of variables).
		TODO: propose new `render()` API to allow Markdoc variable passing to `render()` itself,
@@ -178,7 +188,7 @@ export function createContentComponent(
	Renderer: AstroInstance['default'],
	stringifiedAst: string,
	userConfig: AstroMarkdocConfig,
	options: MarkdocIntegrationOptions | undefined,
	tagComponentMap: Record<string, AstroInstance['default']>,
	nodeComponentMap: Record<NodeType, AstroInstance['default']>
) {
@@ -199,7 +209,7 @@

// statically define a partial MarkdocConfig which registers the required "html-tag" Markdoc tag when the "allowHTML" feature is enabled
const HTML_CONFIG: AstroMarkdocConfig = {
	tags: {
		'html-tag': htmlTag,
	},
};
@@ -5,34 +5,32 @@ import type { MarkdocIntegrationOptions } from './options.js';
type TokenizerOptions = ConstructorParameters<typeof Tokenizer>[0];

export function getMarkdocTokenizer(options: MarkdocIntegrationOptions | undefined): Tokenizer {
	const key = cacheKey(options);

	if (!_cachedMarkdocTokenizers[key]) {
		const tokenizerOptions: TokenizerOptions = {
			// Strip <!-- comments --> from rendered output
			// Without this, they're rendered as strings!
			allowComments: true,
		};

		if (options?.allowHTML) {
			// we want to allow indentation for Markdoc tags that are interleaved inside HTML block elements
			tokenizerOptions.allowIndentation = true;
			// enable HTML token detection in markdown-it
			tokenizerOptions.html = true;
		}

		_cachedMarkdocTokenizers[key] = new Markdoc.Tokenizer(tokenizerOptions);
	}

	return _cachedMarkdocTokenizers[key];
}

// create this on-demand when needed since it relies on the runtime MarkdocIntegrationOptions and may change during
// the life of module in certain scenarios (unit tests, etc.)
let _cachedMarkdocTokenizers: Record<string, Tokenizer> = {};

function cacheKey(options: MarkdocIntegrationOptions | undefined): string {
	return JSON.stringify(options);
}
@ -3,291 +3,285 @@ import { expect } from 'chai';
|
||||||
import { loadFixture } from '../../../astro/test/test-utils.js';
|
import { loadFixture } from '../../../astro/test/test-utils.js';
|
||||||
|
|
||||||
async function getFixture(name) {
|
async function getFixture(name) {
|
||||||
return await loadFixture({
|
return await loadFixture({
|
||||||
root: new URL(`./fixtures/${name}/`, import.meta.url),
|
root: new URL(`./fixtures/${name}/`, import.meta.url),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
describe('Markdoc - render html', () => {
|
describe('Markdoc - render html', () => {
|
||||||
|
let fixture;
|
||||||
|
|
||||||
let fixture;
|
before(async () => {
|
||||||
|
fixture = await getFixture('render-html');
|
||||||
|
});
|
||||||
|
|
||||||
before(async () => {
|
describe('dev', () => {
|
||||||
fixture = await getFixture('render-html');
|
let devServer;
|
||||||
});
|
|
||||||
|
|
||||||
describe('dev', () => {
|
before(async () => {
|
||||||
|
devServer = await fixture.startDevServer();
|
||||||
|
});
|
||||||
|
|
||||||
let devServer;
|
after(async () => {
|
||||||
|
await devServer.stop();
|
||||||
|
});
|
||||||
|
|
||||||
before(async () => {
|
it('renders content - simple', async () => {
|
||||||
devServer = await fixture.startDevServer();
|
const res = await fixture.fetch('/simple');
|
||||||
});
|
const html = await res.text();
|
||||||
|
|
||||||
after(async () => {
|
renderSimpleChecks(html);
|
||||||
await devServer.stop();
|
});
|
||||||
});
|
|
||||||
|
|
||||||
it('renders content - simple', async () => {
|
it('renders content - nested-html', async () => {
|
||||||
const res = await fixture.fetch('/simple');
|
const res = await fixture.fetch('/nested-html');
|
||||||
const html = await res.text();
|
const html = await res.text();
|
||||||
|
|
||||||
renderSimpleChecks(html);
|
renderNestedHTMLChecks(html);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('renders content - nested-html', async () => {
|
it('renders content - components interleaved with html', async () => {
|
||||||
const res = await fixture.fetch('/nested-html');
|
const res = await fixture.fetch('/components');
|
||||||
const html = await res.text();
|
const html = await res.text();
|
||||||
|
|
||||||
renderNestedHTMLChecks(html);
|
renderComponentsHTMLChecks(html);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('renders content - components interleaved with html', async () => {
|
it('renders content - randomly cased html attributes', async () => {
|
||||||
const res = await fixture.fetch('/components');
|
const res = await fixture.fetch('/randomly-cased-html-attributes');
|
||||||
const html = await res.text();
|
const html = await res.text();
|
||||||
|
|
||||||
renderComponentsHTMLChecks(html);
|
renderRandomlyCasedHTMLAttributesChecks(html);
|
||||||
});
|
});
|
||||||
|
});
|
||||||
|
|
||||||
it('renders content - randomly cased html attributes', async () => {
|
describe('build', () => {
|
||||||
const res = await fixture.fetch('/randomly-cased-html-attributes');
|
before(async () => {
|
||||||
const html = await res.text();
|
await fixture.build();
|
||||||
|
});
|
||||||
|
|
||||||
renderRandomlyCasedHTMLAttributesChecks(html);
|
it('renders content - simple', async () => {
|
||||||
});
|
const html = await fixture.readFile('/simple/index.html');
|
||||||
|
|
||||||
});
|
renderSimpleChecks(html);
|
||||||
|
});
|
||||||
|
|
||||||
describe('build', () => {
|
it('renders content - nested-html', async () => {
|
||||||
|
const html = await fixture.readFile('/nested-html/index.html');
|
||||||
|
|
||||||
before(async () => {
|
renderNestedHTMLChecks(html);
|
||||||
await fixture.build();
|
});
|
||||||
});
|
|
||||||
|
|
||||||
|
it('renders content - components interleaved with html', async () => {
|
||||||
|
const html = await fixture.readFile('/components/index.html');
|
||||||
|
|
||||||
it('renders content - simple', async () => {
|
renderComponentsHTMLChecks(html);
|
||||||
const html = await fixture.readFile('/simple/index.html');
|
});
|
||||||
|
|
||||||
renderSimpleChecks(html);
|
it('renders content - randomly cased html attributes', async () => {
|
||||||
});
|
const html = await fixture.readFile('/randomly-cased-html-attributes/index.html');
|
||||||
|
|
||||||
it('renders content - nested-html', async () => {
|
renderRandomlyCasedHTMLAttributesChecks(html);
|
||||||
const html = await fixture.readFile('/nested-html/index.html');
|
});
|
||||||
|
});
|
||||||
renderNestedHTMLChecks(html);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('renders content - components interleaved with html', async () => {
|
|
||||||
const html = await fixture.readFile('/components/index.html');
|
|
||||||
|
|
||||||
renderComponentsHTMLChecks(html);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('renders content - randomly cased html attributes', async () => {
|
|
||||||
const html = await fixture.readFile('/randomly-cased-html-attributes/index.html');
|
|
||||||
|
|
||||||
renderRandomlyCasedHTMLAttributesChecks(html);
|
|
||||||
});
|
|
||||||
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
/** @param {string} html */
function renderSimpleChecks(html) {
	const { document } = parseHTML(html);

	const h2 = document.querySelector('h2');
	expect(h2.textContent).to.equal('Simple post header');

	const spanInsideH2 = document.querySelector('h2 > span');
	expect(spanInsideH2.textContent).to.equal('post');
	expect(spanInsideH2.className).to.equal('inside-h2');
	expect(spanInsideH2.style.color).to.equal('fuscia');

	const p1 = document.querySelector('article > p:nth-of-type(1)');
	expect(p1.children.length).to.equal(1);
	expect(p1.textContent).to.equal('This is a simple Markdoc post.');

	const p2 = document.querySelector('article > p:nth-of-type(2)');
	expect(p2.children.length).to.equal(0);
	expect(p2.textContent).to.equal('This is a paragraph!');

	const p3 = document.querySelector('article > p:nth-of-type(3)');
	expect(p3.children.length).to.equal(1);
	expect(p3.textContent).to.equal('This is a span inside a paragraph!');
}

/** @param {string} html */
function renderNestedHTMLChecks(html) {
	const { document } = parseHTML(html);

	const p1 = document.querySelector('p:nth-of-type(1)');
	expect(p1.id).to.equal('p1');
	expect(p1.textContent).to.equal('before inner after');
	expect(p1.children.length).to.equal(1);

	const p1Span1 = p1.querySelector('span');
	expect(p1Span1.textContent).to.equal('inner');
	expect(p1Span1.id).to.equal('inner1');
	expect(p1Span1.className).to.equal('inner-class');
	expect(p1Span1.style.color).to.equal('hotpink');

	const p2 = document.querySelector('p:nth-of-type(2)');
	expect(p2.id).to.equal('p2');
	expect(p2.textContent).to.equal('\n before\n inner\n after\n');
	expect(p2.children.length).to.equal(1);

	const divL1 = document.querySelector('div:nth-of-type(1)');
	expect(divL1.id).to.equal('div-l1');
	expect(divL1.children.length).to.equal(2);

	const divL2_1 = divL1.querySelector('div:nth-of-type(1)');
	expect(divL2_1.id).to.equal('div-l2-1');
	expect(divL2_1.children.length).to.equal(1);

	const p3 = divL2_1.querySelector('p:nth-of-type(1)');
	expect(p3.id).to.equal('p3');
	expect(p3.textContent).to.equal('before inner after');
	expect(p3.children.length).to.equal(1);

	const divL2_2 = divL1.querySelector('div:nth-of-type(2)');
	expect(divL2_2.id).to.equal('div-l2-2');
	expect(divL2_2.children.length).to.equal(2);

	const p4 = divL2_2.querySelector('p:nth-of-type(1)');
	expect(p4.id).to.equal('p4');
	expect(p4.textContent).to.equal('before inner after');
	expect(p4.children.length).to.equal(1);

	const p5 = divL2_2.querySelector('p:nth-of-type(2)');
	expect(p5.id).to.equal('p5');
	expect(p5.textContent).to.equal('before inner after');
	expect(p5.children.length).to.equal(1);
}

/** @param {string} html */
function renderRandomlyCasedHTMLAttributesChecks(html) {
	const { document } = parseHTML(html);

	const td1 = document.querySelector('#td1');
	const td2 = document.querySelector('#td2');
	const td3 = document.querySelector('#td3');
	const td4 = document.querySelector('#td4');

	// All four <td>s used randomly cased variants of colspan/rowspan in the source;
	// they should all be rendered lowercased at this point.
	expect(td1.getAttribute('colspan')).to.equal('3');
	expect(td1.getAttribute('rowspan')).to.equal('2');

	expect(td2.getAttribute('colspan')).to.equal('3');
	expect(td2.getAttribute('rowspan')).to.equal('2');

	expect(td3.getAttribute('colspan')).to.equal('3');
	expect(td3.getAttribute('rowspan')).to.equal('2');

	expect(td4.getAttribute('colspan')).to.equal('3');
	expect(td4.getAttribute('rowspan')).to.equal('2');
}

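For context, the randomly-cased fixture presumably writes its `colspan`/`rowspan` attributes with mixed casing, and the checks above read them back under their standard lowercase names. A minimal, self-contained illustration of that read-back pattern using the same `linkedom` + `chai` pairing as this file (the markup string is a hypothetical stand-in, not the real `.mdoc` fixture):

```js
import { parseHTML } from 'linkedom';
import { expect } from 'chai';

// Hypothetical stand-in for the fixture's table-cell markup.
const { document } = parseHTML(
	'<table><tr><td id="td1" CoLsPaN="3" rOwSpAn="2">cell</td></tr></table>'
);

// HTML parsers normalize attribute names to lowercase, so the lowercase names
// used by the checks above find the mixed-case source attributes.
expect(document.querySelector('#td1').getAttribute('colspan')).to.equal('3');
expect(document.querySelector('#td1').getAttribute('rowspan')).to.equal('2');
```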
/**
 * Asserts that HTML markup interleaved with Markdoc tags (both block and inline)
 * renders into the expected nested graph of elements.
 *
 * @param {string} html */
function renderComponentsHTMLChecks(html) {
	const { document } = parseHTML(html);

	const naturalP1 = document.querySelector('article > p:nth-of-type(1)');
	expect(naturalP1.textContent).to.equal('This is a inline mark in regular Markdown markup.');
	expect(naturalP1.children.length).to.equal(1);

	const p1 = document.querySelector('article > p:nth-of-type(2)');
	expect(p1.id).to.equal('p1');
	expect(p1.textContent).to.equal('This is a inline mark under some HTML');
	expect(p1.children.length).to.equal(1);
	assertInlineMark(p1.children[0]);

	const div1p1 = document.querySelector('article > #div1 > p:nth-of-type(1)');
	expect(div1p1.id).to.equal('div1-p1');
	expect(div1p1.textContent).to.equal('This is a inline mark under some HTML');
	expect(div1p1.children.length).to.equal(1);
	assertInlineMark(div1p1.children[0]);

	const div1p2 = document.querySelector('article > #div1 > p:nth-of-type(2)');
	expect(div1p2.id).to.equal('div1-p2');
	expect(div1p2.textContent).to.equal('This is a inline mark under some HTML');
	expect(div1p2.children.length).to.equal(1);

	const div1p2span1 = div1p2.querySelector('span');
	expect(div1p2span1.id).to.equal('div1-p2-span1');
	expect(div1p2span1.textContent).to.equal('inline mark');
	expect(div1p2span1.children.length).to.equal(1);
	assertInlineMark(div1p2span1.children[0]);

	const aside1 = document.querySelector('article > aside:nth-of-type(1)');
	const aside1Title = aside1.querySelector('p.title');
	expect(aside1Title.textContent.trim()).to.equal('Aside One');
	const aside1Section = aside1.querySelector('section');
	const aside1SectionP1 = aside1Section.querySelector('p:nth-of-type(1)');
	expect(aside1SectionP1.textContent).to.equal(
		"I'm a Markdown paragraph inside an top-level aside tag"
	);
	const aside1H2_1 = aside1Section.querySelector('h2:nth-of-type(1)');
	expect(aside1H2_1.id).to.equal('im-an-h2-via-markdown-markup'); // automatic slug
	expect(aside1H2_1.textContent).to.equal("I'm an H2 via Markdown markup");
	const aside1H2_2 = aside1Section.querySelector('h2:nth-of-type(2)');
	expect(aside1H2_2.id).to.equal('h-two');
	expect(aside1H2_2.textContent).to.equal("I'm an H2 via HTML markup");
	const aside1SectionP2 = aside1Section.querySelector('p:nth-of-type(2)');
	expect(aside1SectionP2.textContent).to.equal('Markdown bold vs HTML bold');
	expect(aside1SectionP2.children.length).to.equal(2);
	const aside1SectionP2Strong1 = aside1SectionP2.querySelector('strong:nth-of-type(1)');
	expect(aside1SectionP2Strong1.textContent).to.equal('Markdown bold');
	const aside1SectionP2Strong2 = aside1SectionP2.querySelector('strong:nth-of-type(2)');
	expect(aside1SectionP2Strong2.textContent).to.equal('HTML bold');

	const article = document.querySelector('article');
	expect(article.textContent).to.contain('RENDERED');
	expect(article.textContent).to.not.contain('NOT RENDERED');

	const section1 = document.querySelector('article > #section1');
	const section1div1 = section1.querySelector('#div1');
	const section1Aside1 = section1div1.querySelector('aside:nth-of-type(1)');
	const section1Aside1Title = section1Aside1.querySelector('p.title');
	expect(section1Aside1Title.textContent.trim()).to.equal('Nested un-indented Aside');
	const section1Aside1Section = section1Aside1.querySelector('section');
	const section1Aside1SectionP1 = section1Aside1Section.querySelector('p:nth-of-type(1)');
	expect(section1Aside1SectionP1.textContent).to.equal('regular Markdown markup');
	const section1Aside1SectionP4 = section1Aside1Section.querySelector('p:nth-of-type(2)');
	expect(section1Aside1SectionP4.textContent).to.equal('nested inline mark content');
	expect(section1Aside1SectionP4.children.length).to.equal(1);
	assertInlineMark(section1Aside1SectionP4.children[0]);

	const section1div2 = section1.querySelector('#div2');
	const section1Aside2 = section1div2.querySelector('aside:nth-of-type(1)');
	const section1Aside2Title = section1Aside2.querySelector('p.title');
	expect(section1Aside2Title.textContent.trim()).to.equal('Nested indented Aside 💀');
	const section1Aside2Section = section1Aside2.querySelector('section');
	const section1Aside2SectionP1 = section1Aside2Section.querySelector('p:nth-of-type(1)');
	expect(section1Aside2SectionP1.textContent).to.equal('regular Markdown markup');
	const section1Aside1SectionP5 = section1Aside2Section.querySelector('p:nth-of-type(2)');
	expect(section1Aside1SectionP5.id).to.equal('p5');
	expect(section1Aside1SectionP5.children.length).to.equal(1);
	const section1Aside1SectionP5Span1 = section1Aside1SectionP5.children[0];
	expect(section1Aside1SectionP5Span1.textContent).to.equal('inline mark');
	expect(section1Aside1SectionP5Span1.children.length).to.equal(1);
	const section1Aside1SectionP5Span1Span1 = section1Aside1SectionP5Span1.children[0];
	expect(section1Aside1SectionP5Span1Span1.textContent).to.equal(' mark');
}

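The component checks above repeat one pattern: query a single element, then assert its id, text content, and child count. A compact helper along these lines could express the same assertions; this is a hypothetical sketch (the helper name and its options are invented here), not part of the change:

```js
import { expect } from 'chai';

/**
 * Query `root` for `selector` and assert common properties in one call.
 * @param {ParentNode} root
 * @param {string} selector
 * @param {{ id?: string, text?: string, childCount?: number }} [checks]
 */
function expectElement(root, selector, checks = {}) {
	const el = root.querySelector(selector);
	expect(el, `expected an element matching "${selector}"`).to.not.be.null;
	if (checks.id !== undefined) expect(el.id).to.equal(checks.id);
	if (checks.text !== undefined) expect(el.textContent).to.equal(checks.text);
	if (checks.childCount !== undefined) expect(el.children.length).to.equal(checks.childCount);
	return el;
}

// e.g. expectElement(document, 'article > #div1 > p:nth-of-type(1)', {
// 	id: 'div1-p1',
// 	text: 'This is a inline mark under some HTML',
// 	childCount: 1,
// });
```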
/** @param {HTMLElement | null | undefined} el */
function assertInlineMark(el) {
	expect(el).to.not.be.null;
	expect(el).to.not.be.undefined;
	expect(el.children.length).to.equal(0);
	expect(el.textContent).to.equal('inline mark');
	expect(el.className).to.equal('mark');
	expect(el.style.color).to.equal('hotpink');
}