[ci] format
parent 7461e82c81
commit 25e04a2ecb
13 changed files with 711 additions and 724 deletions
@ -411,15 +411,13 @@ Enables writing HTML markup alongside Markdoc tags and nodes.

By default, Markdoc will not recognize HTML markup as semantic content.

To achieve a more Markdown-like experience, where HTML elements can be included alongside your content, set `allowHTML:true` as a `markdoc` integration option. This will enable HTML parsing in Markdoc markup.

> **Warning**
> When `allowHTML` is enabled, HTML markup inside Markdoc documents will be rendered as actual HTML elements (including `<script>`), making attack vectors like XSS possible.
>
> Ensure that any HTML markup comes from trusted sources.

```js {7} "allowHTML: true"
// astro.config.mjs
import { defineConfig } from 'astro/config';
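For reference, a minimal sketch of what the full configuration could look like. The `@astrojs/markdoc` import path is assumed here (it is not shown in this hunk); the `markdoc(options)` factory and the `allowHTML` option match the integration code later in this diff.

```js
// astro.config.mjs — illustrative sketch, not the exact snippet from the README
import { defineConfig } from 'astro/config';
import markdoc from '@astrojs/markdoc';

export default defineConfig({
  integrations: [
    // allowHTML opts into HTML parsing; only enable it for trusted content
    markdoc({ allowHTML: true }),
  ],
});
```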
@ -11,21 +11,20 @@ import { MarkdocError, isComponentConfig, isValidUrl, prependForwardSlash } from

import { emitESMImage } from 'astro/assets/utils';
import path from 'node:path';
import type * as rollup from 'rollup';
import { htmlTokenTransform } from './html/transform/html-token-transform.js';
import type { MarkdocConfigResult } from './load-config.js';
import type { MarkdocIntegrationOptions } from './options.js';
import { setupConfig } from './runtime.js';
import { getMarkdocTokenizer } from './tokenizer.js';

export async function getContentEntryType({
  markdocConfigResult,
  astroConfig,
  options,
}: {
  astroConfig: AstroConfig;
  markdocConfigResult?: MarkdocConfigResult;
  options?: MarkdocIntegrationOptions;
}): Promise<ContentEntryType> {
  return {
    extensions: ['.mdoc'],
@ -33,12 +32,12 @@ export async function getContentEntryType({

    handlePropagation: true,
    async getRenderModule({ contents, fileUrl, viteId }) {
      const entry = getEntryInfo({ contents, fileUrl });
      const tokenizer = getMarkdocTokenizer(options);
      let tokens = tokenizer.tokenize(entry.body);

      if (options?.allowHTML) {
        tokens = htmlTokenTransform(tokenizer, tokens);
      }

      const ast = Markdoc.parse(tokens);
      const usedTags = getUsedTags(ast);
@ -1,23 +1,24 @@

import { styleToObject } from './style-to-object.js';

export function parseInlineCSSToReactLikeObject(
  css: string | undefined | null
): React.CSSProperties | undefined {
  if (typeof css === 'string') {
    const cssObject: Record<string, string> = {};
    styleToObject(css, (originalCssDirective: string, value: string) => {
      const reactCssDirective = convertCssDirectiveNameToReactCamelCase(originalCssDirective);
      cssObject[reactCssDirective] = value;
    });
    return cssObject;
  }

  return undefined;
}

function convertCssDirectiveNameToReactCamelCase(original: string): string {
  // capture group 1 is the character to capitalize, the hyphen is omitted by virtue of being outside the capture group
  const replaced = original.replace(/-([a-z0-9])/gi, (_match, char) => {
    return char.toUpperCase();
  });
  return replaced;
}
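To make the conversion concrete, a small hedged example of how the helper above behaves on a typical inline style string; the output shape follows from the camel-casing replace shown in the diff.

```ts
import { parseInlineCSSToReactLikeObject } from './parse-inline-css-to-react.js';

// hyphenated CSS property names become camelCased keys, values stay as strings
const styleObject = parseInlineCSSToReactLikeObject('color: hotpink; font-size: 12px');
// expected: { color: 'hotpink', fontSize: '12px' }

// non-string input falls through to undefined
const empty = parseInlineCSSToReactLikeObject(null);
// expected: undefined
```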
@ -49,223 +49,219 @@ const TYPE_DECLARATION = 'declaration';

 * @throws {Error}
 */
export function parseInlineStyles(style, options) {
  if (typeof style !== 'string') {
    throw new TypeError('First argument must be a string');
  }

  if (!style) return [];

  options = options || {};

  /**
   * Positional.
   */
  let lineno = 1;
  let column = 1;

  /**
   * Update lineno and column based on `str`.
   *
   * @param {String} str
   */
  function updatePosition(str) {
    let lines = str.match(NEWLINE_REGEX);
    if (lines) lineno += lines.length;
    let i = str.lastIndexOf(NEWLINE);
    column = ~i ? str.length - i : column + str.length;
  }

  /**
   * Mark position and patch `node.position`.
   *
   * @return {Function}
   */
  function position() {
    let start = { line: lineno, column: column };
    return function (node) {
      node.position = new Position(start);
      whitespace();
      return node;
    };
  }

  /**
   * Store position information for a node.
   *
   * @constructor
   * @property {Object} start
   * @property {Object} end
   * @property {undefined|String} source
   */
  function Position(start) {
    this.start = start;
    this.end = { line: lineno, column: column };
    this.source = options.source;
  }

  /**
   * Non-enumerable source string.
   */
  Position.prototype.content = style;

  const errorsList = [];

  /**
   * Error `msg`.
   *
   * @param {String} msg
   * @throws {Error}
   */
  function error(msg) {
    const err = new Error(options.source + ':' + lineno + ':' + column + ': ' + msg);
    err.reason = msg;
    err.filename = options.source;
    err.line = lineno;
    err.column = column;
    err.source = style;

    if (options.silent) {
      errorsList.push(err);
    } else {
      throw err;
    }
  }

  /**
   * Match `re` and return captures.
   *
   * @param {RegExp} re
   * @return {undefined|Array}
   */
  function match(re) {
    const m = re.exec(style);
    if (!m) return;
    const str = m[0];
    updatePosition(str);
    style = style.slice(str.length);
    return m;
  }

  /**
   * Parse whitespace.
   */
  function whitespace() {
    match(WHITESPACE_REGEX);
  }

  /**
   * Parse comments.
   *
   * @param {Object[]} [rules]
   * @return {Object[]}
   */
  function comments(rules) {
    let c;
    rules = rules || [];
    while ((c = comment())) {
      if (c !== false) {
        rules.push(c);
      }
    }
    return rules;
  }

  /**
   * Parse comment.
   *
   * @return {Object}
   * @throws {Error}
   */
  function comment() {
    const pos = position();
    if (FORWARD_SLASH != style.charAt(0) || ASTERISK != style.charAt(1)) return;

    let i = 2;
    while (
      EMPTY_STRING != style.charAt(i) &&
      (ASTERISK != style.charAt(i) || FORWARD_SLASH != style.charAt(i + 1))
    ) {
      ++i;
    }
    i += 2;

    if (EMPTY_STRING === style.charAt(i - 1)) {
      return error('End of comment missing');
    }

    const str = style.slice(2, i - 2);
    column += 2;
    updatePosition(str);
    style = style.slice(i);
    column += 2;

    return pos({
      type: TYPE_COMMENT,
      comment: str,
    });
  }

  /**
   * Parse declaration.
   *
   * @return {Object}
   * @throws {Error}
   */
  function declaration() {
    const pos = position();

    // prop
    const prop = match(PROPERTY_REGEX);
    if (!prop) return;
    comment();

    // :
    if (!match(COLON_REGEX)) return error("property missing ':'");

    // val
    const val = match(VALUE_REGEX);

    const ret = pos({
      type: TYPE_DECLARATION,
      property: trim(prop[0].replace(COMMENT_REGEX, EMPTY_STRING)),
      value: val ? trim(val[0].replace(COMMENT_REGEX, EMPTY_STRING)) : EMPTY_STRING,
    });

    // ;
    match(SEMICOLON_REGEX);

    return ret;
  }

  /**
   * Parse declarations.
   *
   * @return {Object[]}
   */
  function declarations() {
    const decls = [];

    comments(decls);

    // declarations
    let decl;
    while ((decl = declaration())) {
      if (decl !== false) {
        decls.push(decl);
        comments(decls);
      }
    }

    return decls;
  }

  whitespace();
  return declarations();
}

/**
 * Trim `str`.
@ -274,5 +270,5 @@ export function parseInlineStyles(style, options) {

 * @return {String}
 */
function trim(str) {
  return str ? str.replace(TRIM_REGEX, EMPTY_STRING) : EMPTY_STRING;
}
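A quick hedged illustration of the parser above; the node shape follows from the `position()`, `comment()`, and `declaration()` helpers in this hunk.

```js
import { parseInlineStyles } from './parse-inline-styles.js';

// declarations and comments come back as typed nodes with position info attached
const nodes = parseInlineStyles('color: red; /* note */ margin: 0');
// expected shape (positions elided):
// [
//   { type: 'declaration', property: 'color', value: 'red', position: ... },
//   { type: 'comment', comment: ' note ', position: ... },
//   { type: 'declaration', property: 'margin', value: '0', position: ... },
// ]
```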
@ -28,7 +28,7 @@

 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

import { parseInlineStyles } from './parse-inline-styles.js';

/**
 * Parses inline style to object.
@ -42,29 +42,29 @@ import { parseInlineStyles } from "./parse-inline-styles.js";

 * @return {null|Object}
 */
export function styleToObject(style, iterator) {
  let output = null;
  if (!style || typeof style !== 'string') {
    return output;
  }

  let declaration;
  let declarations = parseInlineStyles(style);
  let hasIterator = typeof iterator === 'function';
  let property;
  let value;

  for (let i = 0, len = declarations.length; i < len; i++) {
    declaration = declarations[i];
    property = declaration.property;
    value = declaration.value;

    if (hasIterator) {
      iterator(property, value, declaration);
    } else if (value) {
      output || (output = {});
      output[property] = value;
    }
  }

  return output;
}
@ -1,2 +1,2 @@

export { htmlTag } from './tagdefs/html.tag';
export { htmlTokenTransform } from './transform/html-token-transform';
@ -1,32 +1,30 @@

import type { Config, Schema } from '@markdoc/markdoc';
import Markdoc from '@markdoc/markdoc';

// local
import { parseInlineCSSToReactLikeObject } from '../css/parse-inline-css-to-react.js';

// a Markdoc tag that will render a given HTML element and its attributes, as produced by the htmlTokenTransform function
export const htmlTag: Schema<Config, never> = {
  attributes: {
    name: { type: String, required: true },
    attrs: { type: Object },
  },

  transform(node, config) {
    const { name, attrs: unsafeAttributes } = node.attributes;
    const children = node.transformChildren(config);

    // pull out any "unsafe" attributes which need additional processing
    const { style, ...safeAttributes } = unsafeAttributes as Record<string, unknown>;

    // if the inline "style" attribute is present we need to parse the HTML into a react-like React.CSSProperties object
    if (typeof style === 'string') {
      const styleObject = parseInlineCSSToReactLikeObject(style);
      safeAttributes.style = styleObject;
    }

    // create a Markdoc Tag for the given HTML node with the HTML attributes and children
    return new Markdoc.Tag(name, safeAttributes, children);
  },
};
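A brief hedged sketch of how this schema is meant to be wired up: it is registered under the `html-tag` tag name, mirroring the `HTML_CONFIG` partial that appears later in this diff, and its transform replaces a raw `style` string with the React-like style object.

```ts
import { htmlTag } from './html/tagdefs/html.tag.js';

// illustrative only: register the schema under the tag name emitted by htmlTokenTransform
const config = {
  tags: {
    'html-tag': htmlTag,
  },
};
// e.g. a node with attributes { name: 'span', attrs: { style: 'color: red' } }
// transforms into roughly new Markdoc.Tag('span', { style: { color: 'red' } }, children)
```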
@ -1,256 +1,247 @@

import { Tokenizer } from '@markdoc/markdoc';
import { Parser } from 'htmlparser2';
import type * as Token from 'markdown-it/lib/token';

export function htmlTokenTransform(tokenizer: Tokenizer, tokens: Token[]): Token[] {
  const output: Token[] = [];

  // hold a lazy buffer of text and process it only when necessary
  let textBuffer = '';

  let inCDATA = false;

  const appendText = (text: string) => {
    textBuffer += text;
  };

  // process the current text buffer w/ Markdoc's Tokenizer for tokens
  const processTextBuffer = () => {
    if (textBuffer.length > 0) {
      // tokenize the text buffer to look for structural markup tokens
      const toks = tokenizer.tokenize(textBuffer);

      // when we tokenize some raw text content, it's basically treated like Markdown, and will result in a paragraph wrapper, which we don't want
      // in this scenario, we just want to generate a text token, but, we have to tokenize it in case there's other structural markup
      if (toks.length === 3) {
        const first = toks[0];
        const second = toks[1];
        const third: Token | undefined = toks.at(2);

        if (
          first.type === 'paragraph_open' &&
          second.type === 'inline' &&
          third &&
          third.type === 'paragraph_close' &&
          Array.isArray(second.children)
        ) {
          for (const tok of second.children as Token[]) {
            // if the given token is a 'text' token and its trimmed content is the same as the pre-tokenized text buffer, use the original
            // text buffer instead to preserve leading/trailing whitespace that is lost during tokenization of pure text content
            if (tok.type === 'text') {
              if (tok.content.trim() == textBuffer.trim()) {
                tok.content = textBuffer;
              }
            }
            output.push(tok);
          }
        } else {
          // some other markup that happened to be 3 tokens, push tokens as-is
          for (const tok of toks) {
            output.push(tok);
          }
        }
      } else {
        // some other tokenized markup, push tokens as-is
        for (const tok of toks) {
          output.push(tok);
        }
      }

      // reset the current lazy text buffer
      textBuffer = '';
    }
  };

  // create an incremental HTML parser that tracks HTML tag open, close and text content
  const parser = new Parser(
    {
      oncdatastart() {
        inCDATA = true;
      },

      oncdataend() {
        inCDATA = false;
      },

      // when an HTML tag opens...
      onopentag(name, attrs) {
        // process any buffered text to be treated as text node before the currently opening HTML tag
        processTextBuffer();

        // push an 'html-tag' 'tag_open' Markdoc node instance for the currently opening HTML tag onto the resulting Token stack
        output.push({
          type: 'tag_open',
          nesting: 1,
          meta: {
            tag: 'html-tag',
            attributes: [
              { type: 'attribute', name: 'name', value: name },
              { type: 'attribute', name: 'attrs', value: attrs },
            ],
          },
        } as Token);
      },

      ontext(content: string | null | undefined) {
        if (inCDATA) {
          // ignore entirely while inside CDATA
          return;
        }

        // only accumulate text into the buffer if we're not under an ignored HTML element
        if (typeof content === 'string') {
          appendText(content);
        }
      },

      // when an HTML tag closes...
      onclosetag(name) {
        // process any buffered text to be treated as a text node inside the currently closing HTML tag
        processTextBuffer();

        // push an 'html-tag' 'tag_close' Markdoc node instance for the currently closing HTML tag onto the resulting Token stack
        output.push({
          type: 'tag_close',
          nesting: -1,
          meta: {
            tag: 'html-tag',
            attributes: [{ type: 'attribute', name: 'name', value: name }],
          },
        } as Token);
      },
    },
    {
      decodeEntities: false,
      recognizeCDATA: true,
      recognizeSelfClosing: true,
    }
  );

  // for every detected token...
  for (const token of tokens) {
    // if it was an HTML token, write the HTML text into the HTML parser
    if (token.type.startsWith('html')) {
      // as the parser encounters opening/closing HTML tags, it will push Markdoc Tag nodes into the output stack
      parser.write(token.content);

      // continue loop... IMPORTANT! we're throwing away the original 'html' tokens here (raw HTML strings), since the parser is inserting new ones based on the parsed HTML
      continue;
    }

    // process any child content for HTML
    if (token.type === 'inline') {
      if (token.children) {
        token.children = htmlTokenTransform(tokenizer, token.children);
      }
    }

    // not an HTML Token, preserve it at the current stack location
    output.push(token);
  }

  // process any remaining buffered text
  processTextBuffer();

  //
  // post-process the current levels output Token[] array to un-wind this pattern:
  //
  // [
  //   { type: tag_open, meta.tag: html-tag },
  //   { type: paragraph_open },
  //   { type: inline, children [...] },
  //   { type: paragraph_close },
  //   { type: tag_close, meta.tag: html-tag }
  // ]
  //
  // the paragraph_open, inline, paragraph_close triplet needs to be replaced by the children of the inline node
  //
  // this is extra, unwanted paragraph wrapping unfortunately introduced by markdown-it during processing w/ HTML enabled
  //

  mutateAndCollapseExtraParagraphsUnderHtml(output);

  return output;
}

function mutateAndCollapseExtraParagraphsUnderHtml(tokens: Token[]): void {
  let done = false;

  while (!done) {
    const idx = findExtraParagraphUnderHtml(tokens);
    if (typeof idx === 'number') {
      // mutate

      const actualChildTokens = tokens[idx + 2].children ?? [];

      tokens.splice(idx, 5, ...actualChildTokens);
    } else {
      done = true;
    }
  }
}

/**
 *
 * @param token
 * @returns
 */
function findExtraParagraphUnderHtml(tokens: Token[]): number | null {
  if (tokens.length < 5) {
    return null;
  }

  for (let i = 0; i < tokens.length; i++) {
    const last = i + 4;
    if (last > tokens.length - 1) {
      break; // early exit, no more possible 5-long slices to search
    }

    const slice = tokens.slice(i, last + 1);
    const isMatch = isExtraParagraphPatternMatch(slice);
    if (isMatch) {
      return i;
    }
  }

  return null;
}

function isExtraParagraphPatternMatch(slice: Token[]): boolean {
  const match =
    isHtmlTagOpen(slice[0]) &&
    isParagraphOpen(slice[1]) &&
    isInline(slice[2]) &&
    isParagraphClose(slice[3]) &&
    isHtmlTagClose(slice[4]);
  return match;
}

function isHtmlTagOpen(token: Token): boolean {
  return token.type === 'tag_open' && token.meta && token.meta.tag === 'html-tag';
}

function isHtmlTagClose(token: Token): boolean {
  return token.type === 'tag_close' && token.meta && token.meta.tag === 'html-tag';
}

function isParagraphOpen(token: Token): boolean {
  return token.type === 'paragraph_open';
}

function isParagraphClose(token: Token): boolean {
  return token.type === 'paragraph_close';
}

function isInline(token: Token): boolean {
  return token.type === 'inline';
}
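For readers following the transform, a compact hedged sketch of how it is used and what the paragraph collapsing does; this mirrors the `getRenderModule` hunk and the pattern described in the comments above.

```ts
const tokenizer = getMarkdocTokenizer({ allowHTML: true });
let tokens = tokenizer.tokenize('<div id="box">hello **world**</div>');

// tokens whose type starts with 'html' are replaced by 'html-tag' tag_open/tag_close tokens,
// and the markdown-it paragraph wrapper inside the div is collapsed, leaving roughly:
// [ tag_open(html-tag: div), ...inline children of the wrapped paragraph, tag_close(html-tag: div) ]
tokens = htmlTokenTransform(tokenizer, tokens);
```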
@ -25,7 +25,9 @@ export default function markdocIntegration(options?: MarkdocIntegrationOptions):

      markdocConfigResult = await loadMarkdocConfig(astroConfig);

      addContentEntryType(
        await getContentEntryType({ markdocConfigResult, astroConfig, options })
      );

      updateConfig({
        vite: {
@ -1,3 +1,3 @@

export interface MarkdocIntegrationOptions {
  allowHTML?: boolean;
}
@ -10,15 +10,18 @@ import type { AstroInstance } from 'astro';

import { createComponent, renderComponent } from 'astro/runtime/server/index.js';
import type { AstroMarkdocConfig } from './config.js';
import { setupHeadingConfig } from './heading-ids.js';
import { htmlTag } from './html/tagdefs/html.tag.js';
import type { MarkdocIntegrationOptions } from './options.js';

/**
 * Merge user config with default config and set up context (ex. heading ID slugger)
 * Called on each file's individual transform.
 * TODO: virtual module to merge configs per-build instead of per-file?
 */
export async function setupConfig(
  userConfig: AstroMarkdocConfig = {},
  options: MarkdocIntegrationOptions | undefined
): Promise<MergedConfig> {
  let defaultConfig: AstroMarkdocConfig = setupHeadingConfig();

  if (userConfig.extends) {
@ -33,24 +36,27 @@ export async function setupConfig(userConfig: AstroMarkdocConfig = {}, options:

  let merged = mergeConfig(defaultConfig, userConfig);

  if (options?.allowHTML) {
    merged = mergeConfig(merged, HTML_CONFIG);
  }

  return merged;
}

/** Used for synchronous `getHeadings()` function */
export function setupConfigSync(
  userConfig: AstroMarkdocConfig = {},
  options: MarkdocIntegrationOptions | undefined
): MergedConfig {
  const defaultConfig: AstroMarkdocConfig = setupHeadingConfig();

  let merged = mergeConfig(defaultConfig, userConfig);

  if (options?.allowHTML) {
    merged = mergeConfig(merged, HTML_CONFIG);
  }

  return merged;
}

type MergedConfig = Required<Omit<AstroMarkdocConfig, 'extends'>>;
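A brief hedged sketch of the effect of the `allowHTML` branch above: when the option is set, the merged Markdoc config picks up the `html-tag` tag registered by `HTML_CONFIG` further down in this file.

```ts
const merged = await setupConfig({ /* user config */ }, { allowHTML: true });
// merged.tags['html-tag'] should now be the htmlTag schema, so the tag_open/tag_close
// tokens emitted by htmlTokenTransform can be rendered back into real HTML elements
```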
@ -160,7 +166,11 @@ export function collectHeadings(

  }
}

export function createGetHeadings(
  stringifiedAst: string,
  userConfig: AstroMarkdocConfig,
  options: MarkdocIntegrationOptions | undefined
) {
  return function getHeadings() {
    /* Yes, we are transforming twice (once from `getHeadings()` and again from <Content /> in case of variables).
    TODO: propose new `render()` API to allow Markdoc variable passing to `render()` itself,
@ -178,7 +188,7 @@ export function createContentComponent(

  Renderer: AstroInstance['default'],
  stringifiedAst: string,
  userConfig: AstroMarkdocConfig,
  options: MarkdocIntegrationOptions | undefined,
  tagComponentMap: Record<string, AstroInstance['default']>,
  nodeComponentMap: Record<NodeType, AstroInstance['default']>
) {
@ -199,7 +209,7 @@ export function createContentComponent(

// statically define a partial MarkdocConfig which registers the required "html-tag" Markdoc tag when the "allowHTML" feature is enabled
const HTML_CONFIG: AstroMarkdocConfig = {
  tags: {
    'html-tag': htmlTag,
  },
};
@ -5,34 +5,32 @@ import type { MarkdocIntegrationOptions } from './options.js';

type TokenizerOptions = ConstructorParameters<typeof Tokenizer>[0];

export function getMarkdocTokenizer(options: MarkdocIntegrationOptions | undefined): Tokenizer {
  const key = cacheKey(options);

  if (!_cachedMarkdocTokenizers[key]) {
    const tokenizerOptions: TokenizerOptions = {
      // Strip <!-- comments --> from rendered output
      // Without this, they're rendered as strings!
      allowComments: true,
    };

    if (options?.allowHTML) {
      // we want to allow indentation for Markdoc tags that are interleaved inside HTML block elements
      tokenizerOptions.allowIndentation = true;
      // enable HTML token detection in markdown-it
      tokenizerOptions.html = true;
    }

    _cachedMarkdocTokenizers[key] = new Markdoc.Tokenizer(tokenizerOptions);
  }

  return _cachedMarkdocTokenizers[key];
}

// create this on-demand when needed since it relies on the runtime MarkdocIntegrationOptions and may change during
// the life of module in certain scenarios (unit tests, etc.)
let _cachedMarkdocTokenizers: Record<string, Tokenizer> = {};

function cacheKey(options: MarkdocIntegrationOptions | undefined): string {
  return JSON.stringify(options);
}
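A small hedged illustration of the caching behavior above: tokenizers are memoized per serialized options object, so repeated calls with equal options reuse one `Markdoc.Tokenizer` instance.

```ts
const a = getMarkdocTokenizer({ allowHTML: true });
const b = getMarkdocTokenizer({ allowHTML: true });
const c = getMarkdocTokenizer(undefined);

// a === b  -> true: same cache key (JSON.stringify of the options)
// a === c  -> false: different key, so a separate tokenizer without HTML support
```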
@ -3,194 +3,186 @@ import { expect } from 'chai';

import { loadFixture } from '../../../astro/test/test-utils.js';

async function getFixture(name) {
  return await loadFixture({
    root: new URL(`./fixtures/${name}/`, import.meta.url),
  });
}

describe('Markdoc - render html', () => {
  let fixture;

  before(async () => {
    fixture = await getFixture('render-html');
  });

  describe('dev', () => {
    let devServer;

    before(async () => {
      devServer = await fixture.startDevServer();
    });

    after(async () => {
      await devServer.stop();
    });

    it('renders content - simple', async () => {
      const res = await fixture.fetch('/simple');
      const html = await res.text();

      renderSimpleChecks(html);
    });

    it('renders content - nested-html', async () => {
      const res = await fixture.fetch('/nested-html');
      const html = await res.text();

      renderNestedHTMLChecks(html);
    });

    it('renders content - components interleaved with html', async () => {
      const res = await fixture.fetch('/components');
      const html = await res.text();

      renderComponentsHTMLChecks(html);
    });

    it('renders content - randomly cased html attributes', async () => {
      const res = await fixture.fetch('/randomly-cased-html-attributes');
      const html = await res.text();

      renderRandomlyCasedHTMLAttributesChecks(html);
    });
  });

  describe('build', () => {
    before(async () => {
      await fixture.build();
    });

    it('renders content - simple', async () => {
      const html = await fixture.readFile('/simple/index.html');

      renderSimpleChecks(html);
    });

    it('renders content - nested-html', async () => {
      const html = await fixture.readFile('/nested-html/index.html');

      renderNestedHTMLChecks(html);
    });

    it('renders content - components interleaved with html', async () => {
      const html = await fixture.readFile('/components/index.html');

      renderComponentsHTMLChecks(html);
    });

    it('renders content - randomly cased html attributes', async () => {
      const html = await fixture.readFile('/randomly-cased-html-attributes/index.html');

      renderRandomlyCasedHTMLAttributesChecks(html);
    });
  });
});

/** @param {string} html */
function renderSimpleChecks(html) {
  const { document } = parseHTML(html);

  const h2 = document.querySelector('h2');
  expect(h2.textContent).to.equal('Simple post header');

  const spanInsideH2 = document.querySelector('h2 > span');
  expect(spanInsideH2.textContent).to.equal('post');
  expect(spanInsideH2.className).to.equal('inside-h2');
  expect(spanInsideH2.style.color).to.equal('fuscia');

  const p1 = document.querySelector('article > p:nth-of-type(1)');
  expect(p1.children.length).to.equal(1);
  expect(p1.textContent).to.equal('This is a simple Markdoc post.');

  const p2 = document.querySelector('article > p:nth-of-type(2)');
  expect(p2.children.length).to.equal(0);
  expect(p2.textContent).to.equal('This is a paragraph!');

  const p3 = document.querySelector('article > p:nth-of-type(3)');
  expect(p3.children.length).to.equal(1);
  expect(p3.textContent).to.equal('This is a span inside a paragraph!');
}

/** @param {string} html */
function renderNestedHTMLChecks(html) {
  const { document } = parseHTML(html);

  const p1 = document.querySelector('p:nth-of-type(1)');
  expect(p1.id).to.equal('p1');
  expect(p1.textContent).to.equal('before inner after');
  expect(p1.children.length).to.equal(1);

  const p1Span1 = p1.querySelector('span');
  expect(p1Span1.textContent).to.equal('inner');
  expect(p1Span1.id).to.equal('inner1');
  expect(p1Span1.className).to.equal('inner-class');
  expect(p1Span1.style.color).to.equal('hotpink');

  const p2 = document.querySelector('p:nth-of-type(2)');
  expect(p2.id).to.equal('p2');
  expect(p2.textContent).to.equal('\n before\n inner\n after\n');
  expect(p2.children.length).to.equal(1);

  const divL1 = document.querySelector('div:nth-of-type(1)');
  expect(divL1.id).to.equal('div-l1');
  expect(divL1.children.length).to.equal(2);

  const divL2_1 = divL1.querySelector('div:nth-of-type(1)');
  expect(divL2_1.id).to.equal('div-l2-1');
  expect(divL2_1.children.length).to.equal(1);

  const p3 = divL2_1.querySelector('p:nth-of-type(1)');
  expect(p3.id).to.equal('p3');
  expect(p3.textContent).to.equal('before inner after');
  expect(p3.children.length).to.equal(1);

  const divL2_2 = divL1.querySelector('div:nth-of-type(2)');
  expect(divL2_2.id).to.equal('div-l2-2');
  expect(divL2_2.children.length).to.equal(2);

  const p4 = divL2_2.querySelector('p:nth-of-type(1)');
  expect(p4.id).to.equal('p4');
  expect(p4.textContent).to.equal('before inner after');
  expect(p4.children.length).to.equal(1);

  const p5 = divL2_2.querySelector('p:nth-of-type(2)');
  expect(p5.id).to.equal('p5');
  expect(p5.textContent).to.equal('before inner after');
  expect(p5.children.length).to.equal(1);
}

/**
 *
 * @param {string} html */
function renderRandomlyCasedHTMLAttributesChecks(html) {
  const { document } = parseHTML(html);

  const td1 = document.querySelector('#td1');
  const td2 = document.querySelector('#td1');
  const td3 = document.querySelector('#td1');
  const td4 = document.querySelector('#td1');

  // all four <td>'s which had randomly cased variants of colspan/rowspan should all be rendered lowercased at this point

  expect(td1.getAttribute('colspan')).to.equal('3');
  expect(td1.getAttribute('rowspan')).to.equal('2');

  expect(td2.getAttribute('colspan')).to.equal('3');
  expect(td2.getAttribute('rowspan')).to.equal('2');

  expect(td3.getAttribute('colspan')).to.equal('3');
  expect(td3.getAttribute('rowspan')).to.equal('2');

  expect(td4.getAttribute('colspan')).to.equal('3');
  expect(td4.getAttribute('rowspan')).to.equal('2');
}

/**
@ -198,96 +190,98 @@ function renderRandomlyCasedHTMLAttributesChecks(html) {
|
|||
*
|
||||
* @param {string} html */
|
||||
function renderComponentsHTMLChecks(html) {
|
||||
const { document } = parseHTML(html);
|
||||
const { document } = parseHTML(html);
|
||||
|
||||
const naturalP1 = document.querySelector('article > p:nth-of-type(1)');
|
||||
expect(naturalP1.textContent).to.equal('This is a inline mark in regular Markdown markup.');
|
||||
expect(naturalP1.children.length).to.equal(1);
|
||||
const naturalP1 = document.querySelector('article > p:nth-of-type(1)');
|
||||
expect(naturalP1.textContent).to.equal('This is a inline mark in regular Markdown markup.');
|
||||
expect(naturalP1.children.length).to.equal(1);
|
||||
|
||||
const p1 = document.querySelector('article > p:nth-of-type(2)');
|
||||
expect(p1.id).to.equal('p1');
|
||||
expect(p1.textContent).to.equal('This is a inline mark under some HTML');
|
||||
expect(p1.children.length).to.equal(1);
|
||||
assertInlineMark(p1.children[0]);
|
||||
const p1 = document.querySelector('article > p:nth-of-type(2)');
|
||||
expect(p1.id).to.equal('p1');
|
||||
expect(p1.textContent).to.equal('This is a inline mark under some HTML');
|
||||
expect(p1.children.length).to.equal(1);
|
||||
assertInlineMark(p1.children[0]);
|
||||
|
||||
const div1p1 = document.querySelector('article > #div1 > p:nth-of-type(1)');
|
||||
expect(div1p1.id).to.equal('div1-p1');
|
||||
expect(div1p1.textContent).to.equal('This is a inline mark under some HTML');
|
||||
expect(div1p1.children.length).to.equal(1);
|
||||
assertInlineMark(div1p1.children[0]);
|
||||
const div1p1 = document.querySelector('article > #div1 > p:nth-of-type(1)');
|
||||
expect(div1p1.id).to.equal('div1-p1');
|
||||
expect(div1p1.textContent).to.equal('This is a inline mark under some HTML');
|
||||
expect(div1p1.children.length).to.equal(1);
|
||||
assertInlineMark(div1p1.children[0]);
|
||||
|
||||
const div1p2 = document.querySelector('article > #div1 > p:nth-of-type(2)');
|
||||
expect(div1p2.id).to.equal('div1-p2');
|
||||
expect(div1p2.textContent).to.equal('This is a inline mark under some HTML');
|
||||
expect(div1p2.children.length).to.equal(1);
|
||||
const div1p2 = document.querySelector('article > #div1 > p:nth-of-type(2)');
|
||||
expect(div1p2.id).to.equal('div1-p2');
|
||||
expect(div1p2.textContent).to.equal('This is a inline mark under some HTML');
|
||||
expect(div1p2.children.length).to.equal(1);
|
||||
|
||||
const div1p2span1 = div1p2.querySelector('span');
|
||||
expect(div1p2span1.id).to.equal('div1-p2-span1');
|
||||
expect(div1p2span1.textContent).to.equal('inline mark');
|
||||
expect(div1p2span1.children.length).to.equal(1);
|
||||
assertInlineMark(div1p2span1.children[0]);
|
||||
const div1p2span1 = div1p2.querySelector('span');
|
||||
expect(div1p2span1.id).to.equal('div1-p2-span1');
|
||||
expect(div1p2span1.textContent).to.equal('inline mark');
|
||||
expect(div1p2span1.children.length).to.equal(1);
|
||||
assertInlineMark(div1p2span1.children[0]);
|
||||
|
||||

	const aside1 = document.querySelector('article > aside:nth-of-type(1)');
	const aside1Title = aside1.querySelector('p.title');
	expect(aside1Title.textContent.trim()).to.equal('Aside One');
	const aside1Section = aside1.querySelector('section');
	const aside1SectionP1 = aside1Section.querySelector('p:nth-of-type(1)');
	expect(aside1SectionP1.textContent).to.equal(
		"I'm a Markdown paragraph inside an top-level aside tag"
	);
	const aside1H2_1 = aside1Section.querySelector('h2:nth-of-type(1)');
	expect(aside1H2_1.id).to.equal('im-an-h2-via-markdown-markup'); // automatic slug
	expect(aside1H2_1.textContent).to.equal("I'm an H2 via Markdown markup");
	const aside1H2_2 = aside1Section.querySelector('h2:nth-of-type(2)');
	expect(aside1H2_2.id).to.equal('h-two');
	expect(aside1H2_2.textContent).to.equal("I'm an H2 via HTML markup");
	const aside1SectionP2 = aside1Section.querySelector('p:nth-of-type(2)');
	expect(aside1SectionP2.textContent).to.equal('Markdown bold vs HTML bold');
	expect(aside1SectionP2.children.length).to.equal(2);
	const aside1SectionP2Strong1 = aside1SectionP2.querySelector('strong:nth-of-type(1)');
	expect(aside1SectionP2Strong1.textContent).to.equal('Markdown bold');
	const aside1SectionP2Strong2 = aside1SectionP2.querySelector('strong:nth-of-type(2)');
	expect(aside1SectionP2Strong2.textContent).to.equal('HTML bold');

	const article = document.querySelector('article');
	expect(article.textContent).to.contain('RENDERED');
	expect(article.textContent).to.not.contain('NOT RENDERED');

	const section1 = document.querySelector('article > #section1');
	const section1div1 = section1.querySelector('#div1');
	const section1Aside1 = section1div1.querySelector('aside:nth-of-type(1)');
	const section1Aside1Title = section1Aside1.querySelector('p.title');
	expect(section1Aside1Title.textContent.trim()).to.equal('Nested un-indented Aside');
	const section1Aside1Section = section1Aside1.querySelector('section');
	const section1Aside1SectionP1 = section1Aside1Section.querySelector('p:nth-of-type(1)');
	expect(section1Aside1SectionP1.textContent).to.equal('regular Markdown markup');
	const section1Aside1SectionP4 = section1Aside1Section.querySelector('p:nth-of-type(2)');
	expect(section1Aside1SectionP4.textContent).to.equal('nested inline mark content');
	expect(section1Aside1SectionP4.children.length).to.equal(1);
	assertInlineMark(section1Aside1SectionP4.children[0]);

	const section1div2 = section1.querySelector('#div2');
	const section1Aside2 = section1div2.querySelector('aside:nth-of-type(1)');
	const section1Aside2Title = section1Aside2.querySelector('p.title');
	expect(section1Aside2Title.textContent.trim()).to.equal('Nested indented Aside 💀');
	const section1Aside2Section = section1Aside2.querySelector('section');
	const section1Aside2SectionP1 = section1Aside2Section.querySelector('p:nth-of-type(1)');
	expect(section1Aside2SectionP1.textContent).to.equal('regular Markdown markup');
	const section1Aside1SectionP5 = section1Aside2Section.querySelector('p:nth-of-type(2)');
	expect(section1Aside1SectionP5.id).to.equal('p5');
	expect(section1Aside1SectionP5.children.length).to.equal(1);
	const section1Aside1SectionP5Span1 = section1Aside1SectionP5.children[0];
	expect(section1Aside1SectionP5Span1.textContent).to.equal('inline mark');
	expect(section1Aside1SectionP5Span1.children.length).to.equal(1);
	const section1Aside1SectionP5Span1Span1 = section1Aside1SectionP5Span1.children[0];
	expect(section1Aside1SectionP5Span1Span1.textContent).to.equal(' mark');
}

/** @param {HTMLElement | null | undefined} el */
function assertInlineMark(el) {
	expect(el).to.not.be.null;
	expect(el).to.not.be.undefined;
	expect(el.children.length).to.equal(0);
	expect(el.textContent).to.equal('inline mark');
	expect(el.className).to.equal('mark');
	expect(el.style.color).to.equal('hotpink');
}