From 578b1d47ef75b1df85991e375da5bb0973b9c7f0 Mon Sep 17 00:00:00 2001
From: Damian Tarnawski
Date: Mon, 4 Mar 2024 23:10:12 +0100
Subject: [PATCH] Move to a single file

---
 build.js             |  21 ++-
 mds/t.d.ts           |  54 --------
 mds/t.js             |   1 -
 package.json         |  12 +-
 readme.md            |  12 +-
 mds/mds.js => smd.js | 109 +++++++++++----
 test.js              | 322 +++++++++++++++++++++----------------------
 7 files changed, 264 insertions(+), 267 deletions(-)
 delete mode 100644 mds/t.d.ts
 delete mode 100644 mds/t.js
 rename mds/mds.js => smd.js (86%)

diff --git a/build.js b/build.js
index 2eda6c0..39b028a 100644
--- a/build.js
+++ b/build.js
@@ -3,9 +3,10 @@ import path from "node:path"
 import url from "node:url"
 import ts from "typescript"
 
-const dirname = path.dirname(url.fileURLToPath(import.meta.url))
-const src_dir = path.join(dirname, "mds")
-const src_entry = path.join(src_dir, "mds.js")
+const dirname       = path.dirname(url.fileURLToPath(import.meta.url))
+const file_js_path  = path.join(dirname, "smd.js")
+const file_dts_path = path.join(dirname, "smd.d.ts")
+const file_map_path = path.join(dirname, "smd.d.ts.map")
 
 /** @type {ts.CompilerOptions} */
 const ts_options = {
@@ -23,18 +24,12 @@ const ts_options = {
 function main() {
 	const begin = performance.now()
 
-	// Remove old .d.ts files except t.d.ts
-	const files = fs.readdirSync(src_dir)
-	for (const file of files) {
-		if ((file.endsWith(".d.ts") && file !== "t.d.ts") ||
-			file.endsWith(".d.ts.map")
-		) {
-			fs.unlinkSync(path.join(src_dir, file))
-		}
-	}
+	// Remove old .d.ts files
+	if (fs.existsSync(file_dts_path)) fs.unlinkSync(file_dts_path)
+	if (fs.existsSync(file_map_path)) fs.unlinkSync(file_map_path)
 
 	// Emit d.ts files
-	const program = ts.createProgram([src_entry], ts_options)
+	const program = ts.createProgram([file_js_path], ts_options)
 	program.emit()
 	console.log(`DTS complete in ${Math.ceil(performance.now() - begin)}ms`)
 }
diff --git a/mds/t.d.ts b/mds/t.d.ts
deleted file mode 100644
index bde3d6f..0000000
--- a/mds/t.d.ts
+++ /dev/null
@@ -1,54 +0,0 @@
-import {Attr, Token} from './mds.js'
-
-export type Parser = {
-	/** {@link Renderer} interface */
-	renderer : Any_Renderer
-	/** Text to be added to the last token in the next flush */
-	text : string
-	/** Characters for identifying tokens */
-	pending : string
-	/** Current token and it's parents (a slice of a tree) */
-	types : Token[]
-	/** Number of tokens in {@link Parser.types types} without root */
-	len : number
-	/** For Code_Fence parsing */
-	code_fence_body: 0 | 1
-	backticks_count: number
-	/* For Blockquote parsing */
-	newline_blockquote_idx: number
-	/* For horizontal rule parsing */
-	hr_char: string
-	hr_chars: number
-}
-
-export type Renderer_Add_Token<TData> = (data: TData, type: Token) => void
-export type Renderer_End_Token<TData> = (data: TData) => void
-export type Renderer_Add_Text<TData> = (data: TData, text: string) => void
-export type Renderer_Set_Attr<TData> = (data: TData, type: Attr, value: string) => void
-
-export type Renderer<TData> = {
-	data : TData
-	add_token: Renderer_Add_Token<TData>
-	end_token: Renderer_End_Token<TData>
-	add_text : Renderer_Add_Text<TData>
-	set_attr : Renderer_Set_Attr<TData>
-}
-
-export type Any_Renderer = Renderer<any>
-
-export type Default_Renderer_Data = {
-	nodes: HTMLElement[]
-	index: number
-}
-export type Default_Renderer = Renderer<Default_Renderer_Data>
-export type Default_Renderer_Add_Token = Renderer_Add_Token<Default_Renderer_Data>
-export type Default_Renderer_End_Token = Renderer_End_Token<Default_Renderer_Data>
-export type Default_Renderer_Add_Text = Renderer_Add_Text<Default_Renderer_Data>
-export type Default_Renderer_Set_Attr = Renderer_Set_Attr<Default_Renderer_Data>
-
-export type Logger_Renderer_Data = undefined
-export type Logger_Renderer = Renderer<Logger_Renderer_Data>
-export type Logger_Renderer_Add_Token = Renderer_Add_Token<Logger_Renderer_Data>
-export type Logger_Renderer_End_Token = Renderer_End_Token<Logger_Renderer_Data>
-export type Logger_Renderer_Add_Text = Renderer_Add_Text<Logger_Renderer_Data>
-export type Logger_Renderer_Set_Attr = Renderer_Set_Attr<Logger_Renderer_Data>
diff --git a/mds/t.js b/mds/t.js
deleted file mode 100644
index 693da49..0000000
--- a/mds/t.js
+++ /dev/null
@@ -1 +0,0 @@
-export {}
\ No newline at end of file
diff --git a/package.json b/package.json
index b9bd10b..c3095ae 100644
--- a/package.json
+++ b/package.json
@@ -13,14 +13,16 @@
 	"sideEffects": false,
 	"type": "module",
 	"files": [
-		"mds"
+		"./smd.js",
+		"./smd.d.ts",
+		"./smd.d.ts.map"
 	],
-	"module": "./mds/mds.js",
-	"types": "./mds/mds.d.ts",
+	"module": "./smd.js",
+	"types": "./smd.d.ts",
 	"exports": {
 		"import": {
-			"types": "./mds/mds.d.ts",
-			"default": "./mds/mds.js"
+			"types": "./smd.d.ts",
+			"default": "./smd.js"
 		}
 	},
 	"scripts": {
diff --git a/readme.md b/readme.md
index ca7c039..f661982 100644
--- a/readme.md
+++ b/readme.md
@@ -14,7 +14,7 @@ Install `streaming-markdown` package from npm.
 npm install streaming-markdown
 ```
 
-*Or just copy [**`mds`**](https://github.com/thetarnav/streaming-markdown/blob/main/mds) dir to your project.*
+*Or just copy [**`smd`**](https://github.com/thetarnav/streaming-markdown/blob/main/smd.js) file to your project.*
 
 ## Usage
 
 Its single argument is a `Renderer` object, which is an interface to render the parsed markdown tokens to the DOM.
 There are two built-in renderers—`default_renderer` and `logger_renderer`—that you can try at first.
 
 ```js
-import * as mds from "streaming-markdown"
+import * as smd from "streaming-markdown"
 
 const element = document.getElementById("markdown")
-const renderer = mds.default_renderer(element)
-const parser = mds.parser(renderer)
+const renderer = smd.default_renderer(element)
+const parser = smd.parser(renderer)
 ```
 
 ### `write` function
 
 Then, you can start streaming markdown to the `Parser` by calling `parser_write` function with the chunk of markdown string.
 
 ```js
-mds.parser_write(parser, "# Streaming Markdown\n\n")
+smd.parser_write(parser, "# Streaming Markdown\n\n")
 ```
 
 *You can write **as many times as you want** to stream the markdown.*
 
 Finally, you can end the stream by calling `end` function.
 It will reset the `Parser` state and flush the remaining markdown.
 ```js
-mds.parser_end(parser)
+smd.parser_end(parser)
 ```
 
 ## TODO
diff --git a/mds/mds.js b/smd.js
similarity index 86%
rename from mds/mds.js
rename to smd.js
index bb8ca70..be5273c 100644
--- a/mds/mds.js
+++ b/smd.js
@@ -5,8 +5,6 @@
 Copyright 2024 Damian Tarnawski
 https://github.com/thetarnav/streaming-markdown
 */
 
-export * from "./t.js"
-
 export const
 	DOCUMENT = 1, // 1
 	PARAGRAPH = 2, // 2
@@ -124,10 +122,60 @@ export function attr_to_html_attr(type) {
 }
 
 /**
- * @typedef {import("./t.js").Any_Renderer} Any_Renderer
- * @typedef {import("./t.js").Parser } Parser
+ * @typedef {object } Parser
+ * @property {Any_Renderer} renderer - {@link Renderer} interface
+ * @property {string } text - Text to be added to the last token in the next flush
+ * @property {string } pending - Characters for identifying tokens
+ * @property {Token[] } types - Current token and it's parents (a slice of a tree)
+ * @property {number } len - Number of tokens in types without root
+ * @property {0 | 1 } code_fence_body - For {@link Token.Code_Fence} parsing
+ * @property {number } backticks_count
+ * @property {number } blockquote_idx - For Blockquote parsing
+ * @property {string } hr_char - For horizontal rule parsing
+ * @property {number } hr_chars - For horizontal rule parsing
 */
 
+/**
+ * @template T
+ * @callback Renderer_Add_Token
+ * @param {T } data
+ * @param {Token} type
+ * @returns {void } */
+
+/**
+ * @template T
+ * @callback Renderer_End_Token
+ * @param {T } data
+ * @returns {void } */
+
+/**
+ * @template T
+ * @callback Renderer_Add_Text
+ * @param {T } data
+ * @param {string} text
+ * @returns {void } */
+
+/**
+ * @template T
+ * @callback Renderer_Set_Attr
+ * @param {T } data
+ * @param {Attr } type
+ * @param {string} value
+ * @returns {void } */
+
+/**
+ * The renderer interface.
+ * @template T
+ * @typedef {object } Renderer
+ * @property {T } data
+ * @property {Renderer_Add_Token<T>} add_token
+ * @property {Renderer_End_Token<T>} end_token
+ * @property {Renderer_Add_Text<T>} add_text
+ * @property {Renderer_Set_Attr<T>} set_attr
+ */
+
+/** @typedef {Renderer<any>} Any_Renderer */
+
 /**
 * Makes a new Parser object.
 * @param {Any_Renderer} renderer
 * @returns {Parser } */
 export function parser(renderer) {
@@ -140,7 +188,7 @@ export function parser(renderer) {
 		types : /**@type {*}*/([DOCUMENT,,,,,]),
 		len : 0,
 		code_fence_body: 0,
-		newline_blockquote_idx: 0,
+		blockquote_idx: 0,
 		hr_char : '',
 		hr_chars : 0,
 		backticks_count: 0,
@@ -195,7 +243,7 @@ function _parser_into_line_break(p) {
 	parser_add_text(p)
 	p.len += 1
 	p.types[p.len] = LINE_BREAK
-	p.newline_blockquote_idx = 0
+	p.blockquote_idx = 0
 }
 
 /**
@@ -223,30 +271,30 @@ export function parser_write(p, chunk) {
 		case ">":
 			p.pending = char
 
-			while (p.newline_blockquote_idx+1 < p.len-1) {
-				p.newline_blockquote_idx += 1
-				if (p.types[p.newline_blockquote_idx] === BLOCKQUOTE) {
+			while (p.blockquote_idx+1 < p.len-1) {
+				p.blockquote_idx += 1
+				if (p.types[p.blockquote_idx] === BLOCKQUOTE) {
 					continue chars
 				}
 			}
 
 			p.len -= 1 // end line break
-			while (p.newline_blockquote_idx < p.len) {
+			while (p.blockquote_idx < p.len) {
 				parser_end_token(p)
 			}
-			p.newline_blockquote_idx += 1
+			p.blockquote_idx += 1
 			p.backticks_count = 0
 			parser_add_token(p, BLOCKQUOTE)
 			continue
 		case "\n":
 			p.len -= 1 // end line break
-			while (p.newline_blockquote_idx < p.len) {
+			while (p.blockquote_idx < p.len) {
 				parser_end_token(p)
 			}
-			p.newline_blockquote_idx = 0
+			p.blockquote_idx = 0
 			p.backticks_count = 0
 			p.pending = char
 			continue
@@ -306,14 +354,14 @@ export function parser_write(p, chunk) {
 		case '>':
 			p.pending = char
 
-			while (p.newline_blockquote_idx+1 <= p.len) {
-				p.newline_blockquote_idx += 1
-				if (p.types[p.newline_blockquote_idx] === BLOCKQUOTE) {
+			while (p.blockquote_idx+1 <= p.len) {
+				p.blockquote_idx += 1
+				if (p.types[p.blockquote_idx] === BLOCKQUOTE) {
 					continue chars
 				}
 			}
-			p.newline_blockquote_idx += 1
+			p.blockquote_idx += 1
 			parser_add_token(p, BLOCKQUOTE)
 			continue
 		/* Horizontal Rule
@@ -769,12 +817,17 @@ export function parser_write(p, chunk) {
 	parser_add_text(p)
 }
 
+
 /**
- * @typedef {import("./t.js").Default_Renderer } Default_Renderer
- * @typedef {import("./t.js").Default_Renderer_Add_Token} Default_Renderer_Add_Token
- * @typedef {import("./t.js").Default_Renderer_End_Token} Default_Renderer_End_Token
- * @typedef {import("./t.js").Default_Renderer_Add_Text } Default_Renderer_Add_Text
- * @typedef {import("./t.js").Default_Renderer_Set_Attr } Default_Renderer_Set_Attr
+ * @typedef {object} Default_Renderer_Data
+ * @property {HTMLElement[]} nodes
+ * @property {number } index
+ *
+ * @typedef {Renderer<Default_Renderer_Data>} Default_Renderer
+ * @typedef {Renderer_Add_Token<Default_Renderer_Data>} Default_Renderer_Add_Token
+ * @typedef {Renderer_End_Token<Default_Renderer_Data>} Default_Renderer_End_Token
+ * @typedef {Renderer_Add_Text<Default_Renderer_Data>} Default_Renderer_Add_Text
+ * @typedef {Renderer_Set_Attr<Default_Renderer_Data>} Default_Renderer_Set_Attr
 */
 
 /**
@@ -848,11 +901,13 @@ export function default_set_attr(data, type, value) {
 /**
- * @typedef {import("./t.js").Logger_Renderer } Logger_Renderer
- * @typedef {import("./t.js").Logger_Renderer_Add_Token} Logger_Renderer_Add_Token
- * @typedef {import("./t.js").Logger_Renderer_End_Token} Logger_Renderer_End_Token
- * @typedef {import("./t.js").Logger_Renderer_Add_Text } Logger_Renderer_Add_Text
- * @typedef {import("./t.js").Logger_Renderer_Set_Attr } Logger_Renderer_Set_Attr
+ * @typedef {undefined} Logger_Renderer_Data
+ *
+ * @typedef {Renderer<Logger_Renderer_Data>} Logger_Renderer
+ * @typedef {Renderer_Add_Token<Logger_Renderer_Data>} Logger_Renderer_Add_Token
+ * @typedef {Renderer_End_Token<Logger_Renderer_Data>} Logger_Renderer_End_Token
+ * @typedef {Renderer_Add_Text<Logger_Renderer_Data>} Logger_Renderer_Add_Text
+ * @typedef {Renderer_Set_Attr<Logger_Renderer_Data>} Logger_Renderer_Set_Attr
 */
 
 /** @returns {Logger_Renderer} */
diff --git a/test.js b/test.js
index 91dee73..4f36d58 100644
--- a/test.js
+++ b/test.js
@@ -1,12 +1,12 @@
 import * as t from "node:test"
 import * as assert from "node:assert/strict"
 
-import * as mds from "./mds/mds.js"
+import * as smd from "./smd.js"
 
 /**
 * @typedef {(string | Test_Renderer_Node)[]} Children
 * @typedef {Map<Test_Renderer_Node, Test_Renderer_Node>} Parent_Map
- * @typedef {{[key in mds.Attr]?: string}} Node_Attrs
+ * @typedef {{[key in smd.Attr]?: string}} Node_Attrs
 *
 * @typedef {object} Test_Renderer_Data
 * @property {Test_Renderer_Node} root
@@ -14,22 +14,22 @@ import * as mds from "./mds/mds.js"
 * @property {Parent_Map } parent_map
 *
 * @typedef {object} Test_Renderer_Node
- * @property {mds.Token } type
+ * @property {smd.Token } type
 * @property {Children } children
 * @property {Node_Attrs=} attrs
 *
- * @typedef {mds.Renderer<Test_Renderer_Data>} Test_Renderer
- * @typedef {mds.Renderer_Add_Token<Test_Renderer_Data>} Test_Add_Token
- * @typedef {mds.Renderer_End_Token<Test_Renderer_Data>} Test_End_Token
- * @typedef {mds.Renderer_Add_Text<Test_Renderer_Data>} Test_Add_Text
- * @typedef {mds.Renderer_Set_Attr<Test_Renderer_Data>} Test_Set_Attr
+ * @typedef {smd.Renderer<Test_Renderer_Data>} Test_Renderer
+ * @typedef {smd.Renderer_Add_Token<Test_Renderer_Data>} Test_Add_Token
+ * @typedef {smd.Renderer_End_Token<Test_Renderer_Data>} Test_End_Token
+ * @typedef {smd.Renderer_Add_Text<Test_Renderer_Data>} Test_Add_Text
+ * @typedef {smd.Renderer_Set_Attr<Test_Renderer_Data>} Test_Set_Attr
 */
 
 /** @returns {Test_Renderer} */
 function test_renderer() {
 	/** @type {Test_Renderer_Node} */
 	const root = {
-		type : mds.Token.Document,
+		type : smd.Token.Document,
 		children: []
 	}
 	return {
@@ -80,7 +80,7 @@ function test_renderer_set_attr(data, type, value) {
 /** @type {Test_Renderer_Node} */
 const br = {
-	type : mds.Token.Line_Break,
+	type : smd.Token.Line_Break,
 	children: []
 }
 
@@ -132,13 +132,13 @@ function compare_push_node(node, lines, len, h) {
 }
 
 /**
- * @param {mds.Token} type
+ * @param {smd.Token} type
 * @param {string[]} lines
 * @param {number} len
 * @param {number} h
 * @returns {void} */
 function compare_push_type(type, lines, len, h) {
-	lines.push(compare_pad(len, h) + "\u001b[36m" + mds.token_to_string(type) + "\u001b[0m")
+	lines.push(compare_pad(len, h) + "\u001b[36m" + smd.token_to_string(type) + "\u001b[0m")
 }
 
 /**
@@ -258,22 +258,22 @@ function assert_children(children, expected_children) {
 function test_single_write(title, markdown, expected_children) {
 	t.test(title, () => {
 		const renderer = test_renderer()
-		const parser = mds.parser(renderer)
+		const parser = smd.parser(renderer)
 
-		mds.parser_write(parser, markdown)
-		mds.parser_end(parser)
+		smd.parser_write(parser, markdown)
+		smd.parser_end(parser)
 
 		assert_children(renderer.data.root.children, expected_children)
 	})
 
 	t.test(title + " - by char", () => {
 		const renderer = test_renderer()
-		const parser = mds.parser(renderer)
+		const parser = smd.parser(renderer)
 
 		for (const char of markdown) {
-			mds.parser_write(parser, char)
+			smd.parser_write(parser, char)
 		}
-		mds.parser_end(parser)
+		smd.parser_end(parser)
 
 		assert_children(renderer.data.root.children, expected_children)
 	})
 }
@@ -281,15 +281,15 @@ for (let level = 1; level <= 6; level += 1) {
-	/** @type {mds.Token} */
+	/** @type {smd.Token} */
 	let heading_type
 	switch (level) {
-	case 1: heading_type = mds.Token.Heading_1; break
-	case 2: heading_type = mds.Token.Heading_2; break
-	case 3: heading_type = mds.Token.Heading_3; break
-	case 4: heading_type = mds.Token.Heading_4; break
-	case 5: heading_type = mds.Token.Heading_5; break
-	case 6: heading_type = mds.Token.Heading_6; break
+	case 1: heading_type = smd.Token.Heading_1; break
+	case 2: heading_type =
smd.Token.Heading_2; break + case 3: heading_type = smd.Token.Heading_3; break + case 4: heading_type = smd.Token.Heading_4; break + case 5: heading_type = smd.Token.Heading_5; break + case 6: heading_type = smd.Token.Heading_6; break default: throw new Error("Invalid heading level") } @@ -306,7 +306,7 @@ for (let level = 1; level <= 6; level += 1) { [{ type : heading_type, children: ["foo ", { - type : mds.Token.Italic_Ast, + type : smd.Token.Italic_Ast, children: ["bar"] }] }] @@ -316,7 +316,7 @@ for (let level = 1; level <= 6; level += 1) { test_single_write("Line Breaks", "foo\nbar", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo", br, "bar"], }] ) @@ -324,9 +324,9 @@ test_single_write("Line Breaks", test_single_write("Line Breaks with Italic", "*a\nb*", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Italic_Ast, + type : smd.Token.Italic_Ast, children: ["a", br, "b"] }], }] @@ -335,7 +335,7 @@ test_single_write("Line Breaks with Italic", test_single_write("Escaped Line Breaks", "a\\\nb", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["a", br, "b"], }] ) @@ -343,10 +343,10 @@ test_single_write("Escaped Line Breaks", test_single_write("Paragraphs", "foo\n\nbar", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo"], }, { - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["bar"], }] ) @@ -354,7 +354,7 @@ test_single_write("Paragraphs", test_single_write("Paragraph trim leading spaces", " foo", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo"], }] ) @@ -362,7 +362,7 @@ test_single_write("Paragraph trim leading spaces", test_single_write("Trim too many spaces", "foo bar", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo bar"], }] ) @@ -370,9 +370,9 @@ test_single_write("Trim too many spaces", test_single_write("Trim too many spaces in italic", "*foo bar*", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Italic_Ast, + type : smd.Token.Italic_Ast, children: ["foo bar"] }], }] @@ -391,7 +391,7 @@ for (const c of ["*", "-", "_"]) { test_single_write('Horizontal Rule "' + txt + '"', txt, [{ - type : mds.Token.Horizontal_Rule, + type : smd.Token.Horizontal_Rule, children: [] }] ) @@ -401,10 +401,10 @@ for (const c of ["*", "-", "_"]) { test_single_write("Text after Horizontal Rule", "---\nfoo", [{ - type : mds.Token.Horizontal_Rule, + type : smd.Token.Horizontal_Rule, children: [] }, { - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo"], }] ) @@ -415,9 +415,9 @@ for (let l = 1; l <= 4; l += 1) { test_single_write("Code Inline" + " - "+l+" backticks", c + "a" + c, [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Code_Inline, + type : smd.Token.Code_Inline, children: ["a"] }], }] @@ -426,12 +426,12 @@ for (let l = 1; l <= 4; l += 1) { test_single_write("Code Inline x2" + " - "+l+" backticks", c+"a"+c+" "+c+"b"+c, [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Code_Inline, + type : smd.Token.Code_Inline, children: ["a"] }, " ", { - type : mds.Token.Code_Inline, + type : smd.Token.Code_Inline, children: ["b"] }], }] @@ -443,9 +443,9 @@ for (let l = 1; l <= 4; l += 1) { test_single_write("Code ` Inline" + " - "+l+" backticks", c + "a"+m+"b" + c, [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type 
: mds.Token.Code_Inline, + type : smd.Token.Code_Inline, children: ["a"+m+"b"] }], }] @@ -459,9 +459,9 @@ for (let l = 1; l <= 2; l += 1) { test_single_write("Code with line break" + " - "+l+" backticks", c + "a\nb" + c, [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Code_Inline, + type : smd.Token.Code_Inline, children: ["a", br, "b"] }], }] @@ -470,13 +470,13 @@ for (let l = 1; l <= 2; l += 1) { test_single_write("Code with two line breaks" + " - "+l+" backticks", c + "a\n\nb", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Code_Inline, + type : smd.Token.Code_Inline, children: ["a"] }], }, { - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["b"], }] ) @@ -488,7 +488,7 @@ for (let l = 3; l <= 5; l += 1) { test_single_write("Empty Code_Fence - " + l + " backticks", c+"\n"+c, [{ - type : mds.Token.Code_Fence, + type : smd.Token.Code_Fence, children: [] }] ) @@ -496,7 +496,7 @@ for (let l = 3; l <= 5; l += 1) { test_single_write("Code_Fence - " + l + " backticks", c+"\nfoo\n"+c, [{ - type : mds.Token.Code_Fence, + type : smd.Token.Code_Fence, children: ["foo"] }] ) @@ -504,9 +504,9 @@ for (let l = 3; l <= 5; l += 1) { test_single_write("Code_Fence with language - " + l + " backticks", c+"js\nfoo\n"+c, [{ - type : mds.Token.Code_Fence, + type : smd.Token.Code_Fence, children: ["foo"], - attrs : {[mds.Attr.Lang]: "js"} + attrs : {[smd.Attr.Lang]: "js"} }] ) @@ -515,7 +515,7 @@ for (let l = 3; l <= 5; l += 1) { test_single_write("Code_Fence escaped backticks - " + l + " backticks", c+"\n"+m+"\n"+c, [{ - type : mds.Token.Code_Fence, + type : smd.Token.Code_Fence, children: [m] }] ) @@ -523,7 +523,7 @@ for (let l = 3; l <= 5; l += 1) { test_single_write("Code_Fence with unfinished end backticks - " + l + " backticks", c+"\na\n"+m+"\n"+c, [{ - type : mds.Token.Code_Fence, + type : smd.Token.Code_Fence, children: ["a\n"+m+""] }] ) @@ -540,7 +540,7 @@ for (const indent of [ test_single_write("Code_Block", indent + " foo", [{ - type : mds.Token.Code_Block, + type : smd.Token.Code_Block, children: [" foo"] }] ) @@ -549,7 +549,7 @@ for (const indent of [ indent + "foo\n" + indent + "bar", [{ - type : mds.Token.Code_Block, + type : smd.Token.Code_Block, children: ["foo\nbar"] }] ) @@ -558,10 +558,10 @@ for (const indent of [ indent+"foo\n" + "bar", [{ - type : mds.Token.Code_Block, + type : smd.Token.Code_Block, children: ["foo"] }, { - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["bar"] }] ) @@ -570,12 +570,12 @@ for (const indent of [ for (const {c, italic, strong} of [{ c: "*", - italic: mds.Token.Italic_Ast, - strong: mds.Token.Strong_Ast, + italic: smd.Token.Italic_Ast, + strong: smd.Token.Strong_Ast, }, { c: "_", - italic: mds.Token.Italic_Und, - strong: mds.Token.Strong_Und, + italic: smd.Token.Italic_Und, + strong: smd.Token.Strong_Und, }]) { const case_1 = ""+c+c+"bold"+c+"bold>em"+c+c+c+"" const case_2 = ""+c+c+c+"bold>em"+c+"bold"+c+c+"" @@ -585,7 +585,7 @@ for (const {c, italic, strong} of [{ test_single_write("Italic & Bold \""+case_1+"\'", case_1, [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ type : strong, children: ["bold", { @@ -599,7 +599,7 @@ for (const {c, italic, strong} of [{ test_single_write("Italic & Bold \""+case_2+"\'", case_2, [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ type : strong, children: [{ @@ -614,7 +614,7 @@ for (const {c, italic, strong} of [{ 
test_single_write("Italic & Bold \""+case_3+"\'", case_3, [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ type : italic, children: ["em", { @@ -628,7 +628,7 @@ for (const {c, italic, strong} of [{ test_single_write("Italic & Bold \""+case_4+"\'", case_4, [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ type : strong, children: [{ @@ -644,21 +644,21 @@ for (const {c, italic, strong} of [{ } for (const {type, c} of [ - {type: mds.Token.Italic_Ast, c: "*" }, - {type: mds.Token.Italic_Und, c: "_" }, - {type: mds.Token.Strong_Ast, c: "**"}, - {type: mds.Token.Strong_Und, c: "__"}, - {type: mds.Token.Strike , c: "~~"}, + {type: smd.Token.Italic_Ast, c: "*" }, + {type: smd.Token.Italic_Und, c: "_" }, + {type: smd.Token.Strong_Ast, c: "**"}, + {type: smd.Token.Strong_Und, c: "__"}, + {type: smd.Token.Strike , c: "~~"}, ]) { let e = "" for (const char of c) { e += "\\" + char } - test_single_write(mds.token_to_string(type), + test_single_write(smd.token_to_string(type), c + "foo" + c, [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ type : type, children: ["foo"] @@ -666,36 +666,36 @@ for (const {type, c} of [ }] ) - test_single_write(mds.token_to_string(type) + " space after begin", + test_single_write(smd.token_to_string(type) + " space after begin", c + " foo" + c, [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [c + " foo" + c] }] ) - test_single_write(mds.token_to_string(type) + " with Code", + test_single_write(smd.token_to_string(type) + " with Code", c + "`foo`" + c, [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ type : type, children: [{ - type : mds.Token.Code_Inline, + type : smd.Token.Code_Inline, children: ["foo"] }] }] }] ) - test_single_write(mds.token_to_string(type) + " new Paragraph", + test_single_write(smd.token_to_string(type) + " new Paragraph", "foo\n\n"+ c + "bar" + c, [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo"], }, { - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ type : type, children: ["bar"] @@ -703,18 +703,18 @@ for (const {type, c} of [ }] ) - test_single_write(`Escape ${mds.token_to_string(type)} Begin`, + test_single_write(`Escape ${smd.token_to_string(type)} Begin`, e + "foo", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [c + "foo"] }] ) - test_single_write(`Escape ${mds.token_to_string(type)} End`, + test_single_write(`Escape ${smd.token_to_string(type)} End`, c + "foo" + e, [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ type : type, children: ["foo" + c] @@ -726,7 +726,7 @@ for (const {type, c} of [ test_single_write("Escape Backtick", "\\`" + "foo", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["`" + "foo"] }] ) @@ -734,7 +734,7 @@ test_single_write("Escape Backtick", test_single_write("Escape Backslash", "\\\\" + "foo", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["\\" + "foo"] }] ) @@ -742,7 +742,7 @@ test_single_write("Escape Backslash", test_single_write("Escape normal char", "\\a", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["\\a"] }] ) @@ -750,10 +750,10 @@ test_single_write("Escape normal char", test_single_write("Link", "[title](url)", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Link, - attrs : {[mds.Attr.Href]: "url"}, + type : smd.Token.Link, + attrs : 
{[smd.Attr.Href]: "url"}, children: ["title"], }] }] @@ -762,12 +762,12 @@ test_single_write("Link", test_single_write("Link with code", "[`title`](url)", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Link, - attrs : {[mds.Attr.Href]: "url"}, + type : smd.Token.Link, + attrs : {[smd.Attr.Href]: "url"}, children: [{ - type : mds.Token.Code_Inline, + type : smd.Token.Code_Inline, children: ["title"], }], }] @@ -778,13 +778,13 @@ test_single_write("Link new paragraph", "foo\n\n"+ "[title](url)", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo"] },{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Link, - attrs : {[mds.Attr.Href]: "url"}, + type : smd.Token.Link, + attrs : {[smd.Attr.Href]: "url"}, children: ["title"], }] }] @@ -793,10 +793,10 @@ test_single_write("Link new paragraph", test_single_write("Image", "![title](url)", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Image, - attrs : {[mds.Attr.Src]: "url"}, + type : smd.Token.Image, + attrs : {[smd.Attr.Src]: "url"}, children: ["title"], }] }] @@ -805,10 +805,10 @@ test_single_write("Image", test_single_write("Image with code", "![`title`](url)", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Image, - attrs : {[mds.Attr.Src]: "url"}, + type : smd.Token.Image, + attrs : {[smd.Attr.Src]: "url"}, children: ["`title`"], }] }] @@ -817,13 +817,13 @@ test_single_write("Image with code", test_single_write("Link with Image", "[![title](src)](href)", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Link, - attrs : {[mds.Attr.Href]: "href"}, + type : smd.Token.Link, + attrs : {[smd.Attr.Href]: "href"}, children: [{ - type : mds.Token.Image, - attrs : {[mds.Attr.Src]: "src"}, + type : smd.Token.Image, + attrs : {[smd.Attr.Src]: "src"}, children: ["title"], }], }] @@ -833,7 +833,7 @@ test_single_write("Link with Image", test_single_write("Escaped link Begin", "\\[foo](url)", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["[foo](url)"] }] ) @@ -841,9 +841,9 @@ test_single_write("Escaped link Begin", test_single_write("Escaped link End", "[foo\\](url)", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Link, + type : smd.Token.Link, children: ["foo](url)"], }] }] @@ -852,10 +852,10 @@ test_single_write("Escaped link End", test_single_write("Un-Escaped link Both", "\\\\[foo\\\\](url)", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["\\", { - type : mds.Token.Link, - attrs : {[mds.Attr.Href]: "url"}, + type : smd.Token.Link, + attrs : {[smd.Attr.Href]: "url"}, children: ["foo\\"], }] }] @@ -864,9 +864,9 @@ test_single_write("Un-Escaped link Both", test_single_write("Blockquote", "> foo", [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo"], }] }] @@ -875,9 +875,9 @@ test_single_write("Blockquote", test_single_write("Blockquote no-space", ">foo", [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo"], }] }] @@ -886,7 +886,7 @@ test_single_write("Blockquote no-space", test_single_write("Blockquote Escape", "\\> foo", [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["> 
foo"], }] ) @@ -894,9 +894,9 @@ test_single_write("Blockquote Escape", test_single_write("Blockquote line break", "> foo\nbar", [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo", br, "bar"], }] }] @@ -905,9 +905,9 @@ test_single_write("Blockquote line break", test_single_write("Blockquote continued", "> foo\n> bar", [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo", br, "bar"], }] }] @@ -916,13 +916,13 @@ test_single_write("Blockquote continued", test_single_write("Blockquote end", "> foo\n\nbar", [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo"], }] }, { - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["bar"], }] ) @@ -930,9 +930,9 @@ test_single_write("Blockquote end", test_single_write("Blockquote heading", "> # foo", [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Heading_1, + type : smd.Token.Heading_1, children: ["foo"], }] }] @@ -941,9 +941,9 @@ test_single_write("Blockquote heading", test_single_write("Blockquote codeblock", "> ```\nfoo\n```", [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Code_Fence, + type : smd.Token.Code_Fence, children: ["foo"], }] }] @@ -952,11 +952,11 @@ test_single_write("Blockquote codeblock", test_single_write("Blockquote blockquote", "> > foo", [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo"], }] }] @@ -967,14 +967,14 @@ test_single_write("Blockquote up blockquote", "> foo\n"+ "> > bar", [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo"], }, { - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["bar"], }] }] @@ -986,15 +986,15 @@ test_single_write("Blockquote blockquote down", "> \n"+ "> bar", [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo"], }] }, { - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["bar"], }] }] @@ -1005,14 +1005,14 @@ test_single_write("Blockquote blockquote continued", "> >\n"+ "> > bar", [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo"], }, { - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["bar"], }] }] @@ -1024,17 +1024,17 @@ test_single_write("Blockquote up down", ">\n"+ "> > bar", [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["foo"], }] }, { - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : 
smd.Token.Paragraph, children: ["bar"], }] }] @@ -1047,20 +1047,20 @@ test_single_write("Blockquote with code and line break", ">\n"+ "> > c", [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: [{ - type : mds.Token.Code_Inline, + type : smd.Token.Code_Inline, children: ["a", br, "b"], }] }] }, { - type : mds.Token.Blockquote, + type : smd.Token.Blockquote, children: [{ - type : mds.Token.Paragraph, + type : smd.Token.Paragraph, children: ["c"], }], }]