first commit; migrate code from minimal/docs-server
This commit is contained in:
41
src/async-steps.ts
Normal file
41
src/async-steps.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
|
||||
// marked does not support asynchronous steps in the rendering process.
|
||||
// as such, in order to support external renderers that do their processing
|
||||
// async, this handles swapping out a placeholder for the additionally processed
|
||||
// content after marked is finished rendering
|
||||
|
||||
let next_task_id = 1;
|
||||
const data_binding = /{{@@!@!@@ MARKED ASYNC DATA BINDING PLACEHOLDER {{([0-9]+)}} @@!@!@@}}/g;
|
||||
|
||||
const pending_tasks: Record<number, Promise<string>> = { };
|
||||
|
||||
export function bind_data_async(data_generator: Promise<string>) {
|
||||
const task_id = next_task_id++;
|
||||
const placeholder = `{{@@!@!@@ MARKED ASYNC DATA BINDING PLACEHOLDER {{${task_id}}} @@!@!@@}}`;
|
||||
pending_tasks[task_id] = data_generator;
|
||||
return placeholder;
|
||||
}
|
||||
|
||||
export async function resolve_async_bindings(html: string) {
|
||||
const bindings: string[] = [ ];
|
||||
const promises: Promise<string>[] = [ ];
|
||||
|
||||
let match: RegExpMatchArray;
|
||||
|
||||
while (match = data_binding.exec(html)) {
|
||||
bindings.push(match[0]);
|
||||
promises.push(pending_tasks[match[1]]);
|
||||
delete pending_tasks[match[1]];
|
||||
}
|
||||
|
||||
data_binding.lastIndex = 0;
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
html = html.replace(bindings[i], results[i]);
|
||||
}
|
||||
|
||||
return html;
|
||||
}
|
||||
|
94
src/attrs.ts
Normal file
94
src/attrs.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
|
||||
import { Lazy, resolve_lazy } from './lazy';
|
||||
|
||||
const attrs_pattern = /\s+\{\s*(?:[\.#:][^\s]+(?:\s*[\.#:][^\s]+)*)?\s*}$/;
|
||||
|
||||
export interface ParsedAttributes {
|
||||
id?: string;
|
||||
classes: string[];
|
||||
attrs: Record<string, string | string[]>;
|
||||
html_attrs: string[];
|
||||
text: string;
|
||||
}
|
||||
|
||||
export function parse_attributes(text: string, fallback_id?: Lazy<string>) {
|
||||
const attrs: ParsedAttributes = {
|
||||
id: null,
|
||||
classes: [ ],
|
||||
attrs: { },
|
||||
html_attrs: [ ],
|
||||
text,
|
||||
};
|
||||
|
||||
const attrs_match = attrs_pattern.exec(text);
|
||||
|
||||
if (attrs_match) {
|
||||
attrs.text = text.slice(0, -attrs_match[0].length).trim();
|
||||
|
||||
const raw_attrs = attrs_match[0].trim().slice(1, -1).trim().split(/\s+/g);
|
||||
|
||||
for (const attr of raw_attrs) {
|
||||
if (attr.startsWith('.')) {
|
||||
attrs.classes.push(attr.slice(1));
|
||||
}
|
||||
|
||||
else if (attr.startsWith('#')) {
|
||||
attrs.id = attr.slice(1);
|
||||
}
|
||||
|
||||
else if (attr.startsWith(':')) {
|
||||
const eq_index = attr.indexOf('=');
|
||||
|
||||
if (eq_index === -1) {
|
||||
const name = attr.slice(1);
|
||||
attrs.attrs[name] = '';
|
||||
}
|
||||
|
||||
const name = attr.slice(1, eq_index);
|
||||
const value = attr.slice(eq_index + 1);
|
||||
|
||||
// Enable passing the same attribute more than once for lists, i.e.:
|
||||
// {:rel=external :rel=nofollow}
|
||||
// should render as:
|
||||
// rel="external nofollow"
|
||||
if (attrs.attrs[name]) {
|
||||
if (! Array.isArray(attrs.attrs[name])) {
|
||||
attrs.attrs[name] = [ attrs.attrs[name] as string ];
|
||||
}
|
||||
|
||||
(attrs.attrs[name] as string[]).push(value);
|
||||
}
|
||||
|
||||
else {
|
||||
attrs.attrs[name] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (! attrs.id) {
|
||||
attrs.id = resolve_lazy(fallback_id);
|
||||
}
|
||||
|
||||
if (attrs.id) {
|
||||
attrs.html_attrs.push(`id="${attrs.id}"`);
|
||||
}
|
||||
|
||||
if (attrs.classes.length) {
|
||||
attrs.html_attrs.push(`class="${attrs.classes.join(' ')}"`);
|
||||
}
|
||||
|
||||
for (const [name, value] of Object.entries(attrs.attrs)) {
|
||||
if (Array.isArray(value)) {
|
||||
attrs.html_attrs.push(`${name}="${value.join(' ')}"`);
|
||||
}
|
||||
|
||||
else {
|
||||
attrs.html_attrs.push(value ? `${name}="${value}"` : name);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(attrs);
|
||||
|
||||
return attrs;
|
||||
}
|
2
src/bin/markdown2html.ts
Normal file
2
src/bin/markdown2html.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
|
||||
// Placeholder entry point for the markdown2html CLI (implementation pending)
console.log('markdown2html');
|
92
src/description-list.ts
Normal file
92
src/description-list.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
|
||||
import { marked } from 'marked';
|
||||
import { MarkdownOptions } from './render';
|
||||
|
||||
// Token for a whole description list block (a run of ":#"/":-"/"::" lines)
export interface DescriptionListToken extends marked.Tokens.Generic {
	items: (DescriptionTermToken | DescriptionDetailToken)[];
}

// A term entry (rendered as <dt>), started by a ":#" line
export interface DescriptionTermToken extends marked.Tokens.Generic {
	text: string;
}

// A detail entry (rendered as <dd>), started by a ":-" line
export interface DescriptionDetailToken extends marked.Tokens.Generic {
	text: string;
}

// Block extension implementing description lists (<dl>/<dt>/<dd>).
// ":#" starts a term, ":-" starts a detail, and "::" continues the current
// entry onto another line.
export function description_list_ext(renderer: marked.Renderer, opts: MarkdownOptions) : marked.TokenizerExtension & marked.RendererExtension {
	return {
		name: 'description_list',
		level: 'block',
		start: (src) => src.match(/^:[:#-]/)?.index,
		tokenizer(src, tokens) {
			// One or more consecutive lines beginning with ":" + one of ":", "#", "-"
			const rule = /^(?::[:#-](?:\s[^\n]*)?(?:\n|$))+/;
			const match = rule.exec(src);

			if (match) {
				const token: DescriptionListToken = {
					type: 'description_list',
					raw: match[0],
					items: [ ]
				};

				const items = token.raw.trim().split('\n');
				// Lines accumulated for the entry currently being built
				const raw_buffer: string[] = [ ];
				const text_buffer: string[] = [ ];

				// Emits the buffered lines as a single term/detail sub-token
				const flush_buffer = () => {
					if (! raw_buffer.length) {
						return;
					}

					// Grab the second character from the first line to determine the
					// token type (should be "#" or "-")
					const type = raw_buffer[0][1] === '#' ? 'description_term' : 'description_detail';

					const sub_token: (DescriptionTermToken | DescriptionDetailToken) = {
						type,
						raw: raw_buffer.join('\n'),
						text: text_buffer.join('\n'),
						tokens: [ ],
					};

					raw_buffer.length = 0;
					text_buffer.length = 0;

					// Parse the entry's text as block content so nested markdown works
					this.lexer.blockTokens(sub_token.text, sub_token.tokens);
					token.items.push(sub_token);
				};

				for (const line of items) {
					const rule = /^:([:#-])(?:\s([^\n]*))?(?:\n|$)/;
					const match = rule.exec(line);

					if (match) {
						// ":#" / ":-" markers start a new entry; "::" continues the current one
						if (match[1] !== ':') {
							flush_buffer();
						}

						raw_buffer.push(match[0]);
						text_buffer.push(match[2]);
					}
				}

				// Emit whatever entry is still buffered at the end of the block
				flush_buffer();

				return token;
			}
		},
		renderer(token: DescriptionListToken) {
			const items = token.items.map((item) => {
				const tag = item.type === 'description_term' ? 'dt' : 'dd';
				return `
<${tag}>
${this.parser.parse(item.tokens)}
</${tag}>
`;
			});

			return `<dl>${items.join('')}</dl>`;
		}
	};
}
|
206
src/footnotes.ts
Normal file
206
src/footnotes.ts
Normal file
@@ -0,0 +1,206 @@
|
||||
|
||||
import { marked } from 'marked';
|
||||
import { MarkdownOptions } from './render';
|
||||
|
||||
// Key under which per-document footnote reference counts are stashed on the lexer
const footnotes = Symbol('footnotes');

// We're going to hang some extra data off of the lexer so we can reference it
// later when generating links back to references
declare module 'marked' {
	export interface Lexer {
		[footnotes]: Record<string, number>;
	}
}

// Inline token produced for each "[^id]" reference in the text
export interface FootnoteLinkToken extends marked.Tokens.Generic {
	// The footnote identifier (text after the "^")
	id: string;
	// 1-based instance number of this particular reference to `id`
	inst: number;
}
|
||||
|
||||
export function footnote_ref_ext(renderer: marked.Renderer, opts: MarkdownOptions) : marked.TokenizerExtension & marked.RendererExtension {
|
||||
return {
|
||||
name: 'footnote_ref',
|
||||
level: 'inline',
|
||||
start: (src) => src.match(/\[\^/)?.index,
|
||||
tokenizer(src, tokens) {
|
||||
const rule = /^\[\^([a-zA-Z0-9-._§]+)]/;
|
||||
const match = rule.exec(src);
|
||||
|
||||
if (match) {
|
||||
const id = match[1];
|
||||
|
||||
return {
|
||||
type: 'footnote_ref',
|
||||
raw: match[0],
|
||||
id: id,
|
||||
inst: next_cite_inst(this.lexer, id)
|
||||
};
|
||||
}
|
||||
},
|
||||
renderer(token: FootnoteLinkToken) {
|
||||
return `<sup id="cite:ref-${token.id}-${token.inst}"><a href="#cite:note-${token.id}">[${token.id}]</a></sup>`;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Token for the block of footnote definitions ("[^id]: text" lines)
export interface FootnoteListToken extends marked.Tokens.Generic {
	text: string;
	items: FootnoteToken[];
}

// A single footnote definition within the list
export interface FootnoteToken extends marked.Tokens.Generic {
	id: string;
	text: string;
	// Deferred count of references to this note; resolved at render time,
	// after the whole document has been tokenized
	inst_count() : number;
}
|
||||
|
||||
// Block extension that consumes a run of "[^id]: content" definitions
// (plus continuation lines attached to the current definition) and renders
// them as an endnotes list with link-backs to each reference.
export function footnote_list_ext(renderer: marked.Renderer, opts: MarkdownOptions) : marked.TokenizerExtension & marked.RendererExtension {
	return {
		name: 'footnote_list',
		level: 'block',
		start: (src) => src.match(/^\[/)?.index,
		tokenizer(src, tokens) {
			const token: FootnoteListToken = {
				type: 'footnote_list',
				raw: '',
				text: '',
				items: [ ]
			};

			let remaining = src;
			const prefix_rule = /^\[\^([a-zA-Z0-9\._§-]+)]:/;
			const whitespace_rule = /^\s*(?:\n|$)/;

			// Bail out unless the block starts with a "[^id]:" definition
			if (! prefix_rule.test(src)) {
				return null;
			}

			const items: { prefix: string, content: string }[] = [ ];
			// The definition currently accumulating continuation lines (if any)
			let current: { prefix: string, content: string };

			// Consumes `str` from the head of `remaining`, adding it to token.raw
			function take(str: string) {
				token.raw += str;
				remaining = remaining.slice(str.length);
				return str;
			}

			line_loop:
			while (true) {
				const prefix_match = prefix_rule.exec(remaining);

				// A new "[^id]:" prefix starts a new definition
				if (prefix_match) {
					take(prefix_match[0]);

					items.push(
						current = {
							prefix: prefix_match[1],
							content: '',
						}
					);

					continue line_loop;
				}

				const whitespace_match = whitespace_rule.exec(remaining);

				// A blank line closes the current definition; taking an empty
				// match means we've hit the end of the remaining source
				if (whitespace_match) {
					if (! take(whitespace_match[0])) {
						break line_loop;
					}

					current = null;
					continue line_loop;
				}

				// Any other line continues the current definition's content
				if (current) {
					const next_newline = remaining.indexOf('\n');

					if (next_newline < 0) {
						current.content += take(remaining);
						break;
					}

					current.content += take(remaining.slice(0, next_newline + 1));
					continue line_loop;
				}

				// Non-definition content with no open definition: stop consuming
				break;
			}

			token.text = token.raw.trim();
			token.items = items.map(({ prefix, content }) => {
				const token = {
					type: 'footnote',
					raw: `[^${prefix}]:${content}`,
					id: prefix,
					text: content,
					tokens: [ ],
					// Resolved lazily so all references are counted before rendering
					inst_count: () => get_cite_inst_count(this.lexer, prefix)
				};

				this.lexer.inline(token.text, token.tokens);
				return token;
			});

			return token;
		},
		renderer(token: FootnoteListToken) {
			const items = token.items.map((item) => (`
<li role="doc-footnote" id="cite:note-${item.id}">
<span class="cite-label">[${item.id}]</span>
${footnote_link_backs(item.id, item.inst_count())}
${this.parser.parseInline(item.tokens, renderer)}
</li>
`));

			return `<ul role="doc-endnotes">${items.join('')}</ul>`;
		}
	};
}
|
||||
|
||||
function get_cite_inst_count(lexer: marked.Lexer, id: string) {
|
||||
if (! lexer[footnotes]) {
|
||||
lexer[footnotes] = { };
|
||||
}
|
||||
|
||||
return lexer[footnotes][id] || 0;
|
||||
}
|
||||
|
||||
function next_cite_inst(lexer: marked.Lexer, id: string) {
|
||||
if (! lexer[footnotes]) {
|
||||
lexer[footnotes] = { };
|
||||
}
|
||||
|
||||
if (! lexer[footnotes][id]) {
|
||||
lexer[footnotes][id] = 0;
|
||||
}
|
||||
|
||||
return ++lexer[footnotes][id];
|
||||
}
|
||||
|
||||
const letters = 'abcdefghijklmnopqrstuvwxyz';
|
||||
|
||||
function footnote_link_backs(id: string, count: number) {
|
||||
if (! count) {
|
||||
return '';
|
||||
}
|
||||
|
||||
if (count === 1) {
|
||||
return `<sup><a href="#cite:ref-${id}-1" title="Back to reference">^</a></sup>`;
|
||||
}
|
||||
|
||||
// note: We're using letters for link backs; If we run out, only
|
||||
// show the first 26 references
|
||||
// todo: A more ideal solution would be to start using double-letters,
|
||||
// like "aa", "ab", "ac", etc. after running out of single letter options
|
||||
count = Math.min(count, 26);
|
||||
|
||||
const links: string[] = [ ];
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const letter = letters[i];
|
||||
links[i] = `<a href="#cite:ref-${id}-${i + 1}" title="Back to reference ${letter}">${letter}</a>`;
|
||||
}
|
||||
|
||||
return `<sup>^ ${links.join(' ')}</sup>`;
|
||||
}
|
14
src/html-sanitize.ts
Normal file
14
src/html-sanitize.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
|
||||
import { JSDOM } from 'jsdom';
|
||||
import createDOMPurify = require('dompurify');
|
||||
|
||||
export function sanitize_html(html: string) : string {
|
||||
const { window } = new JSDOM('');
|
||||
const dom_purify = createDOMPurify(window as any as Window);
|
||||
return dom_purify.sanitize(html, {
|
||||
CUSTOM_ELEMENT_HANDLING: {
|
||||
tagNameCheck: (tag_name) => tag_name === 'svg-icon',
|
||||
attributeNameCheck: (attr_name) => attr_name === 'icon',
|
||||
}
|
||||
});
|
||||
}
|
32
src/icon.ts
Normal file
32
src/icon.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
|
||||
import { marked } from 'marked';
|
||||
import { icons } from './icons';
|
||||
import { MarkdownOptions } from './render';
|
||||
|
||||
export interface IconToken extends marked.Tokens.Generic {
|
||||
text: string;
|
||||
}
|
||||
|
||||
export function icon_ext(renderer: marked.Renderer, opts: MarkdownOptions) : marked.TokenizerExtension & marked.RendererExtension {
|
||||
return {
|
||||
name: 'icon',
|
||||
level: 'inline',
|
||||
start: (src) => src.match(/\{:/)?.index,
|
||||
tokenizer(src, tokens) {
|
||||
const rule = /^\{:([a-zA-Z0-9-]+):\}/;
|
||||
const match = rule.exec(src);
|
||||
|
||||
if (match) {
|
||||
return {
|
||||
type: 'icon',
|
||||
raw: match[0],
|
||||
text: match[1],
|
||||
tokens: this.lexer.inlineTokens(match[1], [ ])
|
||||
};
|
||||
}
|
||||
},
|
||||
renderer(token: IconToken) {
|
||||
return icons[token.text] || `<!-- unknown icon "${token.text}" -->`;
|
||||
}
|
||||
};
|
||||
}
|
25
src/icons.ts
Normal file
25
src/icons.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
|
||||
// Registry of inline SVG icon markup, keyed by icon name.
// Object.create(null) avoids prototype keys leaking in as "icons".
export const icons: Record<string, string> = Object.create(null);

// Used to collapse the multi-line SVG template below onto a single line
const whitespace = /[\s\t\n]+/g;
const feather_icons: Record<string, string> = require('../vendor/feather-icons/icons.json');

// todo: css variables

// Wrap each feather icon's path data in a standalone, CSS-variable-sized <svg>
for (const [name, contents] of Object.entries(feather_icons)) {
	icons[name] = `
<svg xmlns="http://www.w3.org/2000/svg"
class="icon ${name}"
aria-hidden="true"
style="width: var(--icon-size, 1rem); height: var(--icon-size, 1rem)"
viewBox="0 0 24 24"
fill="none"
stroke="currentcolor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
>${contents}</svg>
`.replace(whitespace, ' ').trim();
}

// The registry is read-only after module load
Object.freeze(icons);
|
2
src/index.ts
Normal file
2
src/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
|
||||
// Public API surface: re-export everything from the main render module
export * from './render';
|
67
src/katex.ts
Normal file
67
src/katex.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
|
||||
import { marked } from 'marked';
|
||||
import katex = require('katex');
|
||||
import type { KatexOptions } from 'katex';
|
||||
import { MarkdownOptions } from './render';
|
||||
|
||||
export interface KatexToken extends marked.Tokens.Generic {
|
||||
text: string;
|
||||
}
|
||||
|
||||
export function katex_block_ext(renderer: marked.Renderer, opts: MarkdownOptions) : marked.TokenizerExtension & marked.RendererExtension {
|
||||
return {
|
||||
name: 'katex_block',
|
||||
level: 'block',
|
||||
start: (src) => src.match(/^\$\$/)?.index,
|
||||
tokenizer(src, tokens) {
|
||||
const rule = /^\$\$((?:[^\$]|\$(?!\$))+)\$\$/;
|
||||
const match = rule.exec(src);
|
||||
|
||||
if (match) {
|
||||
return {
|
||||
type: 'katex_block',
|
||||
raw: match[0],
|
||||
text: match[1]
|
||||
};
|
||||
}
|
||||
},
|
||||
renderer(token: KatexToken) {
|
||||
const katex_opts: KatexOptions = {
|
||||
displayMode: true, // true == "block"
|
||||
output: 'html',
|
||||
macros: opts.katex_macros,
|
||||
};
|
||||
|
||||
return (katex as any).renderToString(token.text, katex_opts);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export function katex_inline_ext(renderer: marked.Renderer, opts: MarkdownOptions) : marked.TokenizerExtension & marked.RendererExtension {
|
||||
return {
|
||||
name: 'katex_inline',
|
||||
level: 'inline',
|
||||
start: (src) => src.match(/\$/)?.index,
|
||||
tokenizer(src, tokens) {
|
||||
const rule = /^\$([^\n\s](?:[^\n\$]*[^\n\s])?)\$/;
|
||||
const match = rule.exec(src);
|
||||
|
||||
if (match) {
|
||||
return {
|
||||
type: 'katex_inline',
|
||||
raw: match[0],
|
||||
text: match[1]
|
||||
};
|
||||
}
|
||||
},
|
||||
renderer(token: KatexToken) {
|
||||
const katex_opts: KatexOptions = {
|
||||
displayMode: false, // false == "inline"
|
||||
output: 'html',
|
||||
macros: opts.katex_macros,
|
||||
};
|
||||
|
||||
return (katex as any).renderToString(token.text, katex_opts);
|
||||
}
|
||||
};
|
||||
}
|
8
src/lazy.ts
Normal file
8
src/lazy.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
|
||||
export type NotFunc = string | number | boolean | object;
|
||||
|
||||
export type Lazy<T extends NotFunc> = T | (() => T);
|
||||
|
||||
export function resolve_lazy<T extends NotFunc>(lazy: Lazy<T>) : T {
|
||||
return typeof lazy === 'function' ? lazy() : lazy;
|
||||
}
|
31
src/mark.ts
Normal file
31
src/mark.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
|
||||
import { marked } from 'marked';
|
||||
import { MarkdownOptions } from './render';
|
||||
|
||||
export interface MarkToken extends marked.Tokens.Generic {
|
||||
text: string;
|
||||
}
|
||||
|
||||
export function mark_ext(renderer: marked.Renderer, opts: MarkdownOptions) : marked.TokenizerExtension & marked.RendererExtension {
|
||||
return {
|
||||
name: 'mark',
|
||||
level: 'inline',
|
||||
start: (src) => src.match(/=/)?.index,
|
||||
tokenizer(src, tokens) {
|
||||
const rule = /^==([^\n\s](?:(?:[^\n=]|=(?!=))*[^\n\s])?)==/;
|
||||
const match = rule.exec(src);
|
||||
|
||||
if (match) {
|
||||
return {
|
||||
type: 'mark',
|
||||
raw: match[0],
|
||||
text: match[1],
|
||||
tokens: this.lexer.inlineTokens(match[1], [ ])
|
||||
};
|
||||
}
|
||||
},
|
||||
renderer(token: MarkToken) {
|
||||
return `<mark>${this.parser.parseInline(token.tokens, renderer)}</mark>`;
|
||||
}
|
||||
};
|
||||
}
|
15
src/prism/index.ts
Normal file
15
src/prism/index.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
|
||||
import { highlight as prism_highlight, languages/* , hooks */ } from 'prismjs';
|
||||
import load_languages = require('prismjs/components/index');
|
||||
|
||||
load_languages();
|
||||
require('./wasm');
|
||||
|
||||
// hooks.add('after-tokenize', (env) => {
|
||||
// //
|
||||
// });
|
||||
|
||||
export function highlight(code: string, lang: string) {
|
||||
const grammar = typeof languages[lang] === 'object' ? languages[lang] : languages.plain;
|
||||
return prism_highlight(code, grammar, lang);
|
||||
}
|
79
src/prism/wasm.ts
Normal file
79
src/prism/wasm.ts
Normal file
@@ -0,0 +1,79 @@
|
||||
|
||||
import { languages } from 'prismjs';
|
||||
|
||||
// Additional WebAssembly keywords (bulk-memory, reference-types, and SIMD
// instructions) that Prism's stock wasm grammar does not include. Each entry
// highlights the "." separator as punctuation inside the matched keyword.
const extra_keywords = [
	{
		pattern: /\b(?:memory\.(?:init|copy|fill)|data\.(?:drop)|table\.(?:init|copy|grow|size|fill|get|set)|elem\.(?:drop)|ref\.(?:null|is_null|func))\b/,
		inside: {
			'punctuation': /\./
		}
	},
	/\b(?:funcref|externref)\b/,

	// ===== SIMD / Vectors =====
	{
		pattern: /\b(?:v128\.(?:const|store(?:(?:8|16|32|64)_lane)?|load(?:(?:8x8|16x4|32x2)_[su]|(?:8|16|32|64)_splat|(?:8|16|32|64)_lane|(?:32|64)_zero)?|not|and|andnot|or|xor|bitselect|any_true))\b/,
		inside: {
			'punctuation': /\./
		}
	},
	{
		pattern: /\b(?:i(?:8x16|16x8|32x4|64x2)\.(?:splat|replace_lane|abs|neg|all_true|bitmask|shl|eq|ne|(?:shr)_[su]))\b/,
		inside: {
			'punctuation': /\./
		}
	},
	{
		pattern: /\b(?:i(?:8x16|16x8|32x4)\.(?:(?:[gl][te]|min|max)_[su]))\b/,
		inside: {
			'punctuation': /\./
		}
	},
	{
		pattern: /\b(?:i8x16\.(?:shuffle|swizzle|popcnt|avgr_u|(?:add|sub)(?:_sat_[su])?|(?:extract_lane|narrow_i16x8|extadd_pairwise_(?:i8x16|i16x8))_[su]))\b/,
		inside: {
			'punctuation': /\./
		}
	},
	{
		pattern: /\b(?:i16x8\.(?:mul|avgr_u|q15mulr_sat_s|(?:add|sub)(?:_sat_[su])?|(?:extract_lane|narrow_i32x4|(?:extend|extmul)_(?:low|high)_i8x16)_[su]))\b/,
		inside: {
			'punctuation': /\./
		}
	},
	{
		pattern: /\b(?:i32x4\.(?:extract_lane|add|sub|mul|dot_i16x8_s|(?:(?:extend|extmul)_(?:low|high)_i16x8|trunc_sat_f32x4)_[su]|trunc_sat_f64x2_[su]_zero))\b/,
		inside: {
			'punctuation': /\./
		}
	},
	{
		pattern: /\b(?:i64x2\.(?:extract_lane|add|sub|mul|[gl][te]_s|(?:extend|extmul)_(?:low|high)_i32x4_[su]))\b/,
		inside: {
			'punctuation': /\./
		}
	},
	{
		pattern: /\b(?:f(?:32x4|64x2)\.(?:splat|extract_lane|replace_lane|eq|ne|[gl][te]|ceil|floor|trunc|nearest|abs|neg|sqrt|add|sub|mul|div|p?(?:min|max)))\b/,
		inside: {
			'punctuation': /\./
		}
	},
	{
		pattern: /\b(?:f32x4\.(?:demote_f64x2_zero|convert_i32x4_[su]))\b/,
		inside: {
			'punctuation': /\./
		}
	},
	{
		pattern: /\b(?:f64x2\.(?:promote_low_f32x4|convert_low_i32x4_[su]))\b/,
		inside: {
			'punctuation': /\./
		}
	},
	/\b(?:i8x16|i16x8|i32x4|i64x2|f32x4|f64x2)\b/,
];

// Prepend so these longer dotted forms take precedence over the stock patterns
if (Array.isArray(languages.wasm.keyword)) {
	languages.wasm.keyword.unshift(...extra_keywords);
}
|
114
src/qrcode.ts
Normal file
114
src/qrcode.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
|
||||
import { toString, QRCodeRenderersOptions } from 'qrcode';
|
||||
|
||||
// Foreground/background color pair accepted by the qrcode library
type Colors = QRCodeRenderersOptions['color'];

// Connection details encoded into a WIFI: QR code
export interface WifiInfo {
	ssid: string;
	password: string;
	// Whether the network's SSID is hidden (non-broadcast)
	hidden: boolean;
	encryption: 'WPA' | 'WEP' | 'None';
}

// Contact fields for a MECARD: QR code; property names mirror the MECARD
// field names used when building the payload
export interface MeCardInfo {
	adr?: string;
	bday?: string;
	email?: string;
	n?: string;
	nickname?: string;
	note?: string;
	sound?: string;
	tel?: string;
	tel_av?: string;
	url?: string;
}
|
||||
|
||||
// todo: css variables
|
||||
export async function generate_qr_code(data: string, colors?: Colors) {
|
||||
colors = colors || {
|
||||
dark: 'var(--theme-text-heading, currentcolor)',
|
||||
light: 'var(--theme-bg-main, transparent)'
|
||||
};
|
||||
|
||||
const svg = await toString(data, {
|
||||
type: 'svg',
|
||||
color: {
|
||||
dark: '#000000',
|
||||
light: '#ffffff'
|
||||
}
|
||||
});
|
||||
|
||||
const colored_svg = svg.replace(/#000000/g, colors.dark).replace(/#ffffff/g, colors.light).trim();
|
||||
|
||||
return colored_svg.replace(/^<svg /, '<svg class="qrcode" ')
|
||||
}
|
||||
|
||||
export function generate_wifi_qr_code(info: WifiInfo, colors?: Colors) {
|
||||
const ssid = escape(info.ssid);
|
||||
const password = escape(info.password);
|
||||
|
||||
let data = `WIFI:S:${ssid};P:${password};H:${info.hidden};`
|
||||
|
||||
if (info.encryption) {
|
||||
data += `T:${info.encryption};`;
|
||||
}
|
||||
|
||||
return generate_qr_code(data, colors);
|
||||
}
|
||||
|
||||
export function generate_mecard_qr_code(info: MeCardInfo, colors?: Colors) {
|
||||
let data = 'MECARD:';
|
||||
|
||||
if (info.adr) {
|
||||
data += `ADR:${escape(info.adr)};`;
|
||||
}
|
||||
|
||||
if (info.bday) {
|
||||
data += `BDAY:${escape(info.bday)};`;
|
||||
}
|
||||
|
||||
if (info.email) {
|
||||
data += `EMAIL:${escape(info.email)};`;
|
||||
}
|
||||
|
||||
if (info.n) {
|
||||
data += `N:${escape(info.n)};`;
|
||||
}
|
||||
|
||||
if (info.nickname) {
|
||||
data += `NICKNAME:${escape(info.nickname)};`;
|
||||
}
|
||||
|
||||
if (info.adr) {
|
||||
data += `ADR:${escape(info.adr)};`;
|
||||
}
|
||||
|
||||
if (info.adr) {
|
||||
data += `ADR:${escape(info.adr)};`;
|
||||
}
|
||||
|
||||
if (info.adr) {
|
||||
data += `ADR:${escape(info.adr)};`;
|
||||
}
|
||||
|
||||
if (info.adr) {
|
||||
data += `ADR:${escape(info.adr)};`;
|
||||
}
|
||||
|
||||
if (info.adr) {
|
||||
data += `ADR:${escape(info.adr)};`;
|
||||
}
|
||||
|
||||
data += ';';
|
||||
|
||||
return generate_qr_code(data, colors);
|
||||
}
|
||||
|
||||
function escape(str: string) {
|
||||
return str
|
||||
.replace(/\\/g, '\\\\')
|
||||
.replace(/"/g, '\\"')
|
||||
.replace(/;/g, '\\;')
|
||||
.replace(/,/g, '\\,')
|
||||
.replace(/:/g, '\\:');
|
||||
}
|
68
src/render.ts
Normal file
68
src/render.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
|
||||
import { marked } from 'marked';
|
||||
import { create_renderer } from './renderer';
|
||||
import { mark_ext } from './mark';
|
||||
import { section_ext } from './section';
|
||||
import { icon_ext } from './icon';
|
||||
import { sanitize_html } from './html-sanitize';
|
||||
import { katex_block_ext, katex_inline_ext } from './katex';
|
||||
import { footnote_list_ext, footnote_ref_ext } from './footnotes';
|
||||
import { description_list_ext } from './description-list';
|
||||
import { resolve_async_bindings } from './async-steps';
|
||||
|
||||
// Options accepted by render_markdown_to_html
export interface MarkdownOptions {
	base_url?: string;
	breaks?: boolean;
	// When true, render with marked's inline parser (no block elements)
	inline?: boolean;
	katex_macros?: Record<string, string>;
	// Additional caller-supplied marked extensions
	extensions?: MarkdownExtension[];
}

// Factory signature for marked extensions used throughout this package
export interface MarkdownExtension {
	(renderer: marked.Renderer, opts: MarkdownOptions): marked.TokenizerExtension & marked.RendererExtension
}

// Renders markdown to HTML with all of this package's extensions installed,
// resolves any async placeholders, and sanitizes the result before returning.
export async function render_markdown_to_html(markdown: string, options: MarkdownOptions = { }) {
	const marked_options: marked.MarkedOptions = {
		baseUrl: options.base_url,
		breaks: options.breaks || false,
		renderer: create_renderer(options),
	};

	// NOTE(review): marked.use() mutates marked's *global* state, so these
	// extensions accumulate across repeated calls to this function — verify
	// this is intended
	marked.use({
		extensions: [
			katex_block_ext(marked_options.renderer, options),
			katex_inline_ext(marked_options.renderer, options),
			footnote_ref_ext(marked_options.renderer, options),
			footnote_list_ext(marked_options.renderer, options),
			mark_ext(marked_options.renderer, options),
			description_list_ext(marked_options.renderer, options),
			section_ext(marked_options.renderer, options),
			icon_ext(marked_options.renderer, options),
			...(options.extensions || [ ]).map((ext) => {
				return ext(marked_options.renderer, options);
			}),
		],
		tokenizer: {
			url(src) {
				// disable auto-linking; more can be added here to auto-link only sometimes
				// see: https://github.com/markedjs/marked/issues/882#issuecomment-781628889
				return null;
			}
		},
	});

	// NOTE(review): the inline path never runs resolve_async_bindings, so any
	// async placeholders produced in inline mode would remain in the output —
	// confirm inline content cannot produce them
	const unsafe_html = options.inline
		? marked.parseInline(markdown, marked_options)
		: await new Promise<string>((resolve, reject) => {
			marked.parse(markdown, marked_options, (error, unsafe_html) => {
				if (error) {
					return reject(error);
				}

				// Swap async placeholders (QR codes, vega charts, ...) for their content
				resolve_async_bindings(unsafe_html).then(resolve, reject);
			});
		});

	// Always sanitize before handing HTML back to callers
	return sanitize_html(unsafe_html);
}
|
297
src/renderer.ts
Normal file
297
src/renderer.ts
Normal file
@@ -0,0 +1,297 @@
|
||||
|
||||
import { marked } from 'marked';
|
||||
import { highlight } from './prism';
|
||||
import katex = require('katex');
|
||||
import type { KatexOptions } from 'katex';
|
||||
import render_bytefield = require('bytefield-svg');
|
||||
import { renderSvg as render_nomnoml } from 'nomnoml';
|
||||
import { pikchr } from 'pikchr';
|
||||
import { parse as parse_yaml } from 'yaml';
|
||||
import { icons } from './icons';
|
||||
import { strip_svg } from './svg';
|
||||
import { generate_mecard_qr_code, generate_qr_code } from './qrcode';
|
||||
import { bind_data_async } from './async-steps';
|
||||
import { render_vega_spec } from './vega';
|
||||
import { parse_attributes } from './attrs';
|
||||
import { MarkdownOptions } from './render';
|
||||
|
||||
export function create_renderer(opts: MarkdownOptions) {
|
||||
const renderer = new marked.Renderer();
|
||||
|
||||
renderer.heading = heading(renderer, opts);
|
||||
renderer.code = code(renderer, opts);
|
||||
|
||||
// ...
|
||||
|
||||
return renderer;
|
||||
}
|
||||
|
||||
function heading(renderer: marked.Renderer, opts: MarkdownOptions) {
|
||||
return function(orig_text: string, level: 1 | 2 | 3 | 4 | 5 | 6, raw: string, slugger) {
|
||||
let { text, id, html_attrs } = parse_attributes(raw);
|
||||
|
||||
if (! id) {
|
||||
id = slugger.slug(text);
|
||||
html_attrs.push(`id="${id}"`);
|
||||
}
|
||||
|
||||
return `
|
||||
<h${level} ${html_attrs.join(' ')}>
|
||||
${text}
|
||||
<a class="heading-anchor" href="#${id}">
|
||||
${icons.link}
|
||||
<span style="display: none">Section titled ${text}</span>
|
||||
</a>
|
||||
</h${level}>
|
||||
`;
|
||||
};
|
||||
}
|
||||
|
||||
function code(renderer: marked.Renderer, opts: MarkdownOptions) {
|
||||
return function(code: string, infostring: string, is_escaped: boolean) {
|
||||
const args = parse_code_args(infostring);
|
||||
|
||||
if (! args || ! args[0]) {
|
||||
return `<pre class="language-txt"><code>${escape(code, is_escaped)}</code></pre>`;
|
||||
}
|
||||
|
||||
let caption = '';
|
||||
const flags = new Set<string>();
|
||||
|
||||
for (let i = 1; i < args.length; i++) {
|
||||
if (args[i][0] === ':') {
|
||||
flags.add(args[i]);
|
||||
}
|
||||
|
||||
else {
|
||||
caption = `<figcaption>${marked.parseInline(args[i], renderer.options)}</figcaption>`;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const size
|
||||
= flags.has(':small') ? 'small'
|
||||
: flags.has(':medium') ? 'medium'
|
||||
: flags.has(':large') ? 'large'
|
||||
: flags.has(':full') ? 'full'
|
||||
: 'medium';
|
||||
|
||||
const figure = (content: string) => `<figure data-lang="${args[0]}" data-size="${size}">${content}${caption}</figure>`;
|
||||
|
||||
if (args[0].startsWith('http:')) {
|
||||
return render_http_with_content(code, args[0].slice(5));
|
||||
}
|
||||
|
||||
switch (args[0]) {
|
||||
case 'samp':
|
||||
return figure(`<pre class="language-txt"><samp>${escape(code, is_escaped)}</samp></pre>`);
|
||||
|
||||
case 'bash:samp': {
|
||||
// Find the first newline that is not preceeded by a "\"
|
||||
const end_of_input = /(?<!\\)(?:\r\n|\r|\n)/.exec(code);
|
||||
|
||||
// If there is no such newline, the whole content is input
|
||||
if (! end_of_input) {
|
||||
return figure(`<pre class="language-bash">${render_prism(code, 'bash')}</pre>`);
|
||||
}
|
||||
|
||||
const input = code.slice(0, end_of_input.index);
|
||||
const rendered_input = render_prism(input, 'bash');
|
||||
const output = code.slice(end_of_input.index + 1);
|
||||
const rendered_output = `<samp>${escape(output, is_escaped)}</samp>`;
|
||||
|
||||
return figure(`<pre class="language-bash">${rendered_input}\n${rendered_output}</pre>`);
|
||||
};
|
||||
|
||||
case 'katex': {
|
||||
const katex_opts: KatexOptions = {
|
||||
displayMode: true, // true == "block"
|
||||
output: 'html',
|
||||
macros: opts.katex_macros,
|
||||
};
|
||||
|
||||
return figure(
|
||||
(katex as any).renderToString(code, katex_opts)
|
||||
);
|
||||
};
|
||||
|
||||
case 'nomnoml': {
|
||||
const svg = render_nomnoml(code);
|
||||
return figure(post_process_nomnoml_svg(svg));
|
||||
};
|
||||
|
||||
case 'clojure:bytefield': {
|
||||
const svg = render_bytefield(code);
|
||||
return figure(post_process_bytefield_svg(svg));
|
||||
};
|
||||
|
||||
case 'pikchr': {
|
||||
const svg = pikchr(code);
|
||||
return figure(post_process_pikchr_svg(svg));
|
||||
};
|
||||
|
||||
case 'qrcode': {
|
||||
const promise = generate_qr_code(code);
|
||||
const async_binding = bind_data_async(promise);
|
||||
return figure(async_binding);
|
||||
};
|
||||
|
||||
case 'yaml:mecard': {
|
||||
const parsed = parse_yaml(code);
|
||||
const promise = generate_mecard_qr_code(parsed);
|
||||
const async_binding = bind_data_async(promise);
|
||||
return figure(async_binding);
|
||||
};
|
||||
|
||||
case 'json:vega': {
|
||||
const spec = JSON.parse(code);
|
||||
const promise = render_vega_spec(spec);
|
||||
const binding = bind_data_async(promise);
|
||||
return figure(binding);
|
||||
};
|
||||
|
||||
case 'yaml:vega': {
|
||||
const spec = parse_yaml(code);
|
||||
const promise = render_vega_spec(spec);
|
||||
const binding = bind_data_async(promise);
|
||||
return figure(binding);
|
||||
};
|
||||
|
||||
default:
|
||||
return figure(`<pre class="language-${args[0] || 'txt'}">${render_prism(code, args[0])}</pre>`);
|
||||
}
|
||||
|
||||
function render_http_with_content(code: string, lang: string) {
|
||||
// Find the first double newline
|
||||
const end_of_header = /(?:\r\n|\r|\n)(?:\r\n|\r|\n)/.exec(code);
|
||||
|
||||
// If there is no such newline, the whole content is HTTP header
|
||||
if (! end_of_header) {
|
||||
return figure(`<pre class="language-http">${render_prism(code, 'http')}</pre>`);
|
||||
}
|
||||
|
||||
const header = code.slice(0, end_of_header.index);
|
||||
const rendered_header = render_prism(header, 'http', true);
|
||||
const content = code.slice(end_of_header.index + 1);
|
||||
const rendered_content = render_prism(content, lang, true);
|
||||
|
||||
return figure(`<pre class="language-http language-${lang}">${rendered_header}\n${rendered_content}</pre>`);
|
||||
}
|
||||
|
||||
// Syntax-highlight `code` with the `highlight` helper for `lang`, falling
// back to the raw (escaped) text when no highlighting was produced.
function render_prism(code: string, lang: string, include_class = false) {
	const out = highlight(code, lang);

	// Only treat the result as pre-rendered HTML when highlight() actually
	// produced something (non-null and different from the input).
	// NOTE(review): this sets the enclosing scope's `is_escaped` flag for
	// the remainder of the render — presumably intentional so the final
	// escape() below becomes a no-op; confirm against callers.
	if (out != null && out !== code) {
		is_escaped = true;
		code = out;
	}

	// When `include_class` is false this emits `<code >` (stray space) — harmless
	const classname = include_class ? `class="language-${lang}"` : '';
	return `<code ${classname}>${escape(code, is_escaped)}</code>`;
}
|
||||
};
|
||||
}
|
||||
|
||||
const arg_pattern = /^(?:[a-zA-Z0-9_:-]+|"(?:[^"\n]|(?<=\\)")*")/;
|
||||
|
||||
function parse_code_args(text: string) {
|
||||
const args: string[] = [ ];
|
||||
|
||||
text = text.trim();
|
||||
|
||||
while (text.length) {
|
||||
const match = arg_pattern.exec(text);
|
||||
|
||||
if (! match) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (match[0][0] === '"') {
|
||||
args.push(match[0].slice(1, -1));
|
||||
}
|
||||
|
||||
else {
|
||||
args.push(match[0]);
|
||||
}
|
||||
|
||||
text = text.slice(match[0].length).trimStart();
|
||||
}
|
||||
|
||||
return args;
|
||||
}
|
||||
|
||||
function escape(str: string, is_escaped: boolean) {
|
||||
return is_escaped ? str : str.replace(/&/g, '&').replace(/</g, '<').replace(/>/g, '>').replace(/"/g, '"').replace(/'/g, ''');
|
||||
}
|
||||
|
||||
const svg_text = /<text /gi
|
||||
const svg_stroke_000000 = /\bstroke="#000000"/gi;
|
||||
|
||||
// todo: css variables
|
||||
function post_process_bytefield_svg(svg: string, size?: string) {
|
||||
svg = strip_svg(svg);
|
||||
|
||||
svg = svg.replace(svg_text, '<text fill="var(--theme-text-body, currentcolor)" ');
|
||||
svg = svg.replace(svg_stroke_000000, 'stroke="var(--theme-line, currentcolor)"');
|
||||
|
||||
return svg;
|
||||
}
|
||||
|
||||
// Hard-coded nomnoml default-theme colors / fonts we rewrite below
const svg_fill_33322e = /\bfill="#33322e"/gi;
const svg_fill_eee8d5 = /\bfill="#eee8d5"/gi;
const svg_fill_fdf6e3 = /\bfill="#fdf6e3"/gi;
const svg_stroke_33322e = /\bstroke="#33322E"/gi;
const svg_font_family_helvetica = /\bfont-family="helvetica"/gi;
// A filled arrow head is a <path> wrapped in a <g fill="#33322E">
const svg_nomnoml_filled_arrow_head = /<g fill="#33322E">\s*<path d="([^"]+)">\s*<\/path>\s*<\/g>/gi;
// Any remaining bare <path d="..."> (matched AFTER the filled rewrite)
const svg_nomnoml_unfilled_arrow_head = /<path d="([^"]+)">/gi;

// todo: css variables
// Rewrite a nomnoml diagram's built-in colors to theme CSS variables.
// NOTE: the replacement ORDER below is load-bearing — the filled-arrow
// rewrite must run before the bare <path> pattern, because the rewritten
// paths gain a fill attribute and stop matching the bare pattern.
function post_process_nomnoml_svg(svg: string, size?: string) {
	svg = strip_svg(svg);

	// nomnoml uses some specific built-in styles for things, which we will be replacing
	// with variables (referencing the color themes) that fall back to safe defaults for
	// rendering the svg in a context that has css (like an RSS feed or other embedded
	// use case)

	// default text font
	svg = svg.replace(svg_font_family_helvetica, 'font-family="var(--theme-open-sans, helvetica)"');

	// root-level boxes background
	svg = svg.replace(svg_fill_eee8d5, 'fill="var(--theme-bg-light, transparent)"');

	// outlines and relationship lines
	svg = svg.replace(svg_stroke_33322e, 'stroke="var(--theme-line, currentcolor)"');

	// arrow heads
	svg = svg.replace(svg_nomnoml_filled_arrow_head, ($0, $1) => `<path d="${$1}" fill="var(--theme-line, currentcolor)"></path>`);
	svg = svg.replace(svg_nomnoml_unfilled_arrow_head, ($0, $1) => `<path d="${$1}" fill="none">`);

	// text color
	svg = svg.replace(svg_fill_33322e, 'fill="var(--theme-text-body, currentcolor)"');

	// nested boxes background
	svg = svg.replace(svg_fill_fdf6e3, 'fill="var(--theme-bg-heavy, transparent)"');

	return svg;
}
|
||||
|
||||
const svg_text_fill_rgb_000 = /\b<text fill="rgb\(0,0,0\)"/gi;
|
||||
const svg_fill_rgb_000 = /fill:rgb\(0,0,0\)/gi;
|
||||
const svg_stroke_rgb_000 = /stroke:rgb\(0,0,0\)/gi;
|
||||
|
||||
// todo: css variables
|
||||
function post_process_pikchr_svg(svg: string, size?: string) {
|
||||
svg = strip_svg(svg);
|
||||
|
||||
// text
|
||||
svg = svg.replace(svg_text_fill_rgb_000, '<text fill="var(--theme-text-body, currentcolor)"');
|
||||
|
||||
// arrow heads
|
||||
svg = svg.replace(svg_fill_rgb_000, 'fill:var(--theme-line, currentcolor)');
|
||||
|
||||
// lines / boxes
|
||||
svg = svg.replace(svg_stroke_rgb_000, 'stroke:var(--theme-line, currentcolor)');
|
||||
|
||||
return svg;
|
||||
}
|
37
src/section.ts
Normal file
37
src/section.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
|
||||
import { marked } from 'marked';
|
||||
import { ParsedAttributes, parse_attributes } from './attrs';
|
||||
import { MarkdownOptions } from './render';
|
||||
|
||||
// Token produced by the `section` tokenizer extension below
export interface SectionToken extends marked.Tokens.Generic {
	// Raw markdown content of the section body (between the fences)
	text: string;
	// Attributes parsed from the opening fence line
	attrs: ParsedAttributes;
}
|
||||
|
||||
// marked extension implementing `!!! ... !!!` fenced section blocks that
// render to <section> elements with optional attributes on the opening fence.
export function section_ext(renderer: marked.Renderer, opts: MarkdownOptions) : marked.TokenizerExtension & marked.RendererExtension {
	return {
		name: 'section',
		level: 'block',
		// Cheap pre-check marked uses to locate a potential section start
		start: (src) => src.match(/^!!!/)?.index,
		tokenizer(src, tokens) {
			// Opening fence is `!!!` plus N extra `!` (captured in \1); the
			// closing fence must repeat the same number of `!`, which lets
			// sections nest by using longer fences on the outside.
			// match[2] = optional attribute text, match[3] = section body.
			const rule = /^!!!(!*)([^\n]+)?(?:\n)((?:[^!]|!!?(?!!\1))+)!!!\1/;
			const match = rule.exec(src);

			if (match) {
				const token: SectionToken = {
					type: 'section',
					raw: match[0],
					text: match[3],
					attrs: parse_attributes(match[2] || ''),
					tokens: [ ],
				};

				// Tokenize the body as block-level markdown into child tokens
				this.lexer.blockTokens(match[3], token.tokens);
				return token;
			}
			// No return (undefined) tells marked this extension didn't match
		},
		renderer(token: SectionToken) {
			return `<section ${token.attrs.html_attrs.join(' ')}>${this.parser.parse(token.tokens)}</section>`;
		}
	};
}
|
12
src/svg.ts
Normal file
12
src/svg.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
|
||||
const svg_header = /^<\?xml version="1\.0" encoding="UTF-8"\?>/;
|
||||
const svg_dimensions = /^<svg[^>]* (height="[\d\.]+"\s+width="[\d\.]+"|width="[\d\.]+"\s+height="[\d\.]+")/;
|
||||
|
||||
// Removes fixed dimension attributes and meta-declaration from SVGs so we can scale them with CSS
|
||||
export function strip_svg(svg: string) {
|
||||
return svg
|
||||
.replace(svg_header, '')
|
||||
.replace(svg_dimensions, (match, dimensions) => {
|
||||
return match.slice(0, -dimensions.length);
|
||||
});
|
||||
}
|
28
src/vega/data-colors.ts
Normal file
28
src/vega/data-colors.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
|
||||
// note: fallback colors come from "category10" scheme
|
||||
// https://vega.github.io/vega/docs/schemes/#category10
|
||||
// todo: css variables
|
||||
export const chart_data_colors = [
|
||||
'var(--theme-chart-data-0, #1f77b4)',
|
||||
'var(--theme-chart-data-1, #ff7f0e)',
|
||||
'var(--theme-chart-data-2, #2ca02c)',
|
||||
'var(--theme-chart-data-3, #d62728)',
|
||||
'var(--theme-chart-data-4, #9467bd)',
|
||||
'var(--theme-chart-data-5, #8c564b)',
|
||||
'var(--theme-chart-data-6, #e377c2)',
|
||||
'var(--theme-chart-data-7, #7f7f7f)',
|
||||
'var(--theme-chart-data-8, #bcbd22)',
|
||||
'var(--theme-chart-data-9, #17becf)',
|
||||
];
|
||||
|
||||
export function* chart_data_color_generator() : Generator<string, never> {
|
||||
let next = 0;
|
||||
|
||||
while (true) {
|
||||
yield chart_data_colors[next++];
|
||||
|
||||
if (next >= chart_data_colors.length) {
|
||||
next = 0;
|
||||
}
|
||||
}
|
||||
}
|
15
src/vega/index.ts
Normal file
15
src/vega/index.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
|
||||
import { Spec, View } from 'vega';
|
||||
import { strip_svg } from '../svg';
|
||||
import { parse_vega_spec } from './vega-spec';
|
||||
|
||||
export async function render_vega_spec(spec: Spec) {
|
||||
const runtime = parse_vega_spec(spec);
|
||||
|
||||
const view = new View(runtime, { renderer: 'none' });
|
||||
let svg = await view.toSVG();
|
||||
|
||||
svg = strip_svg(svg);
|
||||
|
||||
return svg;
|
||||
}
|
39
src/vega/vega-spec.ts
Normal file
39
src/vega/vega-spec.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
|
||||
import * as vega from 'vega';
|
||||
import { chart_data_color_generator } from './data-colors';
|
||||
|
||||
// todo: css variables
|
||||
export function parse_vega_spec(spec: vega.Spec) {
|
||||
const config: vega.Config = {
|
||||
rect: {
|
||||
fill: 'var(--theme-line, currentcolor)',
|
||||
},
|
||||
text: {
|
||||
font: 'var(--font-open-sans, sans-serif)',
|
||||
fill: 'var(--theme-text-body, currentcolor)',
|
||||
},
|
||||
axis: {
|
||||
labelFont: 'var(--font-open-sans, sans-serif)',
|
||||
labelColor: 'var(--theme-text-body, currentcolor)',
|
||||
domainColor: 'var(--theme-line, currentcolor)',
|
||||
gridColor: 'var(--theme-line, currentcolor)',
|
||||
tickColor: 'var(--theme-line, currentcolor)',
|
||||
}
|
||||
};
|
||||
|
||||
if (spec.marks) {
|
||||
const colors = chart_data_color_generator();
|
||||
|
||||
for (const mark of spec.marks) {
|
||||
switch (mark.type) {
|
||||
case 'rect':
|
||||
mark.encode.enter.fill = { value: colors.next().value };
|
||||
break;
|
||||
|
||||
// todo: apply colors to more mark types
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return vega.parse(spec, config);
|
||||
}
|
Reference in New Issue
Block a user