import { marked } from 'marked';
import { MarkdownOptions } from './render';

const footnotes = Symbol('footnotes');

// We're going to hang some extra data off of the lexer so we can reference it
// later when generating links back to references
declare module 'marked' {
    export interface Lexer {
        [footnotes]: Record<string, number>;
    }
}

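// Usage sketch (not the actual wiring, which lives in ./render): these factories
// return marked v4-style tokenizer/renderer extensions, so registering them
// would look roughly like:
//
//   marked.use({
//     extensions: [
//       footnote_ref_ext(renderer, opts),
//       footnote_list_ext(renderer, opts)
//     ]
//   });
//
// where `renderer` and `opts` are the marked.Renderer and MarkdownOptions
// built by the caller.
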
export interface FootnoteLinkToken extends marked.Tokens.Generic {
    id: string;
    inst: number;
}

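// Inline extension: matches footnote references like `[^1]` or `[^note-id]` and
// renders each one as a superscript link pointing at the matching note. Every
// reference also gets a per-id instance number (via next_cite_inst) so the note
// can later link back to each place it was cited from.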
export function footnote_ref_ext(renderer: marked.Renderer, opts: MarkdownOptions) : marked.TokenizerExtension & marked.RendererExtension {
    return {
        name: 'footnote_ref',
        level: 'inline',
        start: (src) => src.match(/\[\^/)?.index,
        tokenizer(src, tokens) {
            const rule = /^\[\^([a-zA-Z0-9-\._, §]+)]/;
            const match = rule.exec(src);

            if (match) {
                const id = match[1];

                return {
                    type: 'footnote_ref',
                    raw: match[0],
                    id: id,
                    inst: next_cite_inst(this.lexer, id)
                };
            }
        },
        renderer(token: FootnoteLinkToken) {
            return `<sup id="cite:ref-${token.id}-${token.inst}"><a href="#cite:note-${token.id}">[${token.id}]</a></sup>`;
        }
    };
}

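// Token shapes produced by footnote_list_ext below: one FootnoteListToken per
// block of definitions, holding a FootnoteToken for each `[^id]:` entry it
// contains.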
export interface FootnoteListToken extends marked.Tokens.Generic {
    text: string;
    items: FootnoteToken[];
}

export interface FootnoteToken extends marked.Tokens.Generic {
    id: string;
    text: string;
    inst_count() : number;
}

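// Block extension: matches a run of footnote definitions of the form
// `[^id]: note text`, where plain lines continue the current note and a blank
// line ends it. Renders the collected notes as an endnotes list, with
// link-backs to every reference counted by footnote_ref_ext.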
export function footnote_list_ext(renderer: marked.Renderer, opts: MarkdownOptions) : marked.TokenizerExtension & marked.RendererExtension {
    return {
        name: 'footnote_list',
        level: 'block',
        start: (src) => src.match(/^\[/)?.index,
        tokenizer(src, tokens) {
            const token: FootnoteListToken = {
                type: 'footnote_list',
                raw: '',
                text: '',
                items: [ ]
            };

            let remaining = src;
            const prefix_rule = /^\[\^([a-zA-Z0-9\., _§-]+)]:/;
            const whitespace_rule = /^\s*(?:\n|$)/;

            if (! prefix_rule.test(src)) {
                return null;
            }

            const items: { prefix: string, content: string }[] = [ ];
            let current: { prefix: string, content: string } | null = null;

            // Consume `str` from the front of the remaining source, keeping it in `raw`
            function take(str: string) {
                token.raw += str;
                remaining = remaining.slice(str.length);
                return str;
            }

            // Walk the block line by line: a `[^id]:` prefix starts a new note,
            // a blank line closes the current one, and anything else while a
            // note is open is appended to its content.
            line_loop:
            while (true) {
                const prefix_match = prefix_rule.exec(remaining);

                if (prefix_match) {
                    take(prefix_match[0]);

                    items.push(
                        current = {
                            prefix: prefix_match[1],
                            content: '',
                        }
                    );

                    continue line_loop;
                }

                const whitespace_match = whitespace_rule.exec(remaining);

                if (whitespace_match) {
                    // Nothing consumed means we've reached the end of the block
                    if (! take(whitespace_match[0])) {
                        break line_loop;
                    }

                    current = null;
                    continue line_loop;
                }

                if (current) {
                    const next_newline = remaining.indexOf('\n');

                    if (next_newline < 0) {
                        current.content += take(remaining);
                        break;
                    }

                    current.content += take(remaining.slice(0, next_newline + 1));
                    continue line_loop;
                }

                break;
            }

            token.text = token.raw.trim();
            token.items = items.map(({ prefix, content }) => {
                const token = {
                    type: 'footnote',
                    raw: `[^${prefix}]:${content}`,
                    id: prefix,
                    text: content,
                    tokens: [ ],
                    // Deferred until render time, so it sees the final reference count
                    inst_count: () => get_cite_inst_count(this.lexer, prefix)
                };

                this.lexer.inline(token.text, token.tokens);
                return token;
            });

            return token;
        },
        renderer(token: FootnoteListToken) {
            const items = token.items.map((item) => (`
                <li role="doc-footnote" id="cite:note-${item.id}">
                    <span class="cite-label">[${item.id}]</span>
                    ${footnote_link_backs(item.id, item.inst_count())}
                    ${this.parser.parseInline(item.tokens, renderer)}
                </li>
            `));

            return `<ul role="doc-endnotes">${items.join('')}</ul>`;
        }
    };
}

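// Per-lexer reference counters, hung off the lexer under the `footnotes`
// symbol: next_cite_inst hands out 1, 2, 3, ... for successive references to
// the same footnote id, and get_cite_inst_count reads the total seen.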
function get_cite_inst_count(lexer: marked.Lexer, id: string) {
    if (! lexer[footnotes]) {
        lexer[footnotes] = { };
    }

    return lexer[footnotes][id] || 0;
}

function next_cite_inst(lexer: marked.Lexer, id: string) {
    if (! lexer[footnotes]) {
        lexer[footnotes] = { };
    }

    if (! lexer[footnotes][id]) {
        lexer[footnotes][id] = 0;
    }

    return ++lexer[footnotes][id];
}

const letters = 'abcdefghijklmnopqrstuvwxyz';

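// Builds the "back to reference" links shown next to each note: a single `^`
// when a note is cited once, or lettered links (a, b, c, ...) when it is
// cited multiple times.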
function footnote_link_backs(id: string, count: number) {
    if (! count) {
        return '';
    }

    if (count === 1) {
        return `<sup><a href="#cite:ref-${id}-1" title="Back to reference">^</a></sup>`;
    }

    // note: We're using letters for link backs; if we run out, only
    // show the first 26 references
    // todo: A more ideal solution would be to start using double letters,
    // like "aa", "ab", "ac", etc. after running out of single-letter options
    count = Math.min(count, 26);

    const links: string[] = [ ];

    for (let i = 0; i < count; i++) {
        const letter = letters[i];
        links[i] = `<a href="#cite:ref-${id}-${i + 1}" title="Back to reference ${letter}">${letter}</a>`;
    }

    return `<sup>^ ${links.join(' ')}</sup>`;
}