from collections.abc import Mapping, Sequence
from dataclasses import dataclass
from typing import cast, Optional

from .md import md_escape, md_make_code, Renderer

from markdown_it.token import Token

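# per-list rendering state: the next ordinal for ordered lists, whether the list
# is compact (one line break between items instead of two) and whether the first
# item marker has been emitted yet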
@dataclass(kw_only=True)
class List:
    next_idx: Optional[int] = None
    compact: bool
    first_item_seen: bool = False

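# per-block rendering state: the indentation prefix for continuation lines and
# whether the block has already produced output (and therefore needs a paragraph
# break before the next paragraph-level construct)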
@dataclass
class Par:
    indent: str
    continuing: bool = False

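# renders a parsed token stream back to (approximate) commonmark. this renderer
# is only used for the options export, so constructs that cannot round-trip
# (ids, classes, roles) are dropped or simplified.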
class CommonMarkRenderer(Renderer):
    __output__ = "commonmark"

    _parstack: list[Par]
    _link_stack: list[str]
    _list_stack: list[List]

    def __init__(self, manpage_urls: Mapping[str, str]):
        super().__init__(manpage_urls)
        self._parstack = [ Par("") ]
        self._link_stack = []
        self._list_stack = []
    def _enter_block(self, extra_indent: str) -> None:
        self._parstack.append(Par(self._parstack[-1].indent + extra_indent))
    def _leave_block(self) -> None:
        self._parstack.pop()
        self._parstack[-1].continuing = True
    def _break(self) -> str:
        self._parstack[-1].continuing = True
        return f"\n{self._parstack[-1].indent}"
    def _maybe_parbreak(self) -> str:
        result = f"\n{self._parstack[-1].indent}" * 2 if self._parstack[-1].continuing else ""
        self._parstack[-1].continuing = True
        return result

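    # commonmark has no admonition syntax, so admonitions are rendered as a bold
    # "**Kind:**" prefix in front of their contents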
    def _admonition_open(self, kind: str) -> str:
        pbreak = self._maybe_parbreak()
        self._enter_block("")
        return f"{pbreak}**{kind}:** "
    def _admonition_close(self) -> str:
        self._leave_block()
        return ""

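    # prefix every line but the first with the current indentation so multi-line
    # content stays inside the enclosing block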
    def _indent_raw(self, s: str) -> str:
        if '\n' not in s:
            return s
        return f"\n{self._parstack[-1].indent}".join(s.splitlines())

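    # token handlers. each returns the commonmark fragment for its token and
    # updates the block and list state as needed.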
    def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._parstack[-1].continuing = True
        return self._indent_raw(md_escape(token.content))
    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._maybe_parbreak()
    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return ""
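    # a hard break needs two trailing spaces before the newline in commonmark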
    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return f"  {self._break()}"
    def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._break()
    def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._parstack[-1].continuing = True
        return md_make_code(token.content)
    def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self.fence(token, tokens, i)
    def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._parstack[-1].continuing = True
        self._link_stack.append(cast(str, token.attrs['href']))
        return "["
    def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return f"]({md_escape(self._link_stack.pop())})"
    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        lst = self._list_stack[-1]
        lbreak = "" if not lst.first_item_seen else self._break() * (1 if lst.compact else 2)
        lst.first_item_seen = True
        head = " -"
        if lst.next_idx is not None:
            head = f" {lst.next_idx}."
            lst.next_idx += 1
        self._enter_block(" " * (len(head) + 1))
        return f'{lbreak}{head} '
    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._leave_block()
        return ""
    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._list_stack.append(List(compact=bool(token.meta['compact'])))
        return self._maybe_parbreak()
    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._list_stack.pop()
        return ""
    def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return "*"
    def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return "*"
    def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return "**"
    def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return "**"
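    # code blocks (fenced or indented) are re-emitted as fences via md_make_code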
    def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        code = token.content
        if code.endswith('\n'):
            code = code[:-1]
        pbreak = self._maybe_parbreak()
        return pbreak + self._indent_raw(md_make_code(code, info=token.info, multiline=True))
    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        pbreak = self._maybe_parbreak()
        self._enter_block("> ")
        return pbreak + "> "
    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._leave_block()
        return ""
    def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_open("Note")
    def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_close()
    def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_open("Caution")
    def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_close()
    def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_open("Important")
    def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_close()
    def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_open("Tip")
    def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_close()
    def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_open("Warning")
    def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_close()
    def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._list_stack.append(List(compact=False))
        return ""
    def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._list_stack.pop()
        return ""
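    # definition lists are approximated as a bullet list with emphasized terms.
    # the block opened here for the term is only closed in dd_close so that the
    # definition body stays indented under its term.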
    def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        pbreak = self._maybe_parbreak()
        self._enter_block("   ")
        # add an opening zero-width non-joiner to separate *our* emphasis from possible
        # emphasis in the provided term
        return f'{pbreak} - *{chr(0x200C)}'
    def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return f"{chr(0x200C)}*"
    def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._parstack[-1].continuing = True
        return ""
    def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._leave_block()
        return ""
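    # roles do not exist in commonmark. manpage references become links when a
    # url is known, everything else degrades to inline code.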
    def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._parstack[-1].continuing = True
        content = md_make_code(token.content)
        if token.meta['name'] == 'manpage' and (url := self._manpage_urls.get(token.content)):
            return f"[{content}]({url})"
        return content # no roles in regular commonmark
    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        # there's no way we can emit attrspans correctly in all cases. we could use inline
        # html for ids, but that would not round-trip. same holds for classes. since this
        # renderer is only used for approximate options export and all of these things are
        # not allowed in options we can ignore them for now.
        return ""
    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return ""
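    # ATX headings; token.markup already contains the right number of '#' characters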
    def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return token.markup + " "
    def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return "\n"
    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._list_stack.append(
            List(next_idx = cast(int, token.attrs.get('start', 1)),
                 compact = bool(token.meta['compact'])))
        return self._maybe_parbreak()
    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._list_stack.pop()
        return ""
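    # images render to the standard "![alt](src "title")" commonmark form; the
    # exact escaping of the alt text is an approximation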
    def image(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        if title := cast(str, token.attrs.get('title', '')):
            title = ' "' + title.replace('"', '\\"') + '"'
        return f'![{md_escape(token.content)}]({cast(str, token.attrs["src"])}{title})'