Diffstat (limited to 'pkgs/tools/nix')
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py          | 12
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py             | 12
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py           | 40
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py | 22
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py               | 33
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py                 | 61
6 files changed, 159 insertions(+), 21 deletions(-)
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py
index 4c90606ff4558..1c1e95a29ef2c 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py
@@ -218,11 +218,15 @@ class DocBookRenderer(Renderer):
             result += f"<partintro{maybe_id}>"
         return result
     def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
-        if id := token.attrs.get('id'):
-            return f"<anchor xml:id={quoteattr(cast(str, id))} />"
-        return ""
+        if id := cast(str, token.attrs.get('id', '')):
+            id = f'xml:id={quoteattr(id)}' if id else ''
+        return f'<example {id}>'
     def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
-        return ""
+        return "</example>"
+    def example_title_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "<title>"
+    def example_title_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</title>"
 
     def _close_headings(self, level: Optional[int]) -> str:
         # we rely on markdown-it producing h{1..6} tags in token.tag for this to work
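
A rough sketch of what the new example_open hook emits, assuming quoteattr is the same helper docbook.py already imports from xml.sax.saxutils; the 'ex-sample' id is invented purely for illustration:

from xml.sax.saxutils import quoteattr

def sketch_example_open(attrs: dict[str, str]) -> str:
    # mirrors the attribute handling above: emit xml:id only when an id is set
    id = attrs.get('id', '')
    id = f'xml:id={quoteattr(id)}' if id else ''
    return f'<example {id}>'

print(sketch_example_open({'id': 'ex-sample'}))  # <example xml:id="ex-sample">
print(sketch_example_open({}))                   # <example >
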
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py
index 39d2da6adf8c0..ed9cd54855460 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py
@@ -214,11 +214,15 @@ class HTMLRenderer(Renderer):
         self._ordered_list_nesting -= 1;
         return "</ol></div>"
     def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
-        if id := token.attrs.get('id'):
-            return f'<a id="{escape(cast(str, id), True)}" />'
-        return ""
+        if id := cast(str, token.attrs.get('id', '')):
+            id = f'id="{escape(id, True)}"' if id else ''
+        return f'<div class="example"><a {id} />'
     def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
-        return ""
+        return '</div></div><br class="example-break" />'
+    def example_title_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return '<p class="title"><strong>'
+    def example_title_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return '</strong></p><div class="example-contents">'
 
     def _make_hN(self, level: int) -> tuple[str, str]:
         return f"h{min(6, max(1, level + self._hlevel_offset))}", ""
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
index 40dea3c7d1d85..1963989d53658 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
@@ -402,6 +402,18 @@ class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
         )
         if not (items := walk_and_emit(toc, toc_depth)):
             return ""
+        examples = ""
+        if toc.examples:
+            examples_entries = [
+                f'<dt>{i + 1}. <a href="{ex.target.href()}">{ex.target.toc_html}</a></dt>'
+                for i, ex in enumerate(toc.examples)
+            ]
+            examples = (
+                '<div class="list-of-examples">'
+                '<p><strong>List of Examples</strong></p>'
+                f'<dl>{"".join(examples_entries)}</dl>'
+                '</div>'
+            )
         return (
             f'<div class="toc">'
             f' <p><strong>Table of Contents</strong></p>'
@@ -409,6 +421,7 @@ class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
             f'  {"".join(items)}'
             f' </dl>'
             f'</div>'
+            f'{examples}'
         )
 
     def _make_hN(self, level: int) -> tuple[str, str]:
@@ -513,6 +526,25 @@ class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
             self._redirection_targets.add(into)
         return tokens
 
+    def _number_examples(self, tokens: Sequence[Token], start: int = 1) -> int:
+        for (i, token) in enumerate(tokens):
+            if token.type == "example_title_open":
+                title = tokens[i + 1]
+                assert title.type == 'inline' and title.children
+                # the prefix is split into two tokens because the xref title_html will want
+                # only the first of the two, but both must be rendered into the example itself.
+                title.children = (
+                    [
+                        Token('text', '', 0, content=f'Example {start}'),
+                        Token('text', '', 0, content='. ')
+                    ] + title.children
+                )
+                start += 1
+            elif token.type.startswith('included_') and token.type != 'included_options':
+                for sub, _path in token.meta['included']:
+                    start = self._number_examples(sub, start)
+        return start
+
     # xref | (id, type, heading inlines, file, starts new file)
     def _collect_ids(self, tokens: Sequence[Token], target_file: str, typ: str, file_changed: bool
                      ) -> list[XrefTarget | tuple[str, str, Token, str, bool]]:
@@ -534,6 +566,8 @@ class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
                 subtyp = bt.type.removeprefix('included_').removesuffix('s')
                 for si, (sub, _path) in enumerate(bt.meta['included']):
                     result += self._collect_ids(sub, sub_file, subtyp, si == 0 and sub_file != target_file)
+            elif bt.type == 'example_open' and (id := cast(str, bt.attrs.get('id', ''))):
+                result.append((id, 'example', tokens[i + 2], target_file, False))
             elif bt.type == 'inline':
                 assert bt.children
                 result += self._collect_ids(bt.children, target_file, typ, False)
@@ -558,6 +592,11 @@ class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
             title = prefix + title_html
             toc_html = f"{n}. {title_html}"
             title_html = f"Appendix&nbsp;{n}"
+        elif typ == 'example':
+            # skip the prepended `Example N. ` from _number_examples
+            toc_html, title = self._renderer.renderInline(inlines.children[2:]), title_html
+            # xref title wants only the prepended text, sans the trailing period and space
+            title_html = self._renderer.renderInline(inlines.children[0:1])
         else:
             toc_html, title = title_html, title_html
             title_html = (
@@ -569,6 +608,7 @@ class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
         return XrefTarget(id, title_html, toc_html, re.sub('<.*?>', '', title), path, drop_fragment)
 
     def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None:
+        self._number_examples(tokens)
         xref_queue = self._collect_ids(tokens, outfile.name, 'book', True)
 
         failed = False
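
The split prefix that _number_examples prepends exists so the xref and TOC code can later slice it apart again; a hedged illustration with an invented title, using markdown-it-py's Token as the code above does:

from markdown_it.token import Token

title_children = [
    Token('text', '', 0, content='Example 1'),        # reused alone as the xref title
    Token('text', '', 0, content='. '),               # rendered only inside the example itself
    Token('text', '', 0, content='Configuring foo'),  # the author's original heading text
]
xref_title = title_children[0:1]  # "Example 1" - see the typ == 'example' branch above
toc_text   = title_children[2:]   # "Configuring foo" - shown in the list of examples
in_place   = title_children       # "Example 1. Configuring foo" - rendered in the example
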
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
index c271ca3c5aa5f..95e6e9474e73f 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
@@ -14,7 +14,7 @@ from .utils import Freezeable
 FragmentType = Literal['preface', 'part', 'chapter', 'section', 'appendix']
 
 # in the TOC all fragments are allowed, plus the all-encompassing book.
-TocEntryType = Literal['book', 'preface', 'part', 'chapter', 'section', 'appendix']
+TocEntryType = Literal['book', 'preface', 'part', 'chapter', 'section', 'appendix', 'example']
 
 def is_include(token: Token) -> bool:
     return token.type == "fence" and token.info.startswith("{=include=} ")
@@ -124,6 +124,7 @@ class TocEntry(Freezeable):
     next: TocEntry | None = None
     children: list[TocEntry] = dc.field(default_factory=list)
     starts_new_chunk: bool = False
+    examples: list[TocEntry] = dc.field(default_factory=list)
 
     @property
     def root(self) -> TocEntry:
@@ -138,13 +139,13 @@ class TocEntry(Freezeable):
 
     @classmethod
     def collect_and_link(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token]) -> TocEntry:
-        result = cls._collect_entries(xrefs, tokens, 'book')
+        entries, examples = cls._collect_entries(xrefs, tokens, 'book')
 
         def flatten_with_parent(this: TocEntry, parent: TocEntry | None) -> Iterable[TocEntry]:
             this.parent = parent
             return itertools.chain([this], *[ flatten_with_parent(c, this) for c in this.children ])
 
-        flat = list(flatten_with_parent(result, None))
+        flat = list(flatten_with_parent(entries, None))
         prev = flat[0]
         prev.starts_new_chunk = True
         paths_seen = set([prev.target.path])
@@ -155,32 +156,39 @@ class TocEntry(Freezeable):
                 prev = c
             paths_seen.add(c.target.path)
 
+        flat[0].examples = examples
+
         for c in flat:
             c.freeze()
 
-        return result
+        return entries
 
     @classmethod
     def _collect_entries(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token],
-                         kind: TocEntryType) -> TocEntry:
+                         kind: TocEntryType) -> tuple[TocEntry, list[TocEntry]]:
         # we assume that check_structure has been run recursively over the entire input.
         # list contains (tag, entry) pairs that will collapse to a single entry for
         # the full sequence.
         entries: list[tuple[str, TocEntry]] = []
+        examples: list[TocEntry] = []
         for token in tokens:
             if token.type.startswith('included_') and (included := token.meta.get('included')):
                 fragment_type_str = token.type[9:].removesuffix('s')
                 assert fragment_type_str in get_args(TocEntryType)
                 fragment_type = cast(TocEntryType, fragment_type_str)
                 for fragment, _path in included:
-                    entries[-1][1].children.append(cls._collect_entries(xrefs, fragment, fragment_type))
+                    subentries, subexamples = cls._collect_entries(xrefs, fragment, fragment_type)
+                    entries[-1][1].children.append(subentries)
+                    examples += subexamples
             elif token.type == 'heading_open' and (id := cast(str, token.attrs.get('id', ''))):
                 while len(entries) > 1 and entries[-1][0] >= token.tag:
                     entries[-2][1].children.append(entries.pop()[1])
                 entries.append((token.tag,
                                 TocEntry(kind if token.tag == 'h1' else 'section', xrefs[id])))
                 token.meta['TocEntry'] = entries[-1][1]
+            elif token.type == 'example_open' and (id := cast(str, token.attrs.get('id', ''))):
+                examples.append(TocEntry('example', xrefs[id]))
 
         while len(entries) > 1:
             entries[-2][1].children.append(entries.pop()[1])
-        return entries[0][1]
+        return (entries[0][1], examples)
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py
index e8fee1b713282..ce79b0dee794d 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py
@@ -88,6 +88,8 @@ class Renderer:
             "ordered_list_close": self.ordered_list_close,
             "example_open": self.example_open,
             "example_close": self.example_close,
+            "example_title_open": self.example_title_open,
+            "example_title_close": self.example_title_close,
         }
 
         self._admonitions = {
@@ -219,6 +221,10 @@ class Renderer:
         raise RuntimeError("md token not supported", token)
     def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
+    def example_title_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        raise RuntimeError("md token not supported", token)
+    def example_title_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        raise RuntimeError("md token not supported", token)
 
 def _is_escaped(src: str, pos: int) -> bool:
     found = 0
@@ -417,6 +423,32 @@ def _block_attr(md: markdown_it.MarkdownIt) -> None:
 
     md.core.ruler.push("block_attr", block_attr)
 
+def _example_titles(md: markdown_it.MarkdownIt) -> None:
+    """
+    find title headings of examples and stick them into meta for renderers, then
+    remove them from the token stream. also checks whether any example contains a
+    non-title heading since those would make toc generation extremely complicated.
+    """
+    def example_titles(state: markdown_it.rules_core.StateCore) -> None:
+        in_example = [False]
+        for i, token in enumerate(state.tokens):
+            if token.type == 'example_open':
+                if state.tokens[i + 1].type == 'heading_open':
+                    assert state.tokens[i + 3].type == 'heading_close'
+                    state.tokens[i + 1].type = 'example_title_open'
+                    state.tokens[i + 3].type = 'example_title_close'
+                else:
+                    assert token.map
+                    raise RuntimeError(f"found example without title in line {token.map[0] + 1}")
+                in_example.append(True)
+            elif token.type == 'example_close':
+                in_example.pop()
+            elif token.type == 'heading_open' and in_example[-1]:
+                assert token.map
+                raise RuntimeError(f"unexpected non-title heading in example in line {token.map[0] + 1}")
+
+    md.core.ruler.push("example_titles", example_titles)
+
 TR = TypeVar('TR', bound='Renderer')
 
 class Converter(ABC, Generic[TR]):
@@ -459,6 +491,7 @@ class Converter(ABC, Generic[TR]):
         self._md.use(_heading_ids)
         self._md.use(_compact_list_attr)
         self._md.use(_block_attr)
+        self._md.use(_example_titles)
         self._md.enable(["smartquotes", "replacements"])
 
     def _parse(self, src: str) -> list[Token]:
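
In source terms the new core rule means an example container must open with a heading, which becomes its title, and may not contain any further headings; a hedged sample of accepted and rejected input (ids and titles invented):

accepted = """\
::: {#ex-configuring-foo .example}
# Configuring foo

Prose and code making up the example body.
:::
"""

rejected = """\
::: {.example}
# Configuring foo
## any further heading raises "unexpected non-title heading in example ..."
:::
"""
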
diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py
index f94ede6382bf0..fb7a4ab0117f7 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py
@@ -1,4 +1,5 @@
 import nixos_render_docs as nrd
+import pytest
 
 from markdown_it.token import Token
 
@@ -427,18 +428,38 @@ def test_admonitions() -> None:
 
 def test_example() -> None:
     c = Converter({})
-    assert c._parse("::: {.example}") == [
-        Token(type='example_open', tag='div', nesting=1, attrs={}, map=[0, 1], level=0, children=None,
+    assert c._parse("::: {.example}\n# foo") == [
+        Token(type='example_open', tag='div', nesting=1, attrs={}, map=[0, 2], level=0, children=None,
               content='', markup=':::', info=' {.example}', meta={}, block=True, hidden=False),
+        Token(type='example_title_open', tag='h1', nesting=1, attrs={}, map=[1, 2], level=1, children=None,
+              content='', markup='#', info='', meta={}, block=True, hidden=False),
+        Token(type='inline', tag='', nesting=0, attrs={}, map=[1, 2], level=2,
+              content='foo', markup='', info='', meta={}, block=True, hidden=False,
+              children=[
+                  Token(type='text', tag='', nesting=0, attrs={}, map=None, level=0, children=None,
+                        content='foo', markup='', info='', meta={}, block=False, hidden=False)
+              ]),
+        Token(type='example_title_close', tag='h1', nesting=-1, attrs={}, map=None, level=1, children=None,
+              content='', markup='#', info='', meta={}, block=True, hidden=False),
         Token(type='example_close', tag='div', nesting=-1, attrs={}, map=None, level=0, children=None,
-              content='', markup=':::', info='', meta={}, block=True, hidden=False)
+              content='', markup='', info='', meta={}, block=True, hidden=False)
     ]
-    assert c._parse("::: {#eid .example}") == [
-        Token(type='example_open', tag='div', nesting=1, attrs={'id': 'eid'}, map=[0, 1], level=0,
+    assert c._parse("::: {#eid .example}\n# foo") == [
+        Token(type='example_open', tag='div', nesting=1, attrs={'id': 'eid'}, map=[0, 2], level=0,
               children=None, content='', markup=':::', info=' {#eid .example}', meta={}, block=True,
               hidden=False),
+        Token(type='example_title_open', tag='h1', nesting=1, attrs={}, map=[1, 2], level=1, children=None,
+              content='', markup='#', info='', meta={}, block=True, hidden=False),
+        Token(type='inline', tag='', nesting=0, attrs={}, map=[1, 2], level=2,
+              content='foo', markup='', info='', meta={}, block=True, hidden=False,
+              children=[
+                  Token(type='text', tag='', nesting=0, attrs={}, map=None, level=0, children=None,
+                        content='foo', markup='', info='', meta={}, block=False, hidden=False)
+              ]),
+        Token(type='example_title_close', tag='h1', nesting=-1, attrs={}, map=None, level=1, children=None,
+              content='', markup='#', info='', meta={}, block=True, hidden=False),
         Token(type='example_close', tag='div', nesting=-1, attrs={}, map=None, level=0, children=None,
-              content='', markup=':::', info='', meta={}, block=True, hidden=False)
+              content='', markup='', info='', meta={}, block=True, hidden=False)
     ]
     assert c._parse("::: {.example .note}") == [
         Token(type='paragraph_open', tag='p', nesting=1, attrs={}, map=[0, 1], level=0, children=None,
@@ -452,3 +473,31 @@ def test_example() -> None:
         Token(type='paragraph_close', tag='p', nesting=-1, attrs={}, map=None, level=0, children=None,
               content='', markup='', info='', meta={}, block=True, hidden=False)
     ]
+    assert c._parse("::: {.example}\n### foo: `code`\nbar\n:::\nbaz") == [
+        Token(type='example_open', tag='div', nesting=1, map=[0, 3], markup=':::', info=' {.example}',
+              block=True),
+        Token(type='example_title_open', tag='h3', nesting=1, map=[1, 2], level=1, markup='###', block=True),
+        Token(type='inline', tag='', nesting=0, map=[1, 2], level=2, content='foo: `code`', block=True,
+              children=[
+                  Token(type='text', tag='', nesting=0, content='foo: '),
+                  Token(type='code_inline', tag='code', nesting=0, content='code', markup='`')
+              ]),
+        Token(type='example_title_close', tag='h3', nesting=-1, level=1, markup='###', block=True),
+        Token(type='paragraph_open', tag='p', nesting=1, map=[2, 3], level=1, block=True),
+        Token(type='inline', tag='', nesting=0, map=[2, 3], level=2, content='bar', block=True,
+              children=[
+                  Token(type='text', tag='', nesting=0, content='bar')
+              ]),
+        Token(type='paragraph_close', tag='p', nesting=-1, level=1, block=True),
+        Token(type='example_close', tag='div', nesting=-1, markup=':::', block=True),
+        Token(type='paragraph_open', tag='p', nesting=1, map=[4, 5], block=True),
+        Token(type='inline', tag='', nesting=0, map=[4, 5], level=1, content='baz', block=True,
+              children=[
+                  Token(type='text', tag='', nesting=0, content='baz')
+              ]),
+        Token(type='paragraph_close', tag='p', nesting=-1, block=True)
+    ]
+
+    with pytest.raises(RuntimeError) as exc:
+        c._parse("::: {.example}\n### foo\n### bar\n:::")
+    assert exc.value.args[0] == 'unexpected non-title heading in example in line 3'