Merge master into staging-next

Changed files
+1982 -896
doc
builders
lib
nixos
pkgs
applications
misc
video
mkvtoolnix
window-managers
phosh
development
interpreters
python
cpython
libraries
libdatovka
mpdecimal
python-modules
lsprotocol
peaqevcore
ulid-transform
games
os-specific
linux
servers
monitoring
grafana
prometheus
tools
top-level
+1
doc/builders/images/makediskimage.section.md
···
diskSize = "auto";
additionalSpace = "0M"; # Defaults to 512M.
copyChannel = false;
+
memSize = 2048; # Qemu VM memory size in megabytes. Defaults to 1024M.
}
```
+4 -1
lib/ascii-table.nix
···
-
{ " " = 32;
+
{ "\t" = 9;
+
"\n" = 10;
+
"\r" = 13;
+
" " = 32;
"!" = 33;
"\"" = 34;
"#" = 35;
+1 -1
lib/default.nix
···
escapeShellArg escapeShellArgs
isStorePath isStringLike
isValidPosixName toShellVar toShellVars
-
escapeRegex escapeXML replaceChars lowerChars
+
escapeRegex escapeURL escapeXML replaceChars lowerChars
upperChars toLower toUpper addContextFrom splitString
removePrefix removeSuffix versionOlder versionAtLeast
getName getVersion
+18 -3
lib/strings.nix
···
unsafeDiscardStringContext
;
+
asciiTable = import ./ascii-table.nix;
+
/* Concatenate a list of strings.
Type: concatStrings :: [string] -> string
···
=> 40
*/
-
charToInt = let
-
table = import ./ascii-table.nix;
-
in c: builtins.getAttr c table;
+
charToInt = c: builtins.getAttr c asciiTable;
/* Escape occurrence of the elements of `list` in `string` by
prefixing it with a backslash.
···
*/
escapeC = list: replaceStrings list (map (c: "\\x${ toLower (lib.toHexString (charToInt c))}") list);
+
+
/* Escape the string so it can be safely placed inside a URL
+
query.
+
+
Type: escapeURL :: string -> string
+
+
Example:
+
escapeURL "foo/bar baz"
+
=> "foo%2Fbar%20baz"
+
*/
+
escapeURL = let
+
unreserved = [ "A" "B" "C" "D" "E" "F" "G" "H" "I" "J" "K" "L" "M" "N" "O" "P" "Q" "R" "S" "T" "U" "V" "W" "X" "Y" "Z" "a" "b" "c" "d" "e" "f" "g" "h" "i" "j" "k" "l" "m" "n" "o" "p" "q" "r" "s" "t" "u" "v" "w" "x" "y" "z" "0" "1" "2" "3" "4" "5" "6" "7" "8" "9" "-" "_" "." "~" ];
+
toEscape = builtins.removeAttrs asciiTable unreserved;
+
in
+
replaceStrings (builtins.attrNames toEscape) (lib.mapAttrsToList (_: c: "%${fixedWidthString 2 "0" (lib.toHexString c)}") toEscape);
/* Quote string to be used safely within the Bourne shell.
+9
lib/tests/misc.nix
···
expected = "Hello\\x20World";
};
+
testEscapeURL = testAllTrue [
+
("" == strings.escapeURL "")
+
("Hello" == strings.escapeURL "Hello")
+
("Hello%20World" == strings.escapeURL "Hello World")
+
("Hello%2FWorld" == strings.escapeURL "Hello/World")
+
("42%25" == strings.escapeURL "42%")
+
("%20%3F%26%3D%23%2B%25%21%3C%3E%23%22%7B%7D%7C%5C%5E%5B%5D%60%09%3A%2F%40%24%27%28%29%2A%2C%3B" == strings.escapeURL " ?&=#+%!<>#\"{}|\\^[]`\t:/@$'()*,;")
+
];
+
testToInt = testAllTrue [
# Naive
(123 == toInt "123")
+61 -29
nixos/doc/manual/default.nix
···
}
'';
+
prepareManualFromMD = ''
+
cp -r --no-preserve=all $inputs/* .
+
+
substituteInPlace ./manual.md \
+
--replace '@NIXOS_VERSION@' "${version}"
+
substituteInPlace ./configuration/configuration.md \
+
--replace \
+
'@MODULE_CHAPTERS@' \
+
${lib.escapeShellArg (lib.concatMapStringsSep "\n" (p: "${p.value}") config.meta.doc)}
+
substituteInPlace ./nixos-options.md \
+
--replace \
+
'@NIXOS_OPTIONS_JSON@' \
+
${optionsDoc.optionsJSON}/share/doc/nixos/options.json
+
substituteInPlace ./development/writing-nixos-tests.section.md \
+
--replace \
+
'@NIXOS_TEST_OPTIONS_JSON@' \
+
${testOptionsDoc.optionsJSON}/share/doc/nixos/options.json
+
'';
+
manual-combined = runCommand "nixos-manual-combined"
{ inputs = lib.sourceFilesBySuffices ./. [ ".xml" ".md" ];
nativeBuildInputs = [ pkgs.nixos-render-docs pkgs.libxml2.bin pkgs.libxslt.bin ];
meta.description = "The NixOS manual as plain docbook XML";
}
''
-
cp -r --no-preserve=all $inputs/* .
-
-
substituteInPlace ./manual.md \
-
--replace '@NIXOS_VERSION@' "${version}"
-
substituteInPlace ./configuration/configuration.md \
-
--replace \
-
'@MODULE_CHAPTERS@' \
-
${lib.escapeShellArg (lib.concatMapStringsSep "\n" (p: "${p.value}") config.meta.doc)}
-
substituteInPlace ./nixos-options.md \
-
--replace \
-
'@NIXOS_OPTIONS_JSON@' \
-
${optionsDoc.optionsJSON}/share/doc/nixos/options.json
-
substituteInPlace ./development/writing-nixos-tests.section.md \
-
--replace \
-
'@NIXOS_TEST_OPTIONS_JSON@' \
-
${testOptionsDoc.optionsJSON}/share/doc/nixos/options.json
+
${prepareManualFromMD}
nixos-render-docs -j $NIX_BUILD_CORES manual docbook \
--manpage-urls ${manpageUrls} \
···
# Generate the NixOS manual.
manualHTML = runCommand "nixos-manual-html"
-
{ nativeBuildInputs = [ buildPackages.libxml2.bin buildPackages.libxslt.bin ];
+
{ nativeBuildInputs =
+
if allowDocBook then [
+
buildPackages.libxml2.bin
+
buildPackages.libxslt.bin
+
] else [
+
buildPackages.nixos-render-docs
+
];
+
inputs = lib.optionals (! allowDocBook) (lib.sourceFilesBySuffices ./. [ ".md" ]);
meta.description = "The NixOS manual in HTML format";
allowedReferences = ["out"];
}
···
# Generate the HTML manual.
dst=$out/share/doc/nixos
mkdir -p $dst
-
xsltproc \
-
${manualXsltprocOptions} \
-
--stringparam id.warnings "1" \
-
--nonet --output $dst/ \
-
${docbook_xsl_ns}/xml/xsl/docbook/xhtml/chunktoc.xsl \
-
${manual-combined}/manual-combined.xml \
-
|& tee xsltproc.out
-
grep "^ID recommended on" xsltproc.out &>/dev/null && echo "error: some IDs are missing" && false
-
rm xsltproc.out
-
-
mkdir -p $dst/images/callouts
-
cp ${docbook_xsl_ns}/xml/xsl/docbook/images/callouts/*.svg $dst/images/callouts/
cp ${../../../doc/style.css} $dst/style.css
cp ${../../../doc/overrides.css} $dst/overrides.css
cp -r ${pkgs.documentation-highlighter} $dst/highlightjs
+
+
${if allowDocBook then ''
+
xsltproc \
+
${manualXsltprocOptions} \
+
--stringparam id.warnings "1" \
+
--nonet --output $dst/ \
+
${docbook_xsl_ns}/xml/xsl/docbook/xhtml/chunktoc.xsl \
+
${manual-combined}/manual-combined.xml \
+
|& tee xsltproc.out
+
grep "^ID recommended on" xsltproc.out &>/dev/null && echo "error: some IDs are missing" && false
+
rm xsltproc.out
+
+
mkdir -p $dst/images/callouts
+
cp ${docbook_xsl_ns}/xml/xsl/docbook/images/callouts/*.svg $dst/images/callouts/
+
'' else ''
+
${prepareManualFromMD}
+
+
# TODO generator is set like this because the docbook/md manual compare workflow will
+
# trigger if it's different
+
nixos-render-docs -j $NIX_BUILD_CORES manual html \
+
--manpage-urls ${manpageUrls} \
+
--revision ${lib.escapeShellArg revision} \
+
--generator "DocBook XSL Stylesheets V${docbook_xsl_ns.version}" \
+
--stylesheet style.css \
+
--stylesheet overrides.css \
+
--stylesheet highlightjs/mono-blue.css \
+
--script ./highlightjs/highlight.pack.js \
+
--script ./highlightjs/loader.js \
+
--toc-depth 1 \
+
--chunk-toc-depth 1 \
+
./manual.md \
+
$dst/index.html
+
''}
mkdir -p $out/nix-support
echo "nix-build out $out" >> $out/nix-support/hydra-build-products
+4 -1
nixos/doc/manual/manual.md
···
contributing-to-this-manual.chapter.md
```
-
```{=include=} appendix
+
```{=include=} appendix html:into-file=//options.html
nixos-options.md
+
```
+
+
```{=include=} appendix html:into-file=//release-notes.html
release-notes/release-notes.md
```
+2
nixos/doc/manual/release-notes/rl-2305.section.md
···
- NixOS now defaults to using nsncd (a non-caching reimplementation in Rust) as NSS lookup dispatcher, instead of the buggy and deprecated glibc-provided nscd. If you need to switch back, set `services.nscd.enableNsncd = false`, but please open an issue in nixpkgs so your issue can be fixed.
+
- `services.borgmatic` now allows for multiple configurations, placed in `/etc/borgmatic.d/`, you can define them with `services.borgmatic.configurations`.
+
- The `dnsmasq` service now takes configuration via the
`services.dnsmasq.settings` attribute set. The option
`services.dnsmasq.extraConfig` will be deprecated when NixOS 22.11 reaches
+4 -1
nixos/lib/make-disk-image.nix
···
, # Shell code executed after the VM has finished.
postVM ? ""
+
, # Guest memory size
+
memSize ? 1024
+
, # Copy the contents of the Nix store to the root of the image and
# skip further setup. Incompatible with `contents`,
# `installBootLoader` and `configFile`.
···
"-drive if=pflash,format=raw,unit=1,file=$efiVars"
]
);
-
memSize = 1024;
+
inherit memSize;
} ''
export PATH=${binPath}:$PATH
+4
nixos/lib/make-multi-disk-zfs-image.nix
···
, # Shell code executed after the VM has finished.
postVM ? ""
+
, # Guest memory size
+
memSize ? 1024
+
, name ? "nixos-disk-image"
, # Disk image format, one of qcow2, qcow2-compressed, vdi, vpc, raw.
···
{
QEMU_OPTS = "-drive file=$bootDiskImage,if=virtio,cache=unsafe,werror=report"
+ " -drive file=$rootDiskImage,if=virtio,cache=unsafe,werror=report";
+
inherit memSize;
preVM = ''
PATH=$PATH:${pkgs.qemu_kvm}/bin
mkdir $out
+52 -34
nixos/modules/services/backup/borgmatic.nix
···
let
cfg = config.services.borgmatic;
settingsFormat = pkgs.formats.yaml { };
+
+
cfgType = with types; submodule {
+
freeformType = settingsFormat.type;
+
options.location = {
+
source_directories = mkOption {
+
type = listOf str;
+
description = mdDoc ''
+
List of source directories to backup (required). Globs and
+
tildes are expanded.
+
'';
+
example = [ "/home" "/etc" "/var/log/syslog*" ];
+
};
+
repositories = mkOption {
+
type = listOf str;
+
description = mdDoc ''
+
Paths to local or remote repositories (required). Tildes are
+
expanded. Multiple repositories are backed up to in
+
sequence. Borg placeholders can be used. See the output of
+
"borg help placeholders" for details. See ssh_command for
+
SSH options like identity file or port. If systemd service
+
is used, then add local repository paths in the systemd
+
service file to the ReadWritePaths list.
+
'';
+
example = [
+
"ssh://user@backupserver/./sourcehostname.borg"
+
"ssh://user@backupserver/./{fqdn}"
+
"/var/local/backups/local.borg"
+
];
+
};
+
};
+
};
+
cfgfile = settingsFormat.generate "config.yaml" cfg.settings;
-
in {
+
in
+
{
options.services.borgmatic = {
-
enable = mkEnableOption (lib.mdDoc "borgmatic");
+
enable = mkEnableOption (mdDoc "borgmatic");
settings = mkOption {
-
description = lib.mdDoc ''
+
description = mdDoc ''
See https://torsion.org/borgmatic/docs/reference/configuration/
'';
-
type = types.submodule {
-
freeformType = settingsFormat.type;
-
options.location = {
-
source_directories = mkOption {
-
type = types.listOf types.str;
-
description = lib.mdDoc ''
-
List of source directories to backup (required). Globs and
-
tildes are expanded.
-
'';
-
example = [ "/home" "/etc" "/var/log/syslog*" ];
-
};
-
repositories = mkOption {
-
type = types.listOf types.str;
-
description = lib.mdDoc ''
-
Paths to local or remote repositories (required). Tildes are
-
expanded. Multiple repositories are backed up to in
-
sequence. Borg placeholders can be used. See the output of
-
"borg help placeholders" for details. See ssh_command for
-
SSH options like identity file or port. If systemd service
-
is used, then add local repository paths in the systemd
-
service file to the ReadWritePaths list.
-
'';
-
example = [
-
"user@backupserver:sourcehostname.borg"
-
"user@backupserver:{fqdn}"
-
];
-
};
-
};
-
};
+
default = null;
+
type = types.nullOr cfgType;
+
};
+
+
configurations = mkOption {
+
description = mdDoc ''
+
Set of borgmatic configurations, see https://torsion.org/borgmatic/docs/reference/configuration/
+
'';
+
default = { };
+
type = types.attrsOf cfgType;
};
};
···
environment.systemPackages = [ pkgs.borgmatic ];
-
environment.etc."borgmatic/config.yaml".source = cfgfile;
+
environment.etc = (optionalAttrs (cfg.settings != null) { "borgmatic/config.yaml".source = cfgfile; }) //
+
mapAttrs'
+
(name: value: nameValuePair
+
"borgmatic.d/${name}.yaml"
+
{ source = settingsFormat.generate "${name}.yaml" value; })
+
cfg.configurations;
systemd.packages = [ pkgs.borgmatic ];
-
};
}
+2 -2
nixos/modules/services/web-apps/akkoma.md
···
{option}`services.systemd.akkoma.serviceConfig.BindPaths` and
{option}`services.systemd.akkoma.serviceConfig.BindReadOnlyPaths` permit access to outside paths
through bind mounts. Refer to
-
[{manpage}`systemd.exec(5)`](https://www.freedesktop.org/software/systemd/man/systemd.exec.html#BindPaths=)
-
for details.
+
[`BindPaths=`](https://www.freedesktop.org/software/systemd/man/systemd.exec.html#BindPaths=)
+
of {manpage}`systemd.exec(5)` for details.
### Distributed deployment {#modules-services-akkoma-distributed-deployment}
+1 -1
nixos/modules/system/boot/networkd.nix
···
Extra command-line arguments to pass to systemd-networkd-wait-online.
These also affect per-interface `systemd-network-wait-online@` services.
-
See [{manpage}`systemd-networkd-wait-online.service(8)`](https://www.freedesktop.org/software/systemd/man/systemd-networkd-wait-online.service.html) for all available options.
+
See {manpage}`systemd-networkd-wait-online.service(8)` for all available options.
'';
type = with types; listOf str;
default = [];
+1 -1
nixos/tests/gitea.nix
···
{ system ? builtins.currentSystem,
config ? {},
-
giteaPackage,
+
giteaPackage ? pkgs.gitea,
pkgs ? import ../.. { inherit system config; }
}:
+1 -1
nixos/tests/phosh.nix
···
in {
name = "phosh";
meta = with pkgs.lib.maintainers; {
-
maintainers = [ zhaofengli ];
+
maintainers = [ tomfitzhenry zhaofengli ];
};
nodes = {
+3 -3
pkgs/applications/misc/hugo/default.nix
···
buildGoModule rec {
pname = "hugo";
-
version = "0.110.0";
+
version = "0.111.1";
src = fetchFromGitHub {
owner = "gohugoio";
repo = pname;
rev = "v${version}";
-
hash = "sha256-7B0C8191lUGsv81+0eKDrBm+5hLlFjID3RTuajSg/RM=";
+
hash = "sha256-3bg7cmM05ekR5gtJCEJk3flplw8MRc9hVqlZx3ZUIaw=";
};
-
vendorHash = "sha256-GtywXjtAF5Q4jUz2clfseUJVqiU+eSguG/ZoKy2TzuA=";
+
vendorHash = "sha256-xiysjJi3bL0xIoEEo7xXQbznFzwKJrCT6l/bxEbDRUI=";
doCheck = false;
+2 -2
pkgs/applications/misc/mwic/default.nix
···
{ lib, stdenv, fetchurl, pythonPackages }:
stdenv.mkDerivation rec {
-
version = "0.7.9";
+
version = "0.7.10";
pname = "mwic";
src = fetchurl {
url = "https://github.com/jwilk/mwic/releases/download/${version}/${pname}-${version}.tar.gz";
-
sha256 = "sha256-i7DSvUBUMOvn2aYpwYOCDHKq0nkleknD7k2xopo+C5s=";
+
sha256 = "sha256-dmIHPehkxpSb78ymVpcPCu4L41coskrHQOg067dprOo=";
};
makeFlags=["PREFIX=\${out}"];
+1 -1
pkgs/applications/misc/phoc/default.nix
···
description = "Wayland compositor for mobile phones like the Librem 5";
homepage = "https://gitlab.gnome.org/World/Phosh/phoc";
license = licenses.gpl3Plus;
-
maintainers = with maintainers; [ masipcat zhaofengli ];
+
maintainers = with maintainers; [ masipcat tomfitzhenry zhaofengli ];
platforms = platforms.linux;
};
}
+2 -2
pkgs/applications/video/mkvtoolnix/default.nix
···
in
stdenv.mkDerivation rec {
pname = "mkvtoolnix";
-
version = "73.0.0";
+
version = "74.0.0";
src = fetchFromGitLab {
owner = "mbunkus";
repo = "mkvtoolnix";
rev = "release-${version}";
-
sha256 = "HGoT3t/ooRMiyjUkHnvVGOB04IU5U8VEKDixhE57kR8=";
+
sha256 = "sha256-p8rIAHSqYCOlNbuxisQlIkMh2OArc+MOYn1kgC5kJsc=";
};
nativeBuildInputs = [
+1 -1
pkgs/applications/window-managers/phosh/default.nix
···
homepage = "https://gitlab.gnome.org/World/Phosh/phosh";
changelog = "https://gitlab.gnome.org/World/Phosh/phosh/-/blob/v${version}/debian/changelog";
license = licenses.gpl3Plus;
-
maintainers = with maintainers; [ masipcat zhaofengli ];
+
maintainers = with maintainers; [ masipcat tomfitzhenry zhaofengli ];
platforms = platforms.linux;
};
}
+18 -4
pkgs/development/interpreters/python/cpython/default.nix
···
, libxcrypt
, self
, configd
+
, darwin
, autoreconfHook
, autoconf-archive
, pkg-config
···
, stripBytecode ? true
, includeSiteCustomize ? true
, static ? stdenv.hostPlatform.isStatic
+
, enableFramework ? false
, enableOptimizations ? false
# enableNoSemanticInterposition is a subset of the enableOptimizations flag that doesn't harm reproducibility.
# clang starts supporting `-fno-sematic-interposition` with version 10
···
&& libX11 != null;
assert bluezSupport -> bluez != null;
+
+
assert enableFramework -> stdenv.isDarwin;
assert lib.assertMsg (reproducibleBuild -> stripBytecode)
"Deterministic builds require stripping bytecode.";
···
buildPackages = pkgsBuildHost;
inherit (passthru) pythonForBuild;
+
inherit (darwin.apple_sdk.frameworks) Cocoa;
+
tzdataSupport = tzdata != null && passthru.pythonAtLeast "3.9";
passthru = let
···
++ optionals x11Support [ tcl tk libX11 xorgproto ]
++ optionals (bluezSupport && stdenv.isLinux) [ bluez ]
++ optionals stdenv.isDarwin [ configd ])
+
+
++ optionals enableFramework [ Cocoa ]
++ optionals tzdataSupport [ tzdata ]; # `zoneinfo` module
hasDistutilsCxxPatch = !(stdenv.cc.isGNU or false);
···
"--without-ensurepip"
"--with-system-expat"
"--with-system-ffi"
-
] ++ optionals (!static) [
+
] ++ optionals (!static && !enableFramework) [
"--enable-shared"
+
] ++ optionals enableFramework [
+
"--enable-framework=${placeholder "out"}/Library/Frameworks"
] ++ optionals enableOptimizations [
"--enable-optimizations"
] ++ optionals enableLTO [
···
] ++ optionals tzdataSupport [
tzdata
]);
-
in ''
+
in lib.optionalString enableFramework ''
+
for dir in include lib share; do
+
ln -s $out/Library/Frameworks/Python.framework/Versions/Current/$dir $out/$dir
+
done
+
'' + ''
# needed for some packages, especially packages that backport functionality
# to 2.x from 3.x
for item in $out/lib/${libPrefix}/test/*; do
···
# Enforce that we don't have references to the OpenSSL -dev package, which we
# explicitly specify in our configure flags above.
disallowedReferences =
-
lib.optionals (openssl' != null && !static) [ openssl'.dev ]
+
lib.optionals (openssl' != null && !static && !enableFramework) [ openssl'.dev ]
++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
# Ensure we don't have references to build-time packages.
# These typically end up in shebangs.
···
high level dynamic data types.
'';
license = licenses.psfl;
-
platforms = with platforms; linux ++ darwin;
+
platforms = platforms.linux ++ platforms.darwin;
maintainers = with maintainers; [ fridh ];
};
}
+2 -2
pkgs/development/libraries/libdatovka/default.nix
···
stdenv.mkDerivation rec {
pname = "libdatovka";
-
version = "0.2.1";
+
version = "0.3.0";
src = fetchurl {
url = "https://gitlab.nic.cz/datovka/libdatovka/-/archive/v${version}/libdatovka-v${version}.tar.gz";
-
sha256 = "sha256-687d8ZD9zfMeo62YWCW5Kc0CXkKClxtbbwXR51pPwBE=";
+
sha256 = "sha256-aG7U8jP3pvOeFDetYVOx+cE78ys0uSkKNjSgB09ste8=";
};
patches = [
+9 -1
pkgs/development/libraries/mpdecimal/default.nix
···
stdenv.mkDerivation rec {
pname = "mpdecimal";
version = "2.5.1";
-
outputs = [ "out" "doc" ];
+
outputs = [ "out" "cxx" "doc" "dev" ];
src = fetchurl {
url = "https://www.bytereef.org/software/mpdecimal/releases/mpdecimal-${version}.tar.gz";
···
};
configureFlags = [ "LD=${stdenv.cc.targetPrefix}cc" ];
+
+
postInstall = ''
+
mkdir -p $cxx/lib
+
mv $out/lib/*c++* $cxx/lib
+
+
mkdir -p $dev/nix-support
+
echo -n $cxx >> $dev/nix-support/propagated-build-inputs
+
'';
meta = {
description = "Library for arbitrary precision decimal floating point arithmetic";
+2 -2
pkgs/development/python-modules/lsprotocol/default.nix
···
buildPythonPackage rec {
pname = "lsprotocol";
-
version = "2022.0.0a9";
+
version = "2022.0.0a10";
format = "pyproject";
disabled = pythonOlder "3.7";
···
owner = "microsoft";
repo = pname;
rev = "refs/tags/${version}";
-
hash = "sha256-6XecPKuBhwtkmZrGozzO+VEryI5wwy9hlvWE1oV6ajk=";
+
hash = "sha256-IAFNEWpBRVAGcJNIV1bog9K2nANRw/qJfCJ9+Wu/yJc=";
};
nativeBuildInputs = [
+2 -2
pkgs/development/python-modules/peaqevcore/default.nix
···
buildPythonPackage rec {
pname = "peaqevcore";
-
version = "12.2.1";
+
version = "12.2.6";
format = "setuptools";
disabled = pythonOlder "3.7";
src = fetchPypi {
inherit pname version;
-
hash = "sha256-WOuKGVrNZzvY7F0Mvj3MjSdTu47c5Y11ySe1qorzlWE=";
+
hash = "sha256-IAqXp/d0f1khhNpkp4uQmxqJ4Xh8Nl87i+iMa3U9EDM=";
};
postPatch = ''
+51
pkgs/development/python-modules/ulid-transform/default.nix
···
+
{ lib
+
, cython
+
, buildPythonPackage
+
, fetchFromGitHub
+
, poetry-core
+
, pytestCheckHook
+
, pythonOlder
+
, setuptools
+
}:
+
+
buildPythonPackage rec {
+
pname = "ulid-transform";
+
version = "0.4.0";
+
format = "pyproject";
+
+
disabled = pythonOlder "3.9";
+
+
src = fetchFromGitHub {
+
owner = "bdraco";
+
repo = pname;
+
rev = "refs/tags/v${version}";
+
hash = "sha256-JuTIE8FAVZkfn+byJ1z9/ep9Oih1uXpz/QTB2OfM0WU=";
+
};
+
+
nativeBuildInputs = [
+
cython
+
poetry-core
+
setuptools
+
];
+
+
nativeCheckInputs = [
+
pytestCheckHook
+
];
+
+
postPatch = ''
+
substituteInPlace pyproject.toml \
+
--replace " --cov=ulid_transform --cov-report=term-missing:skip-covered" ""
+
'';
+
+
pythonImportsCheck = [
+
"ulid_transform"
+
];
+
+
meta = with lib; {
+
description = "Library to create and transform ULIDs";
+
homepage = "https://github.com/bdraco/ulid-transform";
+
changelog = "https://github.com/bdraco/ulid-transform/releases/tag/v${version}";
+
license = with licenses; [ mit ];
+
maintainers = with maintainers; [ fab ];
+
};
+
}
+2 -2
pkgs/games/osu-lazer/bin.nix
···
appimageTools.wrapType2 rec {
pname = "osu-lazer-bin";
-
version = "2023.207.0";
+
version = "2023.301.0";
src = fetchurl {
url = "https://github.com/ppy/osu/releases/download/${version}/osu.AppImage";
-
sha256 = "sha256-xJQcqNV/Pr3gEGStczc3gv8AYrEKFsAo2g4WtA59fwk=";
+
sha256 = "sha256-0c74bGOY9f2K52xE7CZy/i3OfyCC+a6XGI30c6hI7jM=";
};
extraPkgs = pkgs: with pkgs; [ icu ];
+2 -2
pkgs/games/osu-lazer/default.nix
···
buildDotnetModule rec {
pname = "osu-lazer";
-
version = "2023.207.0";
+
version = "2023.301.0";
src = fetchFromGitHub {
owner = "ppy";
repo = "osu";
rev = version;
-
sha256 = "sha256-s0gzSfj4+xk3joS7S68ZGjgatiJY2Y1FBCmrhptaWIk=";
+
sha256 = "sha256-SUVxe3PdUch8NYR7X4fatbmSpyYewI69usBDICcSq3s=";
};
projectFile = "osu.Desktop/osu.Desktop.csproj";
+41 -40
pkgs/games/osu-lazer/deps.nix
···
# Please dont edit it manually, your changes might get overwritten!
{ fetchNuGet }: [
-
(fetchNuGet { pname = "AutoMapper"; version = "11.0.1"; sha256 = "1z1x5c1dkwk6142km5q6jglhpq9x82alwjjy5a72c8qnq9ppdfg3"; })
+
(fetchNuGet { pname = "AutoMapper"; version = "12.0.1"; sha256 = "0s0wjl4ck3sal8a50x786wxs9mbca7bxaqk3558yx5wpld4h4z3b"; })
(fetchNuGet { pname = "Clowd.Squirrel"; version = "2.9.42"; sha256 = "1xxrr9jmgn343d467nz40569mkybinnmxaxyc4fhgy6yddvzk1y0"; })
(fetchNuGet { pname = "DiffPlex"; version = "1.7.1"; sha256 = "1q78r70pirgb7j5wkh454ws237lihh0fig212cpbj02cz53c2h6j"; })
-
(fetchNuGet { pname = "DiscordRichPresence"; version = "1.1.1.14"; sha256 = "18adkrddjlci5ajs17ck1c8cd8id3cgjylqvfggyqwrmsh7yr4j6"; })
+
(fetchNuGet { pname = "DiscordRichPresence"; version = "1.1.3.18"; sha256 = "0p4bhaggjjfd4gl06yiphqgncxgcq2bws4sjkrw0n2ldf3hgrps3"; })
(fetchNuGet { pname = "FFmpeg.AutoGen"; version = "4.3.0.1"; sha256 = "0n6x57mnnvcjnrs8zyvy07h5zm4bcfy9gh4n4bvd9fx5ys4pxkvv"; })
(fetchNuGet { pname = "Fody"; version = "6.6.4"; sha256 = "1hhdwj0ska7dvak9hki8cnyfmmw5r8yw8w24gzsdwhqx68dnrvsx"; })
(fetchNuGet { pname = "HidSharpCore"; version = "1.2.1.1"; sha256 = "1zkndglmz0s8rblfhnqcvv90rkq2i7lf4bc380g7z8h1avf2ikll"; })
···
(fetchNuGet { pname = "JetBrains.Annotations"; version = "2021.3.0"; sha256 = "01ssylllbwpana2w3iybi533zlvcsbhzjc8kr0g4kg307kjbfn8v"; })
(fetchNuGet { pname = "managed-midi"; version = "1.10.0"; sha256 = "1rih8iq8k4j6n3206d2j7z4vygp725kzs95c6yc7p1mlhfiiimvq"; })
(fetchNuGet { pname = "Markdig"; version = "0.23.0"; sha256 = "1bwn885w7balwncmr764vidyyp9bixqlq6r3lhsapj8ykrpxxa70"; })
-
(fetchNuGet { pname = "MessagePack"; version = "2.4.35"; sha256 = "0y8pz073ync51cv39lxldc797nmcm39r4pdhy2il6r95rppjqg5h"; })
-
(fetchNuGet { pname = "MessagePack.Annotations"; version = "2.4.35"; sha256 = "1jny2r6rwq7xzwymm779w9x8a5rhyln97mxzplxwd53wwbb0wbzd"; })
-
(fetchNuGet { pname = "Microsoft.AspNetCore.Connections.Abstractions"; version = "6.0.10"; sha256 = "1wic0bghgwg2r8q676miv3kk7ph5g46kvkw1iljr4b8s58mqbwas"; })
-
(fetchNuGet { pname = "Microsoft.AspNetCore.Http.Connections.Client"; version = "6.0.10"; sha256 = "1a8m44qgjwfhmqpfsyyb1hgak3sh99s62hnfmphxsflfvx611mbb"; })
-
(fetchNuGet { pname = "Microsoft.AspNetCore.Http.Connections.Common"; version = "6.0.10"; sha256 = "0vqc62xjiwlqwifx3nj0nwssjrdqka2avpqiiwylsbd48s1ahxdy"; })
-
(fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Client"; version = "6.0.10"; sha256 = "090ggwxv2j86hkmnzqxa728wpn5g30dfqd05widhd7n1m51igq71"; })
-
(fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Client.Core"; version = "6.0.10"; sha256 = "13i22fkai420fvr71c3pfnadspcv8jpf5bci9fn3yh580bfqw21a"; })
-
(fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Common"; version = "6.0.10"; sha256 = "0kmy2h310hqpr6bgd128r4q7ny4i7qjfvgrv1swhqv2j9n1yriby"; })
-
(fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.Json"; version = "6.0.10"; sha256 = "13q429kwbijyfgpb4dp04lr2c691ra5br5wf8g7s260pij10x1nz"; })
-
(fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.MessagePack"; version = "6.0.10"; sha256 = "068gw5q25yaf5k5c96kswmna1jixpw6s82r7gmgnw54rcc8gdz3f"; })
-
(fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.NewtonsoftJson"; version = "6.0.10"; sha256 = "1k7jvvvz8wwbd1bw1shcgrgz2gw3l877krhw39b9sj2vbwzc8bn7"; })
-
(fetchNuGet { pname = "Microsoft.CodeAnalysis.BannedApiAnalyzers"; version = "3.3.3"; sha256 = "1z6x0d8lpcfjr3sxy25493i17vvcg5bsay6c03qan6mnj5aqzw2k"; })
+
(fetchNuGet { pname = "MessagePack"; version = "2.4.59"; sha256 = "13igx5m5hkqqyhyw04z2nwfxn2jwlrpvvwx4c8qrayv9j4l31ajm"; })
+
(fetchNuGet { pname = "MessagePack.Annotations"; version = "2.4.59"; sha256 = "1y8mg95x87jddk0hyf58cc1zy666mqbla7479njkm7kmpwz61s8c"; })
+
(fetchNuGet { pname = "Microsoft.AspNetCore.Connections.Abstractions"; version = "7.0.2"; sha256 = "1k5gjiwmcrbwfz54jafz6mmf4md7jgk3j8jdpp9ax72glwa7ia4a"; })
+
(fetchNuGet { pname = "Microsoft.AspNetCore.Http.Connections.Client"; version = "7.0.2"; sha256 = "0rnra67gkg0qs7wys8bacm1raf9khb688ch2yr56m88kwdk5bhw4"; })
+
(fetchNuGet { pname = "Microsoft.AspNetCore.Http.Connections.Common"; version = "7.0.2"; sha256 = "19dviyc68m56mmy05lylhp2bxvww2gqx1y07kc0yqp61rcjb1d85"; })
+
(fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Client"; version = "7.0.2"; sha256 = "0ms9syxlxk6f5pxjw23s2cz4ld60vk84v67l0bhnnb8v42rz97nn"; })
+
(fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Client.Core"; version = "7.0.2"; sha256 = "15qs3pdji2sd629as4i8zd5bjbs165waim9jypxqjkb55bslz8d7"; })
+
(fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Common"; version = "7.0.2"; sha256 = "0c3ia03m1shc2xslqln5m986kpvc1dqb15j85vqxbzb0jj6fr52y"; })
+
(fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.Json"; version = "7.0.2"; sha256 = "028r8sk5dlxkfxw6wz2ys62rm9dqa85s6rfhilrfy1phsl47rkal"; })
+
(fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.MessagePack"; version = "7.0.2"; sha256 = "1zkznsq5r7gg2pnlj9y7swrbvzyywf6q5xf9ggcwbvccwp0g6jr4"; })
+
(fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.NewtonsoftJson"; version = "7.0.2"; sha256 = "1x5pymqc315nb8z2414dvqdpcfd5zy5slcfa9b3vjhrbbbngaly7"; })
+
(fetchNuGet { pname = "Microsoft.CodeAnalysis.BannedApiAnalyzers"; version = "3.3.4"; sha256 = "1vzrni7n94f17bzc13lrvcxvgspx9s25ap1p005z6i1ikx6wgx30"; })
(fetchNuGet { pname = "Microsoft.CSharp"; version = "4.5.0"; sha256 = "01i28nvzccxbqmiz217fxs6hnjwmd5fafs37rd49a6qp53y6623l"; })
(fetchNuGet { pname = "Microsoft.CSharp"; version = "4.7.0"; sha256 = "0gd67zlw554j098kabg887b5a6pq9kzavpa3jjy5w53ccjzjfy8j"; })
-
(fetchNuGet { pname = "Microsoft.Data.Sqlite.Core"; version = "6.0.10"; sha256 = "1sdh5rw2pyg6c64z0haxf57bakd5kwaav624vlqif1m59iz26rag"; })
+
(fetchNuGet { pname = "Microsoft.Data.Sqlite.Core"; version = "7.0.2"; sha256 = "0xipbci6pshj825a1r8nlc19hf26n4ba33sx7dbx727ja5lyjv8m"; })
(fetchNuGet { pname = "Microsoft.Diagnostics.NETCore.Client"; version = "0.2.61701"; sha256 = "1ic1607jj4ln8dbibf1fz5v9svk9x2kqlgvhndc6ijaqnbc4wcr1"; })
(fetchNuGet { pname = "Microsoft.Diagnostics.Runtime"; version = "2.0.161401"; sha256 = "02qcm8nv1ch07g8b0i60ynrjn33b8y5ivyk4rxal3vd9zfi6pvwi"; })
(fetchNuGet { pname = "Microsoft.DotNet.PlatformAbstractions"; version = "2.0.3"; sha256 = "020214swxm0hip1d9gjskrzmqzjnji7c6l5b3xcch8vp166066m9"; })
-
(fetchNuGet { pname = "Microsoft.Extensions.Configuration.Abstractions"; version = "6.0.0"; sha256 = "0w6wwxv12nbc3sghvr68847wc9skkdgsicrz3fx4chgng1i3xy0j"; })
+
(fetchNuGet { pname = "Microsoft.Extensions.Configuration.Abstractions"; version = "7.0.0"; sha256 = "1as8cygz0pagg17w22nsf6mb49lr2mcl1x8i3ad1wi8lyzygy1a3"; })
(fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection"; version = "6.0.0-rc.1.21451.13"; sha256 = "0r6945jq7c2f1wjifq514zvngicndjqfnsjya6hqw0yzah0jr56c"; })
-
(fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection"; version = "6.0.1"; sha256 = "0kl5ypidmzllyxb91gwy3z950dc416p1y8wikzbdbp0l7aaaxq2p"; })
-
(fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection.Abstractions"; version = "6.0.0"; sha256 = "1vi67fw7q99gj7jd64gnnfr4d2c0ijpva7g9prps48ja6g91x6a9"; })
+
(fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection"; version = "7.0.0"; sha256 = "121zs4jp8iimgbpzm3wsglhjwkc06irg1pxy8c1zcdlsg34cfq1p"; })
(fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection.Abstractions"; version = "6.0.0-rc.1.21451.13"; sha256 = "11dg16x6g0gssb143qpghxz1s41himvhr7yhjwxs9hacx4ij2dm1"; })
+
(fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection.Abstractions"; version = "7.0.0"; sha256 = "181d7mp9307fs17lyy42f8cxnjwysddmpsalky4m0pqxcimnr6g7"; })
(fetchNuGet { pname = "Microsoft.Extensions.DependencyModel"; version = "2.0.3"; sha256 = "0dpyjp0hy9kkvk2dd4dclfmb10yq5avsw2a6v8nra9g6ii2p1nla"; })
-
(fetchNuGet { pname = "Microsoft.Extensions.Features"; version = "6.0.10"; sha256 = "10avgg7c4iggq3i7gba0srd01fip637mmc903ymdpa2c92qgkqr8"; })
-
(fetchNuGet { pname = "Microsoft.Extensions.Logging"; version = "6.0.0"; sha256 = "0fd9jii3y3irfcwlsiww1y9npjgabzarh33rn566wpcz24lijszi"; })
-
(fetchNuGet { pname = "Microsoft.Extensions.Logging.Abstractions"; version = "6.0.0"; sha256 = "0b75fmins171zi6bfdcq1kcvyrirs8n91mknjnxy4c3ygi1rrnj0"; })
-
(fetchNuGet { pname = "Microsoft.Extensions.Logging.Abstractions"; version = "6.0.2"; sha256 = "1wv54f3p3r2zj1pr9a6z8zqrh2ihm6v6qcw2pjwis1lcc0qb472m"; })
+
(fetchNuGet { pname = "Microsoft.Extensions.Features"; version = "7.0.2"; sha256 = "18ipxpw73wi5gdj7vxhmqgk8rl3l95w6h5ajxbccdfyv5p75v66d"; })
+
(fetchNuGet { pname = "Microsoft.Extensions.Logging"; version = "7.0.0"; sha256 = "1bqd3pqn5dacgnkq0grc17cgb2i0w8z1raw12nwm3p3zhrfcvgxf"; })
+
(fetchNuGet { pname = "Microsoft.Extensions.Logging.Abstractions"; version = "7.0.0"; sha256 = "1gn7d18i1wfy13vrwhmdv1rmsb4vrk26kqdld4cgvh77yigj90xs"; })
(fetchNuGet { pname = "Microsoft.Extensions.ObjectPool"; version = "5.0.11"; sha256 = "0i7li76gmk6hml12aig4cvyvja9mgl16qr8pkwvx5vm6lc9a3nn4"; })
-
(fetchNuGet { pname = "Microsoft.Extensions.Options"; version = "6.0.0"; sha256 = "008pnk2p50i594ahz308v81a41mbjz9mwcarqhmrjpl2d20c868g"; })
-
(fetchNuGet { pname = "Microsoft.Extensions.Primitives"; version = "6.0.0"; sha256 = "1kjiw6s4yfz9gm7mx3wkhp06ghnbs95icj9hi505shz9rjrg42q2"; })
-
(fetchNuGet { pname = "Microsoft.NET.StringTools"; version = "1.0.0"; sha256 = "06yakiyzgss399giivfx6xdrnfxqfsvy5fzm90scjanvandv0sdj"; })
+
(fetchNuGet { pname = "Microsoft.Extensions.Options"; version = "7.0.0"; sha256 = "0b90zkrsk5dw3wr749rbynhpxlg4bgqdnd7d5vdlw2g9c7zlhgx6"; })
+
(fetchNuGet { pname = "Microsoft.Extensions.Primitives"; version = "7.0.0"; sha256 = "1b4km9fszid9vp2zb3gya5ni9fn8bq62bzaas2ck2r7gs0sdys80"; })
+
(fetchNuGet { pname = "Microsoft.NET.StringTools"; version = "17.4.0"; sha256 = "1smx30nq22plrn2mw4wb5vfgxk6hyx12b60c4wabmpnr81lq3nzv"; })
(fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "1.0.1"; sha256 = "01al6cfxp68dscl15z7rxfw9zvhm64dncsw09a1vmdkacsa2v6lr"; })
(fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "1.1.0"; sha256 = "08vh1r12g6ykjygq5d3vq09zylgb84l63k49jc4v8faw9g93iqqm"; })
(fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "2.0.0"; sha256 = "1fk2fk2639i7nzy58m9dvpdnzql4vb8yl8vr19r2fp8lmj9w2jr0"; })
···
(fetchNuGet { pname = "NativeLibraryLoader"; version = "1.0.12"; sha256 = "1nkn5iylxj8i7355cljfvrn3ha7ylf30dh8f63zhybc2vb8hbpkk"; })
(fetchNuGet { pname = "NETStandard.Library"; version = "1.6.1"; sha256 = "1z70wvsx2d847a2cjfii7b83pjfs34q05gb037fdjikv5kbagml8"; })
(fetchNuGet { pname = "NETStandard.Library"; version = "2.0.0"; sha256 = "1bc4ba8ahgk15m8k4nd7x406nhi0kwqzbgjk2dmw52ss553xz7iy"; })
-
(fetchNuGet { pname = "Newtonsoft.Json"; version = "12.0.2"; sha256 = "0w2fbji1smd2y7x25qqibf1qrznmv4s6s0jvrbvr6alb7mfyqvh5"; })
+
(fetchNuGet { pname = "Newtonsoft.Json"; version = "13.0.1"; sha256 = "0fijg0w6iwap8gvzyjnndds0q4b8anwxxvik7y8vgq97dram4srb"; })
(fetchNuGet { pname = "Newtonsoft.Json"; version = "13.0.2"; sha256 = "1p9splg1min274dpz7xdfgzrwkyfd3xlkygwpr1xgjvvyjvs6b0i"; })
(fetchNuGet { pname = "NuGet.Common"; version = "5.11.0"; sha256 = "1amf6scr5mcjdvd1fflag6i4qjwmydq5qwp6g3f099n901zq0dr3"; })
(fetchNuGet { pname = "NuGet.Configuration"; version = "5.11.0"; sha256 = "1s9pbrh7xy9jz7npz0sahdsj1cw8gfx1fwf3knv0ms1n0c9bk53l"; })
···
(fetchNuGet { pname = "ppy.ManagedBass"; version = "2022.1216.0"; sha256 = "19nnj1hq2v21mrplnivjr9c4y3wg4hhfnc062sjgzkmiv1cchvf8"; })
(fetchNuGet { pname = "ppy.ManagedBass.Fx"; version = "2022.1216.0"; sha256 = "1vw573mkligpx9qiqasw1683cqaa1kgnxhlnbdcj9c4320b1pwjm"; })
(fetchNuGet { pname = "ppy.ManagedBass.Mix"; version = "2022.1216.0"; sha256 = "185bpvgbnd8y20r7vxb1an4pd1aal9b7b5wvmv3knz0qg8j0chd9"; })
-
(fetchNuGet { pname = "ppy.osu.Framework"; version = "2023.131.0"; sha256 = "1mbgcg0c8w6114c36jxypz7z1yps5zgw3f2lxw75fra0rylwqm23"; })
+
(fetchNuGet { pname = "ppy.osu.Framework"; version = "2023.228.0"; sha256 = "1acr957wlpgwng6mvyh6m1wv59ljvk9wh2aclds8ary8li00skdb"; })
(fetchNuGet { pname = "ppy.osu.Framework.NativeLibs"; version = "2022.525.0"; sha256 = "1zsqj3xng06bb46vg79xx35n2dsh3crqg951r1ga2gxqzgzy4nk0"; })
(fetchNuGet { pname = "ppy.osu.Framework.SourceGeneration"; version = "2022.1222.1"; sha256 = "1pwwsp4rfzl6166mhrn5lsnyazpckhfh1m6ggf9d1lw2wb58vxfr"; })
-
(fetchNuGet { pname = "ppy.osu.Game.Resources"; version = "2023.202.0"; sha256 = "13apknxly9fqqchmdvkdgfq2jbimln0ixg2d7yn6jcfd235279mj"; })
+
(fetchNuGet { pname = "ppy.osu.Game.Resources"; version = "2023.228.0"; sha256 = "12i5z7pkm03zc34q162qjas20v4d9rd1qwbwz1l4iyv010riaa43"; })
(fetchNuGet { pname = "ppy.osuTK.NS20"; version = "1.0.211"; sha256 = "0j4a9n39pqm0cgdcps47p5n2mqph3h94r7hmf0bs59imif4jxvjy"; })
(fetchNuGet { pname = "ppy.SDL2-CS"; version = "1.0.630-alpha"; sha256 = "0jrf70jrz976b49ac0ygfy9qph2w7fnbfrqv0g0x7hlpaip33ra8"; })
-
(fetchNuGet { pname = "Realm"; version = "10.18.0"; sha256 = "0dzwpcqkp8x8zah1bpx8cf01w4j1vi4gvipmaxlxczrc8p0f9zws"; })
-
(fetchNuGet { pname = "Realm.Fody"; version = "10.18.0"; sha256 = "1d2y7kz1jp1b11kskgk0fpp6ci17aqkrhzdfq5vcr4y7a8hbi9j5"; })
-
(fetchNuGet { pname = "Realm.SourceGenerator"; version = "10.18.0"; sha256 = "10bj3mgxdxgwsnpgbvlpnsj5ha582dvkvjnhb4qk7558g262dia8"; })
+
(fetchNuGet { pname = "Realm"; version = "10.20.0"; sha256 = "0gy0l2r7726wb6i599n55dn9035h0g7k0binfiy2dy9bjwz60jqk"; })
+
(fetchNuGet { pname = "Realm.Fody"; version = "10.20.0"; sha256 = "0rwcbbzr41iww3k59rjgy5xy7bna1x906h5blbllpywgpc2l5afw"; })
+
(fetchNuGet { pname = "Realm.SourceGenerator"; version = "10.20.0"; sha256 = "0y0bwqg87pmsld7cmawwwz2ps5lpkbyyzkb9cj0fbynsn4jdygg0"; })
(fetchNuGet { pname = "Remotion.Linq"; version = "2.2.0"; sha256 = "1y46ni0xswmmiryp8sydjgryafwn458dr91f9xn653w73kdyk4xf"; })
(fetchNuGet { pname = "runtime.any.System.Collections"; version = "4.3.0"; sha256 = "0bv5qgm6vr47ynxqbnkc7i797fdi8gbjjxii173syrx14nmrkwg0"; })
(fetchNuGet { pname = "runtime.any.System.Diagnostics.Tools"; version = "4.3.0"; sha256 = "1wl76vk12zhdh66vmagni66h5xbhgqq7zkdpgw21jhxhvlbcl8pk"; })
···
(fetchNuGet { pname = "runtime.unix.System.Net.Sockets"; version = "4.3.0"; sha256 = "03npdxzy8gfv035bv1b9rz7c7hv0rxl5904wjz51if491mw0xy12"; })
(fetchNuGet { pname = "runtime.unix.System.Private.Uri"; version = "4.3.0"; sha256 = "1jx02q6kiwlvfksq1q9qr17fj78y5v6mwsszav4qcz9z25d5g6vk"; })
(fetchNuGet { pname = "runtime.unix.System.Runtime.Extensions"; version = "4.3.0"; sha256 = "0pnxxmm8whx38dp6yvwgmh22smknxmqs5n513fc7m4wxvs1bvi4p"; })
-
(fetchNuGet { pname = "Sentry"; version = "3.23.1"; sha256 = "0cch803ixx5vqfm2zv5qdkkyksh1184669r1109snbkvvv5qy1g9"; })
+
(fetchNuGet { pname = "Sentry"; version = "3.28.1"; sha256 = "09xl3bm5clqxnn8wyy36zwmj8ai8zci6ngw64d0r3rzgd95gbf61"; })
(fetchNuGet { pname = "SharpCompress"; version = "0.31.0"; sha256 = "01az7amjkxjbya5rdcqwxzrh2d3kybf1gsd3617rsxvvxadyra1r"; })
(fetchNuGet { pname = "SharpCompress"; version = "0.32.2"; sha256 = "1p198bl08ia89rf4n6yjpacj3yrz6s574snsfl40l8vlqcdrc1pm"; })
(fetchNuGet { pname = "SharpFNT"; version = "2.0.0"; sha256 = "1bgacgh9hbck0qvji6frbb50sdiqfdng2fvvfgfw8b9qaql91mx0"; })
(fetchNuGet { pname = "SharpGen.Runtime"; version = "2.0.0-beta.10"; sha256 = "0yxq0b4m96z71afc7sywfrlwz2pgr5nilacmssjk803v70f0ydr1"; })
(fetchNuGet { pname = "SharpGen.Runtime.COM"; version = "2.0.0-beta.10"; sha256 = "1qvpphja72x9r3yi96bnmwwy30b1n155v2yy2gzlxjil6qg3xjmb"; })
(fetchNuGet { pname = "SixLabors.ImageSharp"; version = "2.1.0"; sha256 = "0lmj3qs39v5jcf2rjwav43nqnc7g6sd4l226l2jw85nidzmpvkwr"; })
-
(fetchNuGet { pname = "SQLitePCLRaw.bundle_e_sqlite3"; version = "2.1.2"; sha256 = "07rc4pj3rphi8nhzkcvilnm0fv27qcdp68jdwk4g0zjk7yfvbcay"; })
-
(fetchNuGet { pname = "SQLitePCLRaw.core"; version = "2.0.6"; sha256 = "1w4iyg0v1v1z2m7akq7rv8lsgixp2m08732vr14vgpqs918bsy1i"; })
+
(fetchNuGet { pname = "SQLitePCLRaw.bundle_e_sqlite3"; version = "2.1.4"; sha256 = "0shdspl9cm71wwqg9103s44r0l01r3sgnpxr523y4a0wlgac50g0"; })
(fetchNuGet { pname = "SQLitePCLRaw.core"; version = "2.1.2"; sha256 = "19hxv895lairrjmk4gkzd3mcb6b0na45xn4n551h4kckplqadg3d"; })
-
(fetchNuGet { pname = "SQLitePCLRaw.lib.e_sqlite3"; version = "2.1.2"; sha256 = "0jn98bkjk8h4smi09z31ib6s6392054lwmkziqmkqf5gf614k2fz"; })
-
(fetchNuGet { pname = "SQLitePCLRaw.provider.e_sqlite3"; version = "2.1.2"; sha256 = "0bnm2fhvcsyg5ry74gal2cziqnyf5a8d2cb491vsa7j41hbbx7kv"; })
+
(fetchNuGet { pname = "SQLitePCLRaw.core"; version = "2.1.4"; sha256 = "09akxz92qipr1cj8mk2hw99i0b81wwbwx26gpk21471zh543f8ld"; })
+
(fetchNuGet { pname = "SQLitePCLRaw.lib.e_sqlite3"; version = "2.1.4"; sha256 = "11l85ksv1ck46j8z08fyf0c3l572zmp9ynb7p5chm5iyrh8xwkkn"; })
+
(fetchNuGet { pname = "SQLitePCLRaw.provider.e_sqlite3"; version = "2.1.4"; sha256 = "0b8f51nrjkq0pmfzjaqk5rp7r0cp2lbdm2whynj3xsjklppzmn35"; })
(fetchNuGet { pname = "StbiSharp"; version = "1.1.0"; sha256 = "0wbw20m7nyhxj32k153l668sxigamlwig0qpz8l8d0jqz35vizm0"; })
(fetchNuGet { pname = "System.AppContext"; version = "4.1.0"; sha256 = "0fv3cma1jp4vgj7a8hqc9n7hr1f1kjp541s6z0q1r6nazb4iz9mz"; })
(fetchNuGet { pname = "System.AppContext"; version = "4.3.0"; sha256 = "1649qvy3dar900z3g817h17nl8jp4ka5vcfmsr05kh0fshn7j3ya"; })
···
(fetchNuGet { pname = "System.Diagnostics.Debug"; version = "4.0.11"; sha256 = "0gmjghrqmlgzxivd2xl50ncbglb7ljzb66rlx8ws6dv8jm0d5siz"; })
(fetchNuGet { pname = "System.Diagnostics.Debug"; version = "4.3.0"; sha256 = "00yjlf19wjydyr6cfviaph3vsjzg3d5nvnya26i2fvfg53sknh3y"; })
(fetchNuGet { pname = "System.Diagnostics.DiagnosticSource"; version = "4.3.0"; sha256 = "0z6m3pbiy0qw6rn3n209rrzf9x1k4002zh90vwcrsym09ipm2liq"; })
-
(fetchNuGet { pname = "System.Diagnostics.DiagnosticSource"; version = "6.0.0"; sha256 = "0rrihs9lnb1h6x4h0hn6kgfnh58qq7hx8qq99gh6fayx4dcnx3s5"; })
(fetchNuGet { pname = "System.Diagnostics.Tools"; version = "4.3.0"; sha256 = "0in3pic3s2ddyibi8cvgl102zmvp9r9mchh82ns9f0ms4basylw1"; })
(fetchNuGet { pname = "System.Diagnostics.Tracing"; version = "4.3.0"; sha256 = "1m3bx6c2s958qligl67q7grkwfz3w53hpy7nc97mh6f7j5k168c4"; })
(fetchNuGet { pname = "System.Dynamic.Runtime"; version = "4.0.11"; sha256 = "1pla2dx8gkidf7xkciig6nifdsb494axjvzvann8g2lp3dbqasm9"; })
···
(fetchNuGet { pname = "System.IO.FileSystem"; version = "4.0.1"; sha256 = "0kgfpw6w4djqra3w5crrg8xivbanh1w9dh3qapb28q060wb9flp1"; })
(fetchNuGet { pname = "System.IO.FileSystem"; version = "4.3.0"; sha256 = "0z2dfrbra9i6y16mm9v1v6k47f0fm617vlb7s5iybjjsz6g1ilmw"; })
(fetchNuGet { pname = "System.IO.FileSystem.Primitives"; version = "4.3.0"; sha256 = "0j6ndgglcf4brg2lz4wzsh1av1gh8xrzdsn9f0yznskhqn1xzj9c"; })
-
(fetchNuGet { pname = "System.IO.Packaging"; version = "6.0.0"; sha256 = "112nq0k2jc4vh71rifqqmpjxkaanxfapk7g8947jkfgq3lmfmaac"; })
-
(fetchNuGet { pname = "System.IO.Pipelines"; version = "6.0.3"; sha256 = "1jgdazpmwc21dd9naq3l9n5s8a1jnbwlvgkf1pnm0aji6jd4xqdz"; })
+
(fetchNuGet { pname = "System.IO.Packaging"; version = "7.0.0"; sha256 = "16fgj2ab5ci217shmfsi6c0rnmkh90h6vyb60503nhpmh7y8di13"; })
+
(fetchNuGet { pname = "System.IO.Pipelines"; version = "7.0.0"; sha256 = "1ila2vgi1w435j7g2y7ykp2pdbh9c5a02vm85vql89az93b7qvav"; })
(fetchNuGet { pname = "System.Linq"; version = "4.1.0"; sha256 = "1ppg83svb39hj4hpp5k7kcryzrf3sfnm08vxd5sm2drrijsla2k5"; })
(fetchNuGet { pname = "System.Linq"; version = "4.3.0"; sha256 = "1w0gmba695rbr80l1k2h4mrwzbzsyfl2z4klmpbsvsg5pm4a56s7"; })
(fetchNuGet { pname = "System.Linq.Expressions"; version = "4.1.0"; sha256 = "1gpdxl6ip06cnab7n3zlcg6mqp7kknf73s8wjinzi4p0apw82fpg"; })
···
(fetchNuGet { pname = "System.Linq.Queryable"; version = "4.0.1"; sha256 = "11jn9k34g245yyf260gr3ldzvaqa9477w2c5nhb1p8vjx4xm3qaw"; })
(fetchNuGet { pname = "System.Memory"; version = "4.5.3"; sha256 = "0naqahm3wljxb5a911d37mwjqjdxv9l0b49p5dmfyijvni2ppy8a"; })
(fetchNuGet { pname = "System.Memory"; version = "4.5.4"; sha256 = "14gbbs22mcxwggn0fcfs1b062521azb9fbb7c113x0mq6dzq9h6y"; })
+
(fetchNuGet { pname = "System.Memory"; version = "4.5.5"; sha256 = "08jsfwimcarfzrhlyvjjid61j02irx6xsklf32rv57x2aaikvx0h"; })
(fetchNuGet { pname = "System.Net.Http"; version = "4.3.0"; sha256 = "1i4gc757xqrzflbk7kc5ksn20kwwfjhw9w7pgdkn19y3cgnl302j"; })
(fetchNuGet { pname = "System.Net.NameResolution"; version = "4.3.0"; sha256 = "15r75pwc0rm3vvwsn8rvm2krf929mjfwliv0mpicjnii24470rkq"; })
(fetchNuGet { pname = "System.Net.Primitives"; version = "4.3.0"; sha256 = "0c87k50rmdgmxx7df2khd9qj7q35j9rzdmm2572cc55dygmdk3ii"; })
···
(fetchNuGet { pname = "System.Text.Encoding"; version = "4.3.0"; sha256 = "1f04lkir4iladpp51sdgmis9dj4y8v08cka0mbmsy0frc9a4gjqr"; })
(fetchNuGet { pname = "System.Text.Encoding.CodePages"; version = "5.0.0"; sha256 = "1bn2pzaaq4wx9ixirr8151vm5hynn3lmrljcgjx9yghmm4k677k0"; })
(fetchNuGet { pname = "System.Text.Encoding.Extensions"; version = "4.3.0"; sha256 = "11q1y8hh5hrp5a3kw25cb6l00v5l5dvirkz8jr3sq00h1xgcgrxy"; })
+
(fetchNuGet { pname = "System.Text.Encodings.Web"; version = "7.0.0"; sha256 = "1151hbyrcf8kyg1jz8k9awpbic98lwz9x129rg7zk1wrs6vjlpxl"; })
+
(fetchNuGet { pname = "System.Text.Json"; version = "7.0.1"; sha256 = "1lqh6nrrkx4sksvn5509y6j9z8zkhcls0yghd0n31zywmmy3pnf2"; })
(fetchNuGet { pname = "System.Text.RegularExpressions"; version = "4.3.0"; sha256 = "1bgq51k7fwld0njylfn7qc5fmwrk2137gdq7djqdsw347paa9c2l"; })
(fetchNuGet { pname = "System.Threading"; version = "4.0.11"; sha256 = "19x946h926bzvbsgj28csn46gak2crv2skpwsx80hbgazmkgb1ls"; })
(fetchNuGet { pname = "System.Threading"; version = "4.3.0"; sha256 = "0rw9wfamvhayp5zh3j7p1yfmx9b5khbf4q50d8k5rk993rskfd34"; })
-
(fetchNuGet { pname = "System.Threading.Channels"; version = "6.0.0"; sha256 = "1qbyi7yymqc56frqy7awvcqc1m7x3xrpx87a37dgb3mbrjg9hlcj"; })
+
(fetchNuGet { pname = "System.Threading.Channels"; version = "7.0.0"; sha256 = "1qrmqa6hpzswlmyp3yqsbnmia9i5iz1y208xpqc1y88b1f6j1v8a"; })
(fetchNuGet { pname = "System.Threading.Tasks"; version = "4.0.11"; sha256 = "0nr1r41rak82qfa5m0lhk9mp0k93bvfd7bbd9sdzwx9mb36g28p5"; })
(fetchNuGet { pname = "System.Threading.Tasks"; version = "4.3.0"; sha256 = "134z3v9abw3a6jsw17xl3f6hqjpak5l682k2vz39spj4kmydg6k7"; })
(fetchNuGet { pname = "System.Threading.Tasks.Extensions"; version = "4.3.0"; sha256 = "1xxcx2xh8jin360yjwm4x4cf5y3a2bwpn2ygkfkwkicz7zk50s2z"; })
+5 -5
pkgs/os-specific/linux/kernel/zen-kernels.nix
···
# comments with variant added for update script
# ./update-zen.py zen
zenVariant = {
-
version = "6.2.1"; #zen
+
version = "6.2.2"; #zen
suffix = "zen1"; #zen
-
sha256 = "1ypgdc4bz35cqqwp8nka6rx7m9dqfl6wzfb8ad27gqgxwzil3sjg"; #zen
+
sha256 = "004aghwdclky7w341yg9nkr5r58qnp4hxnmvxrp2z06pzcbsq933"; #zen
isLqx = false;
};
# ./update-zen.py lqx
lqxVariant = {
-
version = "6.1.13"; #lqx
-
suffix = "lqx2"; #lqx
-
sha256 = "1264cfkb3kfrava8g7byr10avkjg0k281annqppcqqjkyjf63q4y"; #lqx
+
version = "6.1.14"; #lqx
+
suffix = "lqx1"; #lqx
+
sha256 = "026nnmbpipk4gg7llsvm4fgws3ka0hjdywl7h0a8bvq6n9by15i6"; #lqx
isLqx = true;
};
zenKernelsFor = { version, suffix, sha256, isLqx }: buildLinux (args // {
+3 -3
pkgs/servers/monitoring/grafana/default.nix
···
buildGoModule rec {
pname = "grafana";
-
version = "9.4.2";
+
version = "9.4.3";
excludedPackages = [ "alert_webhook_listener" "clean-swagger" "release_publisher" "slow_proxy" "slow_proxy_mac" "macaron" "devenv" ];
···
rev = "v${version}";
owner = "grafana";
repo = "grafana";
-
sha256 = "sha256-dSKIQiav6y4P1e/7CptIdRuOrDdXdvItCaRBcbepadE=";
+
sha256 = "sha256-LYUbypPXoWwWA4u2JxhUS/lozQNo2DCFGDPCmNP3GoE=";
};
srcStatic = fetchurl {
url = "https://dl.grafana.com/oss/release/grafana-${version}.linux-amd64.tar.gz";
-
sha256 = "sha256-dBp6V5ozu1koSoXIecjysSIdG0hL1K5lH9Z8yougUKo=";
+
sha256 = "sha256-aq6/sMfYVebxh46+zxphfWttFN4vBpUgCLXobLWVozk=";
};
vendorSha256 = "sha256-atnlEdGDiUqQkslvRlPSi6VC5rEvRVV6R2Wxur3geew=";
+3 -3
pkgs/servers/monitoring/prometheus/redis-exporter.nix
···
buildGoModule rec {
pname = "redis_exporter";
-
version = "1.47.0";
+
version = "1.48.0";
src = fetchFromGitHub {
owner = "oliver006";
repo = "redis_exporter";
rev = "v${version}";
-
sha256 = "sha256-pSLFfArmG4DIgYUD8qz71P+7RYIQuUycnYzNFXNhZ8A=";
+
sha256 = "sha256-hBkekoVwNuRDGhpvbW57eR+UUMkntdEcHJAVQbwk7NE=";
};
vendorHash = "sha256-Owfxy7WkucQ6BM8yjnZg9/8CgopGTtbQTTUuxoT3RRE=";
···
description = "Prometheus exporter for Redis metrics";
inherit (src.meta) homepage;
license = licenses.mit;
-
maintainers = with maintainers; [ eskytthe srhb ];
+
maintainers = with maintainers; [ eskytthe srhb ma27 ];
platforms = platforms.unix;
};
}
+49 -95
pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py
···
-
from collections.abc import Mapping, MutableMapping, Sequence
+
from collections.abc import Mapping, Sequence
from dataclasses import dataclass
from typing import Any, cast, Optional
from urllib.parse import quote
from .md import Renderer
-
import markdown_it
from markdown_it.token import Token
-
from markdown_it.utils import OptionsDict
_asciidoc_escapes = {
# escape all dots, just in case one is pasted at SOL
···
_list_stack: list[List]
_attrspans: list[str]
-
def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
-
super().__init__(manpage_urls, parser)
+
def __init__(self, manpage_urls: Mapping[str, str]):
+
super().__init__(manpage_urls)
self._parstack = [ Par("\n\n", "====") ]
self._list_stack = []
self._attrspans = []
···
self._list_stack.pop()
return ""
-
def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return asciidoc_escape(token.content)
-
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._break()
-
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
-
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return " +\n"
-
def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f" "
-
def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return f"``{asciidoc_escape(token.content)}``"
-
def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
-
return self.fence(token, tokens, i, options, env)
-
def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return self.fence(token, tokens, i)
+
def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return f"link:{quote(cast(str, token.attrs['href']), safe='/:')}["
-
def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "]"
-
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._enter_block(True)
# allow the next token to be a block or an inline.
return f'\n{self._list_stack[-1].head} {{empty}}'
-
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return "\n"
-
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._list_open(token, '*')
-
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._list_close()
-
def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "__"
-
def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "__"
-
def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "**"
-
def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "**"
-
def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
attrs = f"[source,{token.info}]\n" if token.info else ""
code = token.content
if code.endswith('\n'):
code = code[:-1]
return f"{self._break(True)}{attrs}----\n{code}\n----"
-
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
pbreak = self._break(True)
self._enter_block(False)
return f"{pbreak}[quote]\n{self._parstack[-2].block_delim}\n"
-
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return f"\n{self._parstack[-1].block_delim}"
-
def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("NOTE")
-
def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("CAUTION")
-
def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("IMPORTANT")
-
def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("TIP")
-
def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("WARNING")
-
def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"{self._break()}[]"
-
def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
-
def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._break()
-
def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._enter_block(True)
return ":: {empty}"
-
def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
-
def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return "\n"
-
def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
content = asciidoc_escape(token.content)
if token.meta['name'] == 'manpage' and (url := self._manpage_urls.get(token.content)):
return f"link:{quote(url, safe='/:')}[{content}]"
return f"[.{token.meta['name']}]``{asciidoc_escape(token.content)}``"
-
def inline_anchor(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def inline_anchor(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return f"[[{token.attrs['id']}]]"
-
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
(id_part, class_part) = ("", "")
if id := token.attrs.get('id'):
···
class_part = "kbd:["
self._attrspans.append("]")
else:
-
return super().attr_span_begin(token, tokens, i, options, env)
+
return super().attr_span_begin(token, tokens, i)
else:
self._attrspans.append("")
return id_part + class_part
-
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._attrspans.pop()
-
def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return token.markup.replace("#", "=") + " "
-
def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "\n"
-
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._list_open(token, '.')
-
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._list_close()
+47 -92
pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py
···
-
from collections.abc import Mapping, MutableMapping, Sequence
+
from collections.abc import Mapping, Sequence
from dataclasses import dataclass
from typing import Any, cast, Optional
from .md import md_escape, md_make_code, Renderer
-
import markdown_it
from markdown_it.token import Token
-
from markdown_it.utils import OptionsDict
@dataclass(kw_only=True)
class List:
···
_link_stack: list[str]
_list_stack: list[List]
-
def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
-
super().__init__(manpage_urls, parser)
+
def __init__(self, manpage_urls: Mapping[str, str]):
+
super().__init__(manpage_urls)
self._parstack = [ Par("") ]
self._link_stack = []
self._list_stack = []
···
return s
return f"\n{self._parstack[-1].indent}".join(s.splitlines())
-
def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return self._indent_raw(md_escape(token.content))
-
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._maybe_parbreak()
-
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
-
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f" {self._break()}"
-
def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._break()
-
def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return md_make_code(token.content)
-
def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
-
return self.fence(token, tokens, i, options, env)
-
def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return self.fence(token, tokens, i)
+
def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
self._link_stack.append(cast(str, token.attrs['href']))
return "["
-
def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"]({md_escape(self._link_stack.pop())})"
-
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
lst = self._list_stack[-1]
lbreak = "" if not lst.first_item_seen else self._break() * (1 if lst.compact else 2)
lst.first_item_seen = True
···
lst.next_idx += 1
self._enter_block(" " * (len(head) + 1))
return f'{lbreak}{head} '
-
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ""
-
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.append(List(compact=bool(token.meta['compact'])))
return self._maybe_parbreak()
-
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""
-
def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "*"
-
def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "*"
-
def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "**"
-
def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "**"
-
def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
code = token.content
if code.endswith('\n'):
code = code[:-1]
pbreak = self._maybe_parbreak()
return pbreak + self._indent_raw(md_make_code(code, info=token.info, multiline=True))
-
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
pbreak = self._maybe_parbreak()
self._enter_block("> ")
return pbreak + "> "
-
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ""
-
def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Note")
-
def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Caution")
-
def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Important")
-
def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Tip")
-
def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Warning")
-
def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.append(List(compact=False))
return ""
-
def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""
-
def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
pbreak = self._maybe_parbreak()
self._enter_block(" ")
# add an opening zero-width non-joiner to separate *our* emphasis from possible
# emphasis in the provided term
return f'{pbreak} - *{chr(0x200C)}'
-
def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"{chr(0x200C)}*"
-
def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return ""
-
def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ""
-
def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
content = md_make_code(token.content)
if token.meta['name'] == 'manpage' and (url := self._manpage_urls.get(token.content)):
return f"[{content}]({url})"
return content # no roles in regular commonmark
-
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# there's no way we can emit attrspans correctly in all cases. we could use inline
# html for ids, but that would not round-trip. same holds for classes. since this
# renderer is only used for approximate options export and all of these things are
# not allowed in options we can ignore them for now.
return ""
-
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
-
def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return token.markup + " "
-
def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "\n"
-
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.append(
List(next_idx = cast(int, token.attrs.get('start', 1)),
compact = bool(token.meta['compact'])))
return self._maybe_parbreak()
-
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""
+58 -108
pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py
···
-
from collections.abc import Mapping, MutableMapping, Sequence
+
from collections.abc import Mapping, Sequence
from typing import Any, cast, Optional, NamedTuple
import markdown_it
from markdown_it.token import Token
-
from markdown_it.utils import OptionsDict
from xml.sax.saxutils import escape, quoteattr
from .md import Renderer
···
partintro_closed: bool = False
class DocBookRenderer(Renderer):
-
__output__ = "docbook"
_link_tags: list[str]
_deflists: list[Deflist]
_headings: list[Heading]
_attrspans: list[str]
-
def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
-
super().__init__(manpage_urls, parser)
+
def __init__(self, manpage_urls: Mapping[str, str]):
+
super().__init__(manpage_urls)
self._link_tags = []
self._deflists = []
self._headings = []
self._attrspans = []
-
def render(self, tokens: Sequence[Token], options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
-
result = super().render(tokens, options, env)
-
result += self._close_headings(None, env)
+
def render(self, tokens: Sequence[Token]) -> str:
+
result = super().render(tokens)
+
result += self._close_headings(None)
return result
-
def renderInline(self, tokens: Sequence[Token], options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def renderInline(self, tokens: Sequence[Token]) -> str:
# HACK to support docbook links and xrefs. link handling is only necessary because the docbook
# manpage stylesheet converts - in urls to a mathematical minus, which may be somewhat incorrect.
for i, token in enumerate(tokens):
···
if tokens[i + 1].type == 'text' and tokens[i + 1].content == token.attrs['href']:
tokens[i + 1].content = ''
-
return super().renderInline(tokens, options, env)
+
return super().renderInline(tokens)
-
def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return escape(token.content)
-
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para>"
-
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</para>"
-
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<literallayout>\n</literallayout>"
-
def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# should check options.breaks() and emit hard break if so
return "\n"
-
def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"<literal>{escape(token.content)}</literal>"
-
def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"<programlisting>{escape(token.content)}</programlisting>"
-
def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._link_tags.append(token.tag)
href = cast(str, token.attrs['href'])
(attr, start) = ('linkend', 1) if href[0] == '#' else ('xlink:href', 0)
return f"<{token.tag} {attr}={quoteattr(href[start:])}>"
-
def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"</{self._link_tags.pop()}>"
-
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<listitem>"
-
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</listitem>\n"
# HACK open and close para for docbook change size. remove soon.
-
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
spacing = ' spacing="compact"' if token.meta.get('compact', False) else ''
return f"<para><itemizedlist{spacing}>\n"
-
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "\n</itemizedlist></para>"
-
def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<emphasis>"
-
def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</emphasis>"
-
def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<emphasis role=\"strong\">"
-
def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</emphasis>"
-
def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
info = f" language={quoteattr(token.info)}" if token.info != "" else ""
return f"<programlisting{info}>{escape(token.content)}</programlisting>"
-
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para><blockquote>"
-
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</blockquote></para>"
-
def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para><note>"
-
def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</note></para>"
-
def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para><caution>"
-
def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</caution></para>"
-
def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para><important>"
-
def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</important></para>"
-
def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para><tip>"
-
def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</tip></para>"
-
def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para><warning>"
-
def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</warning></para>"
# markdown-it emits tokens based on the html syntax tree, but docbook is
# slightly different. html has <dl>{<dt/>{<dd/>}}</dl>,
# docbook has <variablelist>{<varlistentry><term/><listitem/></varlistentry>}<variablelist>
# we have to reject multiple definitions for the same term for time being.
-
def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._deflists.append(Deflist())
return "<para><variablelist>"
-
def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._deflists.pop()
return "</variablelist></para>"
-
def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._deflists[-1].has_dd = False
return "<varlistentry><term>"
-
def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</term>"
-
def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if self._deflists[-1].has_dd:
raise Exception("multiple definitions per term not supported")
self._deflists[-1].has_dd = True
return "<listitem>"
-
def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</listitem></varlistentry>"
-
def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if token.meta['name'] == 'command':
return f"<command>{escape(token.content)}</command>"
if token.meta['name'] == 'file':
···
else:
return ref
raise NotImplementedError("md node not supported yet", token)
-
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# we currently support *only* inline anchors and the special .keycap class to produce
# <keycap> docbook elements.
(id_part, class_part) = ("", "")
···
class_part = "<keycap>"
self._attrspans.append("</keycap>")
else:
-
return super().attr_span_begin(token, tokens, i, options, env)
+
return super().attr_span_begin(token, tokens, i)
else:
self._attrspans.append("")
return id_part + class_part
-
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._attrspans.pop()
-
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
start = f' startingnumber="{token.attrs["start"]}"' if 'start' in token.attrs else ""
spacing = ' spacing="compact"' if token.meta.get('compact', False) else ''
return f"<orderedlist{start}{spacing}>"
-
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</orderedlist>"
-
def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
hlevel = int(token.tag[1:])
-
result = self._close_headings(hlevel, env)
-
(tag, attrs) = self._heading_tag(token, tokens, i, options, env)
+
result = self._close_headings(hlevel)
+
(tag, attrs) = self._heading_tag(token, tokens, i)
self._headings.append(Heading(tag, hlevel))
attrs_str = "".join([ f" {k}={quoteattr(v)}" for k, v in attrs.items() ])
return result + f'<{tag}{attrs_str}>\n<title>'
-
def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
heading = self._headings[-1]
result = '</title>'
if heading.container_tag == 'part':
···
maybe_id = " xml:id=" + quoteattr(id + "-intro")
result += f"<partintro{maybe_id}>"
return result
-
def example_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if id := token.attrs.get('id'):
return f"<anchor xml:id={quoteattr(cast(str, id))} />"
return ""
-
def example_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
-
def _close_headings(self, level: Optional[int], env: MutableMapping[str, Any]) -> str:
+
def _close_headings(self, level: Optional[int]) -> str:
# we rely on markdown-it producing h{1..6} tags in token.tag for this to work
result = []
while len(self._headings):
···
break
return "\n".join(result)
-
def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> tuple[str, dict[str, str]]:
+
def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> tuple[str, dict[str, str]]:
attrs = {}
if id := token.attrs.get('id'):
attrs['xml:id'] = cast(str, id)
+245
pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py
···
+
from collections.abc import Mapping, Sequence
+
from typing import cast, Optional, NamedTuple
+
+
from html import escape
+
from markdown_it.token import Token
+
+
from .manual_structure import XrefTarget
+
from .md import Renderer
+
+
class UnresolvedXrefError(Exception):
+
pass
+
+
class Heading(NamedTuple):
+
container_tag: str
+
level: int
+
html_tag: str
+
# special handling for part content: whether partinfo div was already closed from
+
# elsewhere or still needs closing.
+
partintro_closed: bool
+
# tocs are generated when the heading opens, but have to be emitted into the file
+
# after the heading titlepage (and maybe partinfo) has been closed.
+
toc_fragment: str
+
+
_bullet_list_styles = [ 'disc', 'circle', 'square' ]
+
_ordered_list_styles = [ '1', 'a', 'i', 'A', 'I' ]
+
+
class HTMLRenderer(Renderer):
+
_xref_targets: Mapping[str, XrefTarget]
+
+
_headings: list[Heading]
+
_attrspans: list[str]
+
_hlevel_offset: int = 0
+
_bullet_list_nesting: int = 0
+
_ordered_list_nesting: int = 0
+
+
def __init__(self, manpage_urls: Mapping[str, str], xref_targets: Mapping[str, XrefTarget]):
+
super().__init__(manpage_urls)
+
self._headings = []
+
self._attrspans = []
+
self._xref_targets = xref_targets
+
+
def render(self, tokens: Sequence[Token]) -> str:
+
result = super().render(tokens)
+
result += self._close_headings(None)
+
return result
+
+
def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return escape(token.content)
+
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "<p>"
+
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</p>"
+
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "<br />"
+
def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "\n"
+
def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return f'<code class="literal">{escape(token.content)}</code>'
+
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return self.fence(token, tokens, i)
+
def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
href = escape(cast(str, token.attrs['href']), True)
+
tag, title, target, text = "link", "", 'target="_top"', ""
+
if href.startswith('#'):
+
if not (xref := self._xref_targets.get(href[1:])):
+
raise UnresolvedXrefError(f"bad local reference, id {href} not known")
+
if tokens[i + 1].type == 'link_close':
+
tag, text = "xref", xref.title_html
+
if xref.title:
+
title = f'title="{escape(xref.title, True)}"'
+
target, href = "", xref.href()
+
return f'<a class="{tag}" href="{href}" {title} {target}>{text}'
+
def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</a>"
+
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return '<li class="listitem">'
+
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</li>"
+
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
extra = 'compact' if token.meta.get('compact', False) else ''
+
style = _bullet_list_styles[self._bullet_list_nesting % len(_bullet_list_styles)]
+
self._bullet_list_nesting += 1
+
return f'<div class="itemizedlist"><ul class="itemizedlist {extra}" style="list-style-type: {style};">'
+
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
self._bullet_list_nesting -= 1
+
return "</ul></div>"
+
def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return '<span class="emphasis"><em>'
+
def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</em></span>"
+
def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return '<span class="strong"><strong>'
+
def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</strong></span>"
+
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
# TODO use token.info. docbook doesn't so we can't yet.
+
return f'<pre class="programlisting">\n{escape(token.content)}</pre>'
+
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return '<div class="blockquote"><blockquote class="blockquote">'
+
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</blockquote></div>"
+
def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return '<div class="note"><h3 class="title">Note</h3>'
+
def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</div>"
+
def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return '<div class="caution"><h3 class="title">Caution</h3>'
+
def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</div>"
+
def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return '<div class="important"><h3 class="title">Important</h3>'
+
def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</div>"
+
def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return '<div class="tip"><h3 class="title">Tip</h3>'
+
def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</div>"
+
def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return '<div class="warning"><h3 class="title">Warning</h3>'
+
def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</div>"
+
def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return '<div class="variablelist"><dl class="variablelist">'
+
def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</dl></div>"
+
def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return '<dt><span class="term">'
+
def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</span></dt>"
+
def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "<dd>"
+
def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "</dd>"
+
def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
if token.meta['name'] == 'command':
+
return f'<span class="command"><strong>{escape(token.content)}</strong></span>'
+
if token.meta['name'] == 'file':
+
return f'<code class="filename">{escape(token.content)}</code>'
+
if token.meta['name'] == 'var':
+
return f'<code class="varname">{escape(token.content)}</code>'
+
if token.meta['name'] == 'env':
+
return f'<code class="envar">{escape(token.content)}</code>'
+
if token.meta['name'] == 'option':
+
return f'<code class="option">{escape(token.content)}</code>'
+
if token.meta['name'] == 'manpage':
+
[page, section] = [ s.strip() for s in token.content.rsplit('(', 1) ]
+
section = section[:-1]
+
man = f"{page}({section})"
+
title = f'<span class="refentrytitle">{escape(page)}</span>'
+
vol = f"({escape(section)})"
+
ref = f'<span class="citerefentry">{title}{vol}</span>'
+
if man in self._manpage_urls:
+
return f'<a class="link" href="{escape(self._manpage_urls[man], True)}" target="_top">{ref}</a>'
+
else:
+
return ref
+
return super().myst_role(token, tokens, i)
+
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
# we currently support *only* inline anchors and the special .keycap class to produce
+
# keycap-styled spans.
+
(id_part, class_part) = ("", "")
+
if s := token.attrs.get('id'):
+
id_part = f'<a id="{escape(cast(str, s), True)}" />'
+
if s := token.attrs.get('class'):
+
if s == 'keycap':
+
class_part = '<span class="keycap"><strong>'
+
self._attrspans.append("</strong></span>")
+
else:
+
return super().attr_span_begin(token, tokens, i)
+
else:
+
self._attrspans.append("")
+
return id_part + class_part
+
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return self._attrspans.pop()
+
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
hlevel = int(token.tag[1:])
+
htag, hstyle = self._make_hN(hlevel)
+
if hstyle:
+
hstyle = f'style="{escape(hstyle, True)}"'
+
if anchor := cast(str, token.attrs.get('id', '')):
+
anchor = f'<a id="{escape(anchor, True)}"></a>'
+
result = self._close_headings(hlevel)
+
tag = self._heading_tag(token, tokens, i)
+
toc_fragment = self._build_toc(tokens, i)
+
self._headings.append(Heading(tag, hlevel, htag, tag != 'part', toc_fragment))
+
return (
+
f'{result}'
+
f'<div class="{tag}">'
+
f' <div class="titlepage">'
+
f' <div>'
+
f' <div>'
+
f' <{htag} class="title" {hstyle}>'
+
f' {anchor}'
+
)
+
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
heading = self._headings[-1]
+
result = (
+
f' </{heading.html_tag}>'
+
f' </div>'
+
f' </div>'
+
f'</div>'
+
)
+
if heading.container_tag == 'part':
+
result += '<div class="partintro">'
+
else:
+
result += heading.toc_fragment
+
return result
+
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
extra = 'compact' if token.meta.get('compact', False) else ''
+
start = f'start="{token.attrs["start"]}"' if 'start' in token.attrs else ""
+
style = _ordered_list_styles[self._ordered_list_nesting % len(_ordered_list_styles)]
+
self._ordered_list_nesting += 1
+
return f'<div class="orderedlist"><ol class="orderedlist {extra}" {start} type="{style}">'
+
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
self._ordered_list_nesting -= 1;
+
return "</ol></div>"
+
def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
if id := token.attrs.get('id'):
+
return f'<a id="{escape(cast(str, id), True)}" />'
+
return ""
+
def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return ""
+
+
def _make_hN(self, level: int) -> tuple[str, str]:
+
return f"h{min(6, max(1, level + self._hlevel_offset))}", ""
+
+
def _maybe_close_partintro(self) -> str:
+
if self._headings:
+
heading = self._headings[-1]
+
if heading.container_tag == 'part' and not heading.partintro_closed:
+
self._headings[-1] = heading._replace(partintro_closed=True)
+
return heading.toc_fragment + "</div>"
+
return ""
+
+
def _close_headings(self, level: Optional[int]) -> str:
+
result = []
+
while len(self._headings) and (level is None or self._headings[-1].level >= level):
+
result.append(self._maybe_close_partintro())
+
result.append("</div>")
+
self._headings.pop()
+
return "\n".join(result)
+
+
def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "section"
+
def _build_toc(self, tokens: Sequence[Token], i: int) -> str:
+
return ""
+50 -98
pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py
···
-
from collections.abc import Mapping, MutableMapping, Sequence
+
from collections.abc import Mapping, Sequence
from dataclasses import dataclass
from typing import Any, cast, Iterable, Optional
···
import markdown_it
from markdown_it.token import Token
-
from markdown_it.utils import OptionsDict
from .md import Renderer
···
# horizontal motion in a line) we do attempt to copy the style of mdoc(7) semantic requests
# as appropriate for each markup element.
class ManpageRenderer(Renderer):
-
__output__ = "man"
-
# whether to emit mdoc .Ql equivalents for inline code or just the contents. this is
# mainly used by the options manpage converter to not emit extra quotes in defaults
# and examples where it's already clear from context that the following text is code.
···
_list_stack: list[List]
_font_stack: list[str]
-
def __init__(self, manpage_urls: Mapping[str, str], href_targets: dict[str, str],
-
parser: Optional[markdown_it.MarkdownIt] = None):
-
super().__init__(manpage_urls, parser)
+
def __init__(self, manpage_urls: Mapping[str, str], href_targets: dict[str, str]):
+
super().__init__(manpage_urls)
self._href_targets = href_targets
self._link_stack = []
self._do_parbreak_stack = []
···
self._leave_block()
return ".RE"
-
def render(self, tokens: Sequence[Token], options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def render(self, tokens: Sequence[Token]) -> str:
self._do_parbreak_stack = [ False ]
self._font_stack = [ "\\fR" ]
-
return super().render(tokens, options, env)
+
return super().render(tokens)
-
def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return man_escape(token.content)
-
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._maybe_parbreak()
-
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
-
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ".br"
-
def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return " "
-
def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
s = _protect_spaces(man_escape(token.content))
return f"\\fR\\(oq{s}\\(cq\\fP" if self.inline_code_is_quoted else s
-
def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
-
return self.fence(token, tokens, i, options, env)
-
def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return self.fence(token, tokens, i)
+
def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
href = cast(str, token.attrs['href'])
self._link_stack.append(href)
text = ""
···
text = self._href_targets[href]
self._font_stack.append("\\fB")
return f"\\fB{text}\0 <"
-
def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
href = self._link_stack.pop()
text = ""
if self.link_footnotes is not None:
···
text = "\\fR" + man_escape(f"[{idx}]")
self._font_stack.pop()
return f">\0 {text}{self._font_stack[-1]}"
-
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._enter_block()
lst = self._list_stack[-1]
maybe_space = '' if lst.compact or not lst.first_item_seen else '.sp\n'
···
f'.RS {lst.width}\n'
f"\\h'-{len(head) + 1}'\\fB{man_escape(head)}\\fP\\h'1'\\c"
)
-
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ".RE"
-
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.append(List(width=4, compact=bool(token.meta['compact'])))
return self._maybe_parbreak()
-
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""
-
def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._font_stack.append("\\fI")
return "\\fI"
-
def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._font_stack.pop()
return self._font_stack[-1]
-
def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._font_stack.append("\\fB")
return "\\fB"
-
def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._font_stack.pop()
return self._font_stack[-1]
-
def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
s = man_escape(token.content).rstrip('\n')
return (
'.sp\n'
···
'.fi\n'
'.RE'
)
-
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
maybe_par = self._maybe_parbreak("\n")
self._enter_block()
return (
···
".RS 4\n"
f"\\h'-3'\\fI\\(lq\\(rq\\fP\\h'1'\\c"
)
-
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ".RE"
-
def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Note")
-
def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open( "Caution")
-
def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open( "Important")
-
def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open( "Tip")
-
def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open( "Warning")
-
def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
-
def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ".RS 4"
-
def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ".RE"
-
def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ".PP"
-
def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
-
def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._enter_block()
return ".RS 4"
-
def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ".RE"
-
def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if token.meta['name'] in [ 'command', 'env', 'option' ]:
return f'\\fB{man_escape(token.content)}\\fP'
elif token.meta['name'] in [ 'file', 'var' ]:
···
return f'\\fB{man_escape(page)}\\fP\\fR({man_escape(section)})\\fP'
else:
raise NotImplementedError("md node not supported yet", token)
-
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# mdoc knows no anchors so we can drop those, but classes must be rejected.
if 'class' in token.attrs:
-
return super().attr_span_begin(token, tokens, i, options, env)
+
return super().attr_span_begin(token, tokens, i)
return ""
-
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
-
def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in manpages", token)
-
def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in manpages", token)
-
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# max item head width for a number, a dot, and one leading space and one trailing space
width = 3 + len(str(cast(int, token.meta['end'])))
self._list_stack.append(
···
next_idx = cast(int, token.attrs.get('start', 1)),
compact = bool(token.meta['compact'])))
return self._maybe_parbreak()
-
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""
+552 -140
pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
···
import argparse
+
import html
import json
+
import re
+
import xml.sax.saxutils as xml
from abc import abstractmethod
-
from collections.abc import Mapping, MutableMapping, Sequence
+
from collections.abc import Mapping, Sequence
from pathlib import Path
-
from typing import Any, cast, NamedTuple, Optional, Union
-
from xml.sax.saxutils import escape, quoteattr
+
from typing import Any, cast, ClassVar, Generic, get_args, NamedTuple, Optional, Union
import markdown_it
from markdown_it.token import Token
-
from markdown_it.utils import OptionsDict
+
+
from . import md, options
+
from .docbook import DocBookRenderer, Heading, make_xml_id
+
from .html import HTMLRenderer, UnresolvedXrefError
+
from .manual_structure import check_structure, FragmentType, is_include, TocEntry, TocEntryType, XrefTarget
+
from .md import Converter, Renderer
+
from .utils import Freezeable
+
+
class BaseConverter(Converter[md.TR], Generic[md.TR]):
+
# per-converter configuration for ns:arg=value arguments to include blocks, following
+
# the include type. html converters need something like this to support chunking, or
+
# another external method like the chunktocs docbook uses (but block options seem like
+
# a much nicer of doing this).
+
INCLUDE_ARGS_NS: ClassVar[str]
+
INCLUDE_FRAGMENT_ALLOWED_ARGS: ClassVar[set[str]] = set()
+
INCLUDE_OPTIONS_ALLOWED_ARGS: ClassVar[set[str]] = set()
+
+
_base_paths: list[Path]
+
_current_type: list[TocEntryType]
+
+
def convert(self, infile: Path, outfile: Path) -> None:
+
self._base_paths = [ infile ]
+
self._current_type = ['book']
+
try:
+
tokens = self._parse(infile.read_text())
+
self._postprocess(infile, outfile, tokens)
+
converted = self._renderer.render(tokens)
+
outfile.write_text(converted)
+
except Exception as e:
+
raise RuntimeError(f"failed to render manual {infile}") from e
+
+
def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None:
+
pass
+
+
def _parse(self, src: str) -> list[Token]:
+
tokens = super()._parse(src)
+
check_structure(self._current_type[-1], tokens)
+
for token in tokens:
+
if not is_include(token):
+
continue
+
directive = token.info[12:].split()
+
if not directive:
+
continue
+
args = { k: v for k, _sep, v in map(lambda s: s.partition('='), directive[1:]) }
+
typ = directive[0]
+
if typ == 'options':
+
token.type = 'included_options'
+
self._process_include_args(token, args, self.INCLUDE_OPTIONS_ALLOWED_ARGS)
+
self._parse_options(token, args)
+
else:
+
fragment_type = typ.removesuffix('s')
+
if fragment_type not in get_args(FragmentType):
+
raise RuntimeError(f"unsupported structural include type '{typ}'")
+
self._current_type.append(cast(FragmentType, fragment_type))
+
token.type = 'included_' + typ
+
self._process_include_args(token, args, self.INCLUDE_FRAGMENT_ALLOWED_ARGS)
+
self._parse_included_blocks(token, args)
+
self._current_type.pop()
+
return tokens
+
+
def _process_include_args(self, token: Token, args: dict[str, str], allowed: set[str]) -> None:
+
ns = self.INCLUDE_ARGS_NS + ":"
+
args = { k[len(ns):]: v for k, v in args.items() if k.startswith(ns) }
+
if unknown := set(args.keys()) - allowed:
+
assert token.map
+
raise RuntimeError(f"unrecognized include argument in line {token.map[0] + 1}", unknown)
+
token.meta['include-args'] = args
+
+
def _parse_included_blocks(self, token: Token, block_args: dict[str, str]) -> None:
+
assert token.map
+
included = token.meta['included'] = []
+
for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2):
+
line = line.strip()
+
path = self._base_paths[-1].parent / line
+
if path in self._base_paths:
+
raise RuntimeError(f"circular include found in line {lnum}")
+
try:
+
self._base_paths.append(path)
+
with open(path, 'r') as f:
+
tokens = self._parse(f.read())
+
included.append((tokens, path))
+
self._base_paths.pop()
+
except Exception as e:
+
raise RuntimeError(f"processing included file {path} from line {lnum}") from e
+
+
def _parse_options(self, token: Token, block_args: dict[str, str]) -> None:
+
assert token.map
+
+
items = {}
+
for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2):
+
if len(args := line.split(":", 1)) != 2:
+
raise RuntimeError(f"options directive with no argument in line {lnum}")
+
(k, v) = (args[0].strip(), args[1].strip())
+
if k in items:
+
raise RuntimeError(f"duplicate options directive {k} in line {lnum}")
+
items[k] = v
+
try:
+
id_prefix = items.pop('id-prefix')
+
varlist_id = items.pop('list-id')
+
source = items.pop('source')
+
except KeyError as e:
+
raise RuntimeError(f"options directive {e} missing in block at line {token.map[0] + 1}")
+
if items.keys():
+
raise RuntimeError(
+
f"unsupported options directives in block at line {token.map[0] + 1}",
+
" ".join(items.keys()))
-
from . import options
-
from .docbook import DocBookRenderer, Heading
-
from .md import Converter
+
try:
+
with open(self._base_paths[-1].parent / source, 'r') as f:
+
token.meta['id-prefix'] = id_prefix
+
token.meta['list-id'] = varlist_id
+
token.meta['source'] = json.load(f)
+
except Exception as e:
+
raise RuntimeError(f"processing options block in line {token.map[0] + 1}") from e
-
class ManualDocBookRenderer(DocBookRenderer):
+
class RendererMixin(Renderer):
_toplevel_tag: str
+
_revision: str
-
def __init__(self, toplevel_tag: str, manpage_urls: Mapping[str, str],
-
parser: Optional[markdown_it.MarkdownIt] = None):
-
super().__init__(manpage_urls, parser)
+
def __init__(self, toplevel_tag: str, revision: str, *args: Any, **kwargs: Any):
+
super().__init__(*args, **kwargs)
self._toplevel_tag = toplevel_tag
+
self._revision = revision
self.rules |= {
'included_sections': lambda *args: self._included_thing("section", *args),
'included_chapters': lambda *args: self._included_thing("chapter", *args),
···
'included_options': self.included_options,
}
-
def render(self, tokens: Sequence[Token], options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
-
wanted = { 'h1': 'title' }
-
wanted |= { 'h2': 'subtitle' } if self._toplevel_tag == 'book' else {}
-
for (i, (tag, kind)) in enumerate(wanted.items()):
-
if len(tokens) < 3 * (i + 1):
-
raise RuntimeError(f"missing {kind} ({tag}) heading")
-
token = tokens[3 * i]
-
if token.type != 'heading_open' or token.tag != tag:
-
assert token.map
-
raise RuntimeError(f"expected {kind} ({tag}) heading in line {token.map[0] + 1}", token)
-
for t in tokens[3 * len(wanted):]:
-
if t.type != 'heading_open' or (info := wanted.get(t.tag)) is None:
-
continue
-
assert t.map
-
raise RuntimeError(
-
f"only one {info[0]} heading ({t.markup} [text...]) allowed per "
-
f"{self._toplevel_tag}, but found a second in lines [{t.map[0] + 1}..{t.map[1]}]. "
-
"please remove all such headings except the first or demote the subsequent headings.",
-
t)
-
+
def render(self, tokens: Sequence[Token]) -> str:
# books get special handling because they have *two* title tags. doing this with
# generic code is more complicated than it's worth. the checks above have verified
# that both titles actually exist.
if self._toplevel_tag == 'book':
-
assert tokens[1].children
-
assert tokens[4].children
-
if (maybe_id := cast(str, tokens[0].attrs.get('id', ""))):
-
maybe_id = "xml:id=" + quoteattr(maybe_id)
-
return (f'<book xmlns="http://docbook.org/ns/docbook"'
-
f' xmlns:xlink="http://www.w3.org/1999/xlink"'
-
f' {maybe_id} version="5.0">'
-
f' <title>{self.renderInline(tokens[1].children, options, env)}</title>'
-
f' <subtitle>{self.renderInline(tokens[4].children, options, env)}</subtitle>'
-
f' {super().render(tokens[6:], options, env)}'
-
f'</book>')
+
return self._render_book(tokens)
-
return super().render(tokens, options, env)
+
return super().render(tokens)
-
def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> tuple[str, dict[str, str]]:
-
(tag, attrs) = super()._heading_tag(token, tokens, i, options, env)
+
@abstractmethod
+
def _render_book(self, tokens: Sequence[Token]) -> str:
+
raise NotImplementedError()
+
+
@abstractmethod
+
def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
+
raise NotImplementedError()
+
+
@abstractmethod
+
def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
raise NotImplementedError()
+
+
class ManualDocBookRenderer(RendererMixin, DocBookRenderer):
+
def __init__(self, toplevel_tag: str, revision: str, manpage_urls: Mapping[str, str]):
+
super().__init__(toplevel_tag, revision, manpage_urls)
+
+
def _render_book(self, tokens: Sequence[Token]) -> str:
+
assert tokens[1].children
+
assert tokens[4].children
+
if (maybe_id := cast(str, tokens[0].attrs.get('id', ""))):
+
maybe_id = "xml:id=" + xml.quoteattr(maybe_id)
+
return (f'<book xmlns="http://docbook.org/ns/docbook"'
+
f' xmlns:xlink="http://www.w3.org/1999/xlink"'
+
f' {maybe_id} version="5.0">'
+
f' <title>{self.renderInline(tokens[1].children)}</title>'
+
f' <subtitle>{self.renderInline(tokens[4].children)}</subtitle>'
+
f' {super(DocBookRenderer, self).render(tokens[6:])}'
+
f'</book>')
+
+
def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> tuple[str, dict[str, str]]:
+
(tag, attrs) = super()._heading_tag(token, tokens, i)
# render() has already verified that we don't have supernumerary headings and since the
# book tag is handled specially we can leave the check this simple
if token.tag != 'h1':
···
'xmlns:xlink': "http://www.w3.org/1999/xlink",
})
-
def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int,
-
options: OptionsDict, env: MutableMapping[str, Any]) -> str:
+
def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
result = []
# close existing partintro. the generic render doesn't really need this because
# it doesn't have a concept of structure in the way the manual does.
···
self._headings[-1] = self._headings[-1]._replace(partintro_closed=True)
# must nest properly for structural includes. this requires saving at least
# the headings stack, but creating new renderers is cheap and much easier.
-
r = ManualDocBookRenderer(tag, self._manpage_urls, None)
+
r = ManualDocBookRenderer(tag, self._revision, self._manpage_urls)
for (included, path) in token.meta['included']:
try:
-
result.append(r.render(included, options, env))
+
result.append(r.render(included))
except Exception as e:
raise RuntimeError(f"rendering {path}") from e
return "".join(result)
-
def included_options(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
-
return cast(str, token.meta['rendered-options'])
+
def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
conv = options.DocBookConverter(self._manpage_urls, self._revision, False, 'fragment',
+
token.meta['list-id'], token.meta['id-prefix'])
+
conv.add_options(token.meta['source'])
+
return conv.finalize(fragment=True)
# TODO minimize docbook diffs with existing conversions. remove soon.
-
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
-
return super().paragraph_open(token, tokens, i, options, env) + "\n "
-
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
-
return "\n" + super().paragraph_close(token, tokens, i, options, env)
-
def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
-
return f"<programlisting>\n{escape(token.content)}</programlisting>"
-
def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
-
info = f" language={quoteattr(token.info)}" if token.info != "" else ""
-
return f"<programlisting{info}>\n{escape(token.content)}</programlisting>"
+
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return super().paragraph_open(token, tokens, i) + "\n "
+
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return "\n" + super().paragraph_close(token, tokens, i)
+
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return f"<programlisting>\n{xml.escape(token.content)}</programlisting>"
+
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
info = f" language={xml.quoteattr(token.info)}" if token.info != "" else ""
+
return f"<programlisting{info}>\n{xml.escape(token.content)}</programlisting>"
+
+
class DocBookConverter(BaseConverter[ManualDocBookRenderer]):
+
INCLUDE_ARGS_NS = "docbook"
+
+
def __init__(self, manpage_urls: Mapping[str, str], revision: str):
+
super().__init__()
+
self._renderer = ManualDocBookRenderer('book', revision, manpage_urls)
+
+
+
class HTMLParameters(NamedTuple):
+
generator: str
+
stylesheets: Sequence[str]
+
scripts: Sequence[str]
+
toc_depth: int
+
chunk_toc_depth: int
+
+
class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
+
_base_path: Path
+
_html_params: HTMLParameters
+
+
def __init__(self, toplevel_tag: str, revision: str, html_params: HTMLParameters,
+
manpage_urls: Mapping[str, str], xref_targets: dict[str, XrefTarget],
+
base_path: Path):
+
super().__init__(toplevel_tag, revision, manpage_urls, xref_targets)
+
self._base_path, self._html_params = base_path, html_params
+
+
def _push(self, tag: str, hlevel_offset: int) -> Any:
+
result = (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset)
+
self._hlevel_offset += hlevel_offset
+
self._toplevel_tag, self._headings, self._attrspans = tag, [], []
+
return result
+
+
def _pop(self, state: Any) -> None:
+
(self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset) = state
+
+
def _render_book(self, tokens: Sequence[Token]) -> str:
+
assert tokens[4].children
+
title_id = cast(str, tokens[0].attrs.get('id', ""))
+
title = self._xref_targets[title_id].title
+
# subtitles don't have IDs, so we can't use xrefs to get them
+
subtitle = self.renderInline(tokens[4].children)
-
class DocBookConverter(Converter):
-
def __renderer__(self, manpage_urls: Mapping[str, str],
-
parser: Optional[markdown_it.MarkdownIt]) -> ManualDocBookRenderer:
-
return ManualDocBookRenderer('book', manpage_urls, parser)
+
toc = TocEntry.of(tokens[0])
+
return "\n".join([
+
self._file_header(toc),
+
' <div class="book">',
+
' <div class="titlepage">',
+
' <div>',
+
f' <div><h1 class="title"><a id="{html.escape(title_id, True)}"></a>{title}</h1></div>',
+
f' <div><h2 class="subtitle">{subtitle}</h2></div>',
+
' </div>',
+
" <hr />",
+
' </div>',
+
self._build_toc(tokens, 0),
+
super(HTMLRenderer, self).render(tokens[6:]),
+
' </div>',
+
self._file_footer(toc),
+
])
-
_base_paths: list[Path]
+
def _file_header(self, toc: TocEntry) -> str:
+
prev_link, up_link, next_link = "", "", ""
+
prev_a, next_a, parent_title = "", "", "&nbsp;"
+
home = toc.root
+
if toc.prev:
+
prev_link = f'<link rel="prev" href="{toc.prev.target.href()}" title="{toc.prev.target.title}" />'
+
prev_a = f'<a accesskey="p" href="{toc.prev.target.href()}">Prev</a>'
+
if toc.parent:
+
up_link = (
+
f'<link rel="up" href="{toc.parent.target.href()}" '
+
f'title="{toc.parent.target.title}" />'
+
)
+
if (part := toc.parent) and part.kind != 'book':
+
assert part.target.title
+
parent_title = part.target.title
+
if toc.next:
+
next_link = f'<link rel="next" href="{toc.next.target.href()}" title="{toc.next.target.title}" />'
+
next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>'
+
return "\n".join([
+
'<?xml version="1.0" encoding="utf-8" standalone="no"?>',
+
'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"',
+
' "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">',
+
'<html xmlns="http://www.w3.org/1999/xhtml">',
+
' <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />',
+
f' <title>{toc.target.title}</title>',
+
"".join((f'<link rel="stylesheet" type="text/css" href="{html.escape(style, True)}" />'
+
for style in self._html_params.stylesheets)),
+
"".join((f'<script src="{html.escape(script, True)}" type="text/javascript"></script>'
+
for script in self._html_params.scripts)),
+
f' <meta name="generator" content="{html.escape(self._html_params.generator, True)}" />',
+
f' <link rel="home" href="{home.target.href()}" title="{home.target.title}" />',
+
f' {up_link}{prev_link}{next_link}',
+
' </head>',
+
' <body>',
+
' <div class="navheader">',
+
' <table width="100%" summary="Navigation header">',
+
' <tr>',
+
f' <th colspan="3" align="center">{toc.target.title}</th>',
+
' </tr>',
+
' <tr>',
+
f' <td width="20%" align="left">{prev_a}&nbsp;</td>',
+
f' <th width="60%" align="center">{parent_title}</th>',
+
f' <td width="20%" align="right">&nbsp;{next_a}</td>',
+
' </tr>',
+
' </table>',
+
' <hr />',
+
' </div>',
+
])
+
+
def _file_footer(self, toc: TocEntry) -> str:
+
# prev, next = self._get_prev_and_next()
+
prev_a, up_a, home_a, next_a = "", "&nbsp;", "&nbsp;", ""
+
prev_text, up_text, next_text = "", "", ""
+
home = toc.root
+
if toc.prev:
+
prev_a = f'<a accesskey="p" href="{toc.prev.target.href()}">Prev</a>'
+
assert toc.prev.target.title
+
prev_text = toc.prev.target.title
+
if toc.parent:
+
home_a = f'<a accesskey="h" href="{home.target.href()}">Home</a>'
+
if toc.parent != home:
+
up_a = f'<a accesskey="u" href="{toc.parent.target.href()}">Up</a>'
+
if toc.next:
+
next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>'
+
assert toc.next.target.title
+
next_text = toc.next.target.title
+
return "\n".join([
+
' <div class="navfooter">',
+
' <hr />',
+
' <table width="100%" summary="Navigation footer">',
+
' <tr>',
+
f' <td width="40%" align="left">{prev_a}&nbsp;</td>',
+
f' <td width="20%" align="center">{up_a}</td>',
+
f' <td width="40%" align="right">&nbsp;{next_a}</td>',
+
' </tr>',
+
' <tr>',
+
f' <td width="40%" align="left" valign="top">{prev_text}&nbsp;</td>',
+
f' <td width="20%" align="center">{home_a}</td>',
+
f' <td width="40%" align="right" valign="top">&nbsp;{next_text}</td>',
+
' </tr>',
+
' </table>',
+
' </div>',
+
' </body>',
+
'</html>',
+
])
+
+
def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
if token.tag == 'h1':
+
return self._toplevel_tag
+
return super()._heading_tag(token, tokens, i)
+
def _build_toc(self, tokens: Sequence[Token], i: int) -> str:
+
toc = TocEntry.of(tokens[i])
+
if toc.kind == 'section':
+
return ""
+
def walk_and_emit(toc: TocEntry, depth: int) -> list[str]:
+
if depth <= 0:
+
return []
+
result = []
+
for child in toc.children:
+
result.append(
+
f'<dt>'
+
f' <span class="{html.escape(child.kind, True)}">'
+
f' <a href="{child.target.href()}">{child.target.toc_html}</a>'
+
f' </span>'
+
f'</dt>'
+
)
+
# we want to look straight through parts because docbook-xsl does too, but it
+
# also makes for more uesful top-level tocs.
+
next_level = walk_and_emit(child, depth - (0 if child.kind == 'part' else 1))
+
if next_level:
+
result.append(f'<dd><dl>{"".join(next_level)}</dl></dd>')
+
return result
+
toc_depth = (
+
self._html_params.chunk_toc_depth
+
if toc.starts_new_chunk and toc.kind != 'book'
+
else self._html_params.toc_depth
+
)
+
if not (items := walk_and_emit(toc, toc_depth)):
+
return ""
+
return (
+
f'<div class="toc">'
+
f' <p><strong>Table of Contents</strong></p>'
+
f' <dl class="toc">'
+
f' {"".join(items)}'
+
f' </dl>'
+
f'</div>'
+
)
+
+
def _make_hN(self, level: int) -> tuple[str, str]:
+
# for some reason chapters don't increase the hN nesting count in docbook xslts. duplicate
+
# this for consistency.
+
if self._toplevel_tag == 'chapter':
+
level -= 1
+
# TODO docbook compat. these are never useful for us, but not having them breaks manual
+
# compare workflows while docbook is still allowed.
+
style = ""
+
if level + self._hlevel_offset < 3 \
+
and (self._toplevel_tag == 'section' or (self._toplevel_tag == 'chapter' and level > 0)):
+
style = "clear: both"
+
tag, hstyle = super()._make_hN(max(1, level))
+
return tag, style
+
+
def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
+
outer, inner = [], []
+
# since books have no non-include content the toplevel book wrapper will not count
+
# towards nesting depth. other types will have at least a title+id heading which
+
# *does* count towards the nesting depth. chapters give a -1 to included sections
+
# mirroring the special handing in _make_hN. sigh.
+
hoffset = (
+
0 if not self._headings
+
else self._headings[-1].level - 1 if self._toplevel_tag == 'chapter'
+
else self._headings[-1].level
+
)
+
outer.append(self._maybe_close_partintro())
+
into = token.meta['include-args'].get('into-file')
+
fragments = token.meta['included']
+
state = self._push(tag, hoffset)
+
if into:
+
toc = TocEntry.of(fragments[0][0][0])
+
inner.append(self._file_header(toc))
+
# we do not set _hlevel_offset=0 because docbook doesn't either.
+
else:
+
inner = outer
+
for included, path in fragments:
+
try:
+
inner.append(self.render(included))
+
except Exception as e:
+
raise RuntimeError(f"rendering {path}") from e
+
if into:
+
inner.append(self._file_footer(toc))
+
(self._base_path / into).write_text("".join(inner))
+
self._pop(state)
+
return "".join(outer)
+
+
def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
conv = options.HTMLConverter(self._manpage_urls, self._revision, False,
+
token.meta['list-id'], token.meta['id-prefix'],
+
self._xref_targets)
+
conv.add_options(token.meta['source'])
+
return conv.finalize()
+
+
def _to_base26(n: int) -> str:
+
return (_to_base26(n // 26) if n > 26 else "") + chr(ord("A") + n % 26)
+
+
class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
+
INCLUDE_ARGS_NS = "html"
+
INCLUDE_FRAGMENT_ALLOWED_ARGS = { 'into-file' }
+
_revision: str
+
_html_params: HTMLParameters
+
_manpage_urls: Mapping[str, str]
+
_xref_targets: dict[str, XrefTarget]
+
_redirection_targets: set[str]
+
_appendix_count: int = 0
-
def __init__(self, manpage_urls: Mapping[str, str], revision: str):
-
super().__init__(manpage_urls)
-
self._revision = revision
+
def _next_appendix_id(self) -> str:
+
self._appendix_count += 1
+
return _to_base26(self._appendix_count - 1)
-
def convert(self, file: Path) -> str:
-
self._base_paths = [ file ]
-
try:
-
with open(file, 'r') as f:
-
return self._render(f.read())
-
except Exception as e:
-
raise RuntimeError(f"failed to render manual {file}") from e
+
def __init__(self, revision: str, html_params: HTMLParameters, manpage_urls: Mapping[str, str]):
+
super().__init__()
+
self._revision, self._html_params, self._manpage_urls = revision, html_params, manpage_urls
+
self._xref_targets = {}
+
self._redirection_targets = set()
+
# renderer not set on purpose since it has a dependency on the output path!
-
def _parse(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> list[Token]:
-
tokens = super()._parse(src, env)
+
def convert(self, infile: Path, outfile: Path) -> None:
+
self._renderer = ManualHTMLRenderer('book', self._revision, self._html_params,
+
self._manpage_urls, self._xref_targets, outfile.parent)
+
super().convert(infile, outfile)
+
+
def _parse(self, src: str) -> list[Token]:
+
tokens = super()._parse(src)
for token in tokens:
-
if token.type != "fence" or not token.info.startswith("{=include=} "):
+
if not token.type.startswith('included_') \
+
or not (into := token.meta['include-args'].get('into-file')):
continue
-
typ = token.info[12:].strip()
-
if typ == 'options':
-
token.type = 'included_options'
-
self._parse_options(token)
-
elif typ in [ 'sections', 'chapters', 'preface', 'parts', 'appendix' ]:
-
token.type = 'included_' + typ
-
self._parse_included_blocks(token, env)
-
else:
-
raise RuntimeError(f"unsupported structural include type '{typ}'")
+
assert token.map
+
if len(token.meta['included']) == 0:
+
raise RuntimeError(f"redirection target {into} in line {token.map[0] + 1} is empty!")
+
# we use blender-style //path to denote paths relative to the origin file
+
# (usually index.html). this makes everything a lot easier and clearer.
+
if not into.startswith("//") or '/' in into[2:]:
+
raise RuntimeError(f"html:into-file must be a relative-to-origin //filename", into)
+
into = token.meta['include-args']['into-file'] = into[2:]
+
if into in self._redirection_targets:
+
raise RuntimeError(f"redirection target {into} in line {token.map[0] + 1} is already in use")
+
self._redirection_targets.add(into)
return tokens
-
def _parse_included_blocks(self, token: Token, env: Optional[MutableMapping[str, Any]]) -> None:
-
assert token.map
-
included = token.meta['included'] = []
-
for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2):
-
line = line.strip()
-
path = self._base_paths[-1].parent / line
-
if path in self._base_paths:
-
raise RuntimeError(f"circular include found in line {lnum}")
-
try:
-
self._base_paths.append(path)
-
with open(path, 'r') as f:
-
tokens = self._parse(f.read(), env)
-
included.append((tokens, path))
-
self._base_paths.pop()
-
except Exception as e:
-
raise RuntimeError(f"processing included file {path} from line {lnum}") from e
+
# xref | (id, type, heading inlines, file, starts new file)
+
def _collect_ids(self, tokens: Sequence[Token], target_file: str, typ: str, file_changed: bool
+
) -> list[XrefTarget | tuple[str, str, Token, str, bool]]:
+
result: list[XrefTarget | tuple[str, str, Token, str, bool]] = []
+
# collect all IDs and their xref substitutions. headings are deferred until everything
+
# has been parsed so we can resolve links in headings. if that's even used anywhere.
+
for (i, bt) in enumerate(tokens):
+
if bt.type == 'heading_open' and (id := cast(str, bt.attrs.get('id', ''))):
+
result.append((id, typ if bt.tag == 'h1' else 'section', tokens[i + 1], target_file,
+
i == 0 and file_changed))
+
elif bt.type == 'included_options':
+
id_prefix = bt.meta['id-prefix']
+
for opt in bt.meta['source'].keys():
+
id = make_xml_id(f"{id_prefix}{opt}")
+
name = html.escape(opt)
+
result.append(XrefTarget(id, f'<code class="option">{name}</code>', name, None, target_file))
+
elif bt.type.startswith('included_'):
+
sub_file = bt.meta['include-args'].get('into-file', target_file)
+
subtyp = bt.type.removeprefix('included_').removesuffix('s')
+
for si, (sub, _path) in enumerate(bt.meta['included']):
+
result += self._collect_ids(sub, sub_file, subtyp, si == 0 and sub_file != target_file)
+
elif bt.type == 'inline':
+
assert bt.children
+
result += self._collect_ids(bt.children, target_file, typ, False)
+
elif id := cast(str, bt.attrs.get('id', '')):
+
# anchors and examples have no titles we could use, but we'll have to put
+
# *something* here to communicate that there's no title.
+
result.append(XrefTarget(id, "???", None, None, target_file))
+
return result
+
+
def _render_xref(self, id: str, typ: str, inlines: Token, path: str, drop_fragment: bool) -> XrefTarget:
+
assert inlines.children
+
title_html = self._renderer.renderInline(inlines.children)
+
if typ == 'appendix':
+
# NOTE the docbook compat is strong here
+
n = self._next_appendix_id()
+
prefix = f"Appendix\u00A0{n}.\u00A0"
+
# HACK for docbook compat: prefix the title inlines with appendix id if
+
# necessary. the alternative is to mess with titlepage rendering in headings,
+
# which seems just a lot worse than this
+
prefix_tokens = [Token(type='text', tag='', nesting=0, content=prefix)]
+
inlines.children = prefix_tokens + list(inlines.children)
+
title = prefix + title_html
+
toc_html = f"{n}. {title_html}"
+
title_html = f"Appendix&nbsp;{n}"
+
else:
+
toc_html, title = title_html, title_html
+
title_html = (
+
f"<em>{title_html}</em>"
+
if typ == 'chapter'
+
else title_html if typ in [ 'book', 'part' ]
+
else f'the section called “{title_html}”'
+
)
+
return XrefTarget(id, title_html, toc_html, re.sub('<.*?>', '', title), path, drop_fragment)
+
+
def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None:
+
xref_queue = self._collect_ids(tokens, outfile.name, 'book', True)
-
def _parse_options(self, token: Token) -> None:
-
assert token.map
+
failed = False
+
deferred = []
+
while xref_queue:
+
for item in xref_queue:
+
try:
+
target = item if isinstance(item, XrefTarget) else self._render_xref(*item)
+
except UnresolvedXrefError as e:
+
if failed:
+
raise
+
deferred.append(item)
+
continue
-
items = {}
-
for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2):
-
if len(args := line.split(":", 1)) != 2:
-
raise RuntimeError(f"options directive with no argument in line {lnum}")
-
(k, v) = (args[0].strip(), args[1].strip())
-
if k in items:
-
raise RuntimeError(f"duplicate options directive {k} in line {lnum}")
-
items[k] = v
-
try:
-
id_prefix = items.pop('id-prefix')
-
varlist_id = items.pop('list-id')
-
source = items.pop('source')
-
except KeyError as e:
-
raise RuntimeError(f"options directive {e} missing in block at line {token.map[0] + 1}")
-
if items.keys():
-
raise RuntimeError(
-
f"unsupported options directives in block at line {token.map[0] + 1}",
-
" ".join(items.keys()))
+
if target.id in self._xref_targets:
+
raise RuntimeError(f"found duplicate id #{target.id}")
+
self._xref_targets[target.id] = target
+
if len(deferred) == len(xref_queue):
+
failed = True # do another round and report the first error
+
xref_queue = deferred
-
try:
-
conv = options.DocBookConverter(
-
self._manpage_urls, self._revision, False, 'fragment', varlist_id, id_prefix)
-
with open(self._base_paths[-1].parent / source, 'r') as f:
-
conv.add_options(json.load(f))
-
token.meta['rendered-options'] = conv.finalize(fragment=True)
-
except Exception as e:
-
raise RuntimeError(f"processing options block in line {token.map[0] + 1}") from e
+
TocEntry.collect_and_link(self._xref_targets, tokens)
···
p.add_argument('infile', type=Path)
p.add_argument('outfile', type=Path)
+
def _build_cli_html(p: argparse.ArgumentParser) -> None:
+
p.add_argument('--manpage-urls', required=True)
+
p.add_argument('--revision', required=True)
+
p.add_argument('--generator', default='nixos-render-docs')
+
p.add_argument('--stylesheet', default=[], action='append')
+
p.add_argument('--script', default=[], action='append')
+
p.add_argument('--toc-depth', default=1, type=int)
+
p.add_argument('--chunk-toc-depth', default=1, type=int)
+
p.add_argument('infile', type=Path)
+
p.add_argument('outfile', type=Path)
+
def _run_cli_db(args: argparse.Namespace) -> None:
with open(args.manpage_urls, 'r') as manpage_urls:
md = DocBookConverter(json.load(manpage_urls), args.revision)
-
converted = md.convert(args.infile)
-
args.outfile.write_text(converted)
+
md.convert(args.infile, args.outfile)
+
+
def _run_cli_html(args: argparse.Namespace) -> None:
+
with open(args.manpage_urls, 'r') as manpage_urls:
+
md = HTMLConverter(
+
args.revision,
+
HTMLParameters(args.generator, args.stylesheet, args.script, args.toc_depth,
+
args.chunk_toc_depth),
+
json.load(manpage_urls))
+
md.convert(args.infile, args.outfile)
def build_cli(p: argparse.ArgumentParser) -> None:
formats = p.add_subparsers(dest='format', required=True)
_build_cli_db(formats.add_parser('docbook'))
+
_build_cli_html(formats.add_parser('html'))
def run_cli(args: argparse.Namespace) -> None:
if args.format == 'docbook':
_run_cli_db(args)
+
elif args.format == 'html':
+
_run_cli_html(args)
else:
raise RuntimeError('format not hooked up', args)
+186
pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
···
+
from __future__ import annotations
+
+
import dataclasses as dc
+
import html
+
import itertools
+
+
from typing import cast, get_args, Iterable, Literal, Sequence
+
+
from markdown_it.token import Token
+
+
from .utils import Freezeable
+
+
# FragmentType is used to restrict structural include blocks.
+
FragmentType = Literal['preface', 'part', 'chapter', 'section', 'appendix']
+
+
# in the TOC all fragments are allowed, plus the all-encompassing book.
+
TocEntryType = Literal['book', 'preface', 'part', 'chapter', 'section', 'appendix']
+
+
def is_include(token: Token) -> bool:
+
return token.type == "fence" and token.info.startswith("{=include=} ")
+
+
# toplevel file must contain only the title headings and includes, anything else
+
# would cause strange rendering.
+
def _check_book_structure(tokens: Sequence[Token]) -> None:
+
for token in tokens[6:]:
+
if not is_include(token):
+
assert token.map
+
raise RuntimeError(f"unexpected content in line {token.map[0] + 1}, "
+
"expected structural include")
+
+
# much like books, parts may not contain headings other than their title heading.
+
# this is a limitation of the current renderers and TOC generators that do not handle
+
# this case well even though it is supported in docbook (and probably supportable
+
# anywhere else).
+
def _check_part_structure(tokens: Sequence[Token]) -> None:
+
_check_fragment_structure(tokens)
+
for token in tokens[3:]:
+
if token.type == 'heading_open':
+
assert token.map
+
raise RuntimeError(f"unexpected heading in line {token.map[0] + 1}")
+
+
# two include blocks must either be adjacent or separated by a heading, otherwise
+
# we cannot generate a correct TOC (since there'd be nothing to link to between
+
# the two includes).
+
def _check_fragment_structure(tokens: Sequence[Token]) -> None:
+
for i, token in enumerate(tokens):
+
if is_include(token) \
+
and i + 1 < len(tokens) \
+
and not (is_include(tokens[i + 1]) or tokens[i + 1].type == 'heading_open'):
+
assert token.map
+
raise RuntimeError(f"unexpected content in line {token.map[0] + 1}, "
+
"expected heading or structural include")
+
+
def check_structure(kind: TocEntryType, tokens: Sequence[Token]) -> None:
+
wanted = { 'h1': 'title' }
+
wanted |= { 'h2': 'subtitle' } if kind == 'book' else {}
+
for (i, (tag, role)) in enumerate(wanted.items()):
+
if len(tokens) < 3 * (i + 1):
+
raise RuntimeError(f"missing {role} ({tag}) heading")
+
token = tokens[3 * i]
+
if token.type != 'heading_open' or token.tag != tag:
+
assert token.map
+
raise RuntimeError(f"expected {role} ({tag}) heading in line {token.map[0] + 1}", token)
+
for t in tokens[3 * len(wanted):]:
+
if t.type != 'heading_open' or not (role := wanted.get(t.tag, '')):
+
continue
+
assert t.map
+
raise RuntimeError(
+
f"only one {role} heading ({t.markup} [text...]) allowed per "
+
f"{kind}, but found a second in line {t.map[0] + 1}. "
+
"please remove all such headings except the first or demote the subsequent headings.",
+
t)
+
+
last_heading_level = 0
+
for token in tokens:
+
if token.type != 'heading_open':
+
continue
+
+
# book subtitle headings do not need an id, only book title headings do.
+
# every other headings needs one too. we need this to build a TOC and to
+
# provide stable links if the manual changes shape.
+
if 'id' not in token.attrs and (kind != 'book' or token.tag != 'h2'):
+
assert token.map
+
raise RuntimeError(f"heading in line {token.map[0] + 1} does not have an id")
+
+
level = int(token.tag[1:]) # because tag = h1..h6
+
if level > last_heading_level + 1:
+
assert token.map
+
raise RuntimeError(f"heading in line {token.map[0] + 1} skips one or more heading levels, "
+
"which is currently not allowed")
+
last_heading_level = level
+
+
if kind == 'book':
+
_check_book_structure(tokens)
+
elif kind == 'part':
+
_check_part_structure(tokens)
+
else:
+
_check_fragment_structure(tokens)
+
+
@dc.dataclass(frozen=True)
+
class XrefTarget:
+
id: str
+
"""link label for `[](#local-references)`"""
+
title_html: str
+
"""toc label"""
+
toc_html: str | None
+
"""text for `<title>` tags and `title="..."` attributes"""
+
title: str | None
+
"""path to file that contains the anchor"""
+
path: str
+
"""whether to drop the `#anchor` from links when expanding xrefs"""
+
drop_fragment: bool = False
+
+
def href(self) -> str:
+
path = html.escape(self.path, True)
+
return path if self.drop_fragment else f"{path}#{html.escape(self.id, True)}"
+
+
@dc.dataclass
+
class TocEntry(Freezeable):
+
kind: TocEntryType
+
target: XrefTarget
+
parent: TocEntry | None = None
+
prev: TocEntry | None = None
+
next: TocEntry | None = None
+
children: list[TocEntry] = dc.field(default_factory=list)
+
starts_new_chunk: bool = False
+
+
@property
+
def root(self) -> TocEntry:
+
return self.parent.root if self.parent else self
+
+
@classmethod
+
def of(cls, token: Token) -> TocEntry:
+
entry = token.meta.get('TocEntry')
+
if not isinstance(entry, TocEntry):
+
raise RuntimeError('requested toc entry, none found', token)
+
return entry
+
+
@classmethod
+
def collect_and_link(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token]) -> TocEntry:
+
result = cls._collect_entries(xrefs, tokens, 'book')
+
+
def flatten_with_parent(this: TocEntry, parent: TocEntry | None) -> Iterable[TocEntry]:
+
this.parent = parent
+
return itertools.chain([this], *[ flatten_with_parent(c, this) for c in this.children ])
+
+
flat = list(flatten_with_parent(result, None))
+
prev = flat[0]
+
prev.starts_new_chunk = True
+
paths_seen = set([prev.target.path])
+
for c in flat[1:]:
+
if prev.target.path != c.target.path and c.target.path not in paths_seen:
+
c.starts_new_chunk = True
+
c.prev, prev.next = prev, c
+
prev = c
+
paths_seen.add(c.target.path)
+
+
for c in flat:
+
c.freeze()
+
+
return result
+
+
@classmethod
+
def _collect_entries(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token],
+
kind: TocEntryType) -> TocEntry:
+
# we assume that check_structure has been run recursively over the entire input.
+
# list contains (tag, entry) pairs that will collapse to a single entry for
+
# the full sequence.
+
entries: list[tuple[str, TocEntry]] = []
+
for token in tokens:
+
if token.type.startswith('included_') and (included := token.meta.get('included')):
+
fragment_type_str = token.type[9:].removesuffix('s')
+
assert fragment_type_str in get_args(TocEntryType)
+
fragment_type = cast(TocEntryType, fragment_type_str)
+
for fragment, _path in included:
+
entries[-1][1].children.append(cls._collect_entries(xrefs, fragment, fragment_type))
+
elif token.type == 'heading_open' and (id := cast(str, token.attrs.get('id', ''))):
+
while len(entries) > 1 and entries[-1][0] >= token.tag:
+
entries[-2][1].children.append(entries.pop()[1])
+
entries.append((token.tag,
+
TocEntry(kind if token.tag == 'h1' else 'section', xrefs[id])))
+
token.meta['TocEntry'] = entries[-1][1]
+
+
while len(entries) > 1:
+
entries[-2][1].children.append(entries.pop()[1])
+
return entries[0][1]
+81 -117
pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py
···
from abc import ABC
from collections.abc import Mapping, MutableMapping, Sequence
-
from typing import Any, Callable, cast, get_args, Iterable, Literal, NoReturn, Optional
+
from typing import Any, Callable, cast, Generic, get_args, Iterable, Literal, NoReturn, Optional, TypeVar
import dataclasses
import re
···
AdmonitionKind = Literal["note", "caution", "tip", "important", "warning"]
-
class Renderer(markdown_it.renderer.RendererProtocol):
+
class Renderer:
_admonitions: dict[AdmonitionKind, tuple[RenderFn, RenderFn]]
_admonition_stack: list[AdmonitionKind]
-
def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
+
def __init__(self, manpage_urls: Mapping[str, str]):
self._manpage_urls = manpage_urls
self.rules = {
'text': self.text,
···
def _join_inline(self, ls: Iterable[str]) -> str:
return "".join(ls)
-
def admonition_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def admonition_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
tag = token.meta['kind']
self._admonition_stack.append(tag)
-
return self._admonitions[tag][0](token, tokens, i, options, env)
-
def admonition_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
-
return self._admonitions[self._admonition_stack.pop()][1](token, tokens, i, options, env)
+
return self._admonitions[tag][0](token, tokens, i)
+
def admonition_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
return self._admonitions[self._admonition_stack.pop()][1](token, tokens, i)
-
def render(self, tokens: Sequence[Token], options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def render(self, tokens: Sequence[Token]) -> str:
def do_one(i: int, token: Token) -> str:
if token.type == "inline":
assert token.children is not None
-
return self.renderInline(token.children, options, env)
+
return self.renderInline(token.children)
elif token.type in self.rules:
-
return self.rules[token.type](tokens[i], tokens, i, options, env)
+
return self.rules[token.type](tokens[i], tokens, i)
else:
raise NotImplementedError("md token not supported yet", token)
return self._join_block(map(lambda arg: do_one(*arg), enumerate(tokens)))
-
def renderInline(self, tokens: Sequence[Token], options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def renderInline(self, tokens: Sequence[Token]) -> str:
def do_one(i: int, token: Token) -> str:
if token.type in self.rules:
-
return self.rules[token.type](tokens[i], tokens, i, options, env)
+
return self.rules[token.type](tokens[i], tokens, i)
else:
raise NotImplementedError("md token not supported yet", token)
return self._join_inline(map(lambda arg: do_one(*arg), enumerate(tokens)))
-
def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def example_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
-
def example_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def _is_escaped(src: str, pos: int) -> bool:
···
md.core.ruler.push("block_attr", block_attr)
-
class Converter(ABC):
-
__renderer__: Callable[[Mapping[str, str], markdown_it.MarkdownIt], Renderer]
+
TR = TypeVar('TR', bound='Renderer')
+
+
class Converter(ABC, Generic[TR]):
+
# we explicitly disable markdown-it rendering support and use our own entirely.
+
# rendering is well separated from parsing and our renderers carry much more state than
+
# markdown-it easily acknowledges as 'good' (unless we used the untyped env args to
+
# shuttle that state around, which is very fragile)
+
class ForbiddenRenderer(markdown_it.renderer.RendererProtocol):
+
__output__ = "none"
+
+
def __init__(self, parser: Optional[markdown_it.MarkdownIt]):
+
pass
+
+
def render(self, tokens: Sequence[Token], options: OptionsDict,
+
env: MutableMapping[str, Any]) -> str:
+
raise NotImplementedError("do not use Converter._md.renderer. 'tis a silly place")
-
def __init__(self, manpage_urls: Mapping[str, str]):
-
self._manpage_urls = manpage_urls
+
_renderer: TR
+
def __init__(self) -> None:
self._md = markdown_it.MarkdownIt(
"commonmark",
{
···
'html': False, # not useful since we target many formats
'typographer': True, # required for smartquotes
},
-
renderer_cls=lambda parser: self.__renderer__(self._manpage_urls, parser)
+
renderer_cls=self.ForbiddenRenderer
)
self._md.use(
container_plugin,
···
self._md.use(_block_attr)
self._md.enable(["smartquotes", "replacements"])
-
def _parse(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> list[Token]:
-
return self._md.parse(src, env if env is not None else {})
+
def _parse(self, src: str) -> list[Token]:
+
return self._md.parse(src, {})
-
def _render(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> str:
-
env = {} if env is None else env
-
tokens = self._parse(src, env)
-
return self._md.renderer.render(tokens, self._md.options, env) # type: ignore[no-any-return]
+
def _render(self, src: str) -> str:
+
tokens = self._parse(src)
+
return self._renderer.render(tokens)
+150 -51
pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py
···
from __future__ import annotations
import argparse
+
import html
import json
+
import xml.sax.saxutils as xml
from abc import abstractmethod
-
from collections.abc import Mapping, MutableMapping, Sequence
-
from markdown_it.utils import OptionsDict
+
from collections.abc import Mapping, Sequence
from markdown_it.token import Token
-
from typing import Any, Optional
+
from typing import Any, Generic, Optional
from urllib.parse import quote
-
from xml.sax.saxutils import escape, quoteattr
import markdown_it
+
from . import md
from . import parallel
from .asciidoc import AsciiDocRenderer, asciidoc_escape
from .commonmark import CommonMarkRenderer
from .docbook import DocBookRenderer, make_xml_id
+
from .html import HTMLRenderer
from .manpage import ManpageRenderer, man_escape
+
from .manual_structure import XrefTarget
from .md import Converter, md_escape, md_make_code
from .types import OptionLoc, Option, RenderedOption
···
return None
return option[key] # type: ignore[return-value]
-
class BaseConverter(Converter):
+
class BaseConverter(Converter[md.TR], Generic[md.TR]):
__option_block_separator__: str
_options: dict[str, RenderedOption]
-
def __init__(self, manpage_urls: Mapping[str, str],
-
revision: str,
-
markdown_by_default: bool):
-
super().__init__(manpage_urls)
+
def __init__(self, revision: str, markdown_by_default: bool):
+
super().__init__()
self._options = {}
self._revision = revision
self._markdown_by_default = markdown_by_default
···
# since it's good enough so far.
@classmethod
@abstractmethod
-
def _parallel_render_init_worker(cls, a: Any) -> BaseConverter: raise NotImplementedError()
+
def _parallel_render_init_worker(cls, a: Any) -> BaseConverter[md.TR]: raise NotImplementedError()
def _render_option(self, name: str, option: dict[str, Any]) -> RenderedOption:
try:
···
raise Exception(f"Failed to render option {name}") from e
@classmethod
-
def _parallel_render_step(cls, s: BaseConverter, a: Any) -> RenderedOption:
+
def _parallel_render_step(cls, s: BaseConverter[md.TR], a: Any) -> RenderedOption:
return s._render_option(*a)
def add_options(self, options: dict[str, Any]) -> None:
···
def finalize(self) -> str: raise NotImplementedError()
class OptionDocsRestrictions:
-
def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in options doc", token)
-
def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in options doc", token)
-
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in options doc", token)
-
def example_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in options doc", token)
class OptionsDocBookRenderer(OptionDocsRestrictions, DocBookRenderer):
# TODO keep optionsDocBook diff small. remove soon if rendering is still good.
-
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
token.meta['compact'] = False
-
return super().ordered_list_open(token, tokens, i, options, env)
-
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-
env: MutableMapping[str, Any]) -> str:
+
return super().ordered_list_open(token, tokens, i)
+
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
token.meta['compact'] = False
-
return super().bullet_list_open(token, tokens, i, options, env)
+
return super().bullet_list_open(token, tokens, i)
-
class DocBookConverter(BaseConverter):
-
__renderer__ = OptionsDocBookRenderer
+
class DocBookConverter(BaseConverter[OptionsDocBookRenderer]):
__option_block_separator__ = ""
def __init__(self, manpage_urls: Mapping[str, str],
···
document_type: str,
varlist_id: str,
id_prefix: str):
-
super().__init__(manpage_urls, revision, markdown_by_default)
+
super().__init__(revision, markdown_by_default)
+
self._renderer = OptionsDocBookRenderer(manpage_urls)
self._document_type = document_type
self._varlist_id = varlist_id
self._id_prefix = id_prefix
def _parallel_render_prepare(self) -> Any:
-
return (self._manpage_urls, self._revision, self._markdown_by_default, self._document_type,
+
return (self._renderer._manpage_urls, self._revision, self._markdown_by_default, self._document_type,
self._varlist_id, self._id_prefix)
@classmethod
def _parallel_render_init_worker(cls, a: Any) -> DocBookConverter:
···
def _decl_def_entry(self, href: Optional[str], name: str) -> list[str]:
if href is not None:
-
href = " xlink:href=" + quoteattr(href)
+
href = " xlink:href=" + xml.quoteattr(href)
return [
f"<member><filename{href}>",
-
escape(name),
+
xml.escape(name),
"</filename></member>"
]
···
result += [
"<varlistentry>",
# NOTE adding extra spaces here introduces spaces into xref link expansions
-
(f"<term xlink:href={quoteattr('#' + id)} xml:id={quoteattr(id)}>" +
-
f"<option>{escape(name)}</option></term>"),
+
(f"<term xlink:href={xml.quoteattr('#' + id)} xml:id={xml.quoteattr(id)}>" +
+
f"<option>{xml.escape(name)}</option></term>"),
"<listitem>"
]
result += opt.lines
···
class OptionsManpageRenderer(OptionDocsRestrictions, ManpageRenderer):
pass
-
class ManpageConverter(BaseConverter):
-
def __renderer__(self, manpage_urls: Mapping[str, str],
-
parser: Optional[markdown_it.MarkdownIt] = None) -> OptionsManpageRenderer:
-
return OptionsManpageRenderer(manpage_urls, self._options_by_id, parser)
-
+
class ManpageConverter(BaseConverter[OptionsManpageRenderer]):
__option_block_separator__ = ".sp"
_options_by_id: dict[str, str]
···
*,
# only for parallel rendering
_options_by_id: Optional[dict[str, str]] = None):
+
super().__init__(revision, markdown_by_default)
self._options_by_id = _options_by_id or {}
-
super().__init__({}, revision, markdown_by_default)
+
self._renderer = OptionsManpageRenderer({}, self._options_by_id)
def _parallel_render_prepare(self) -> Any:
return ((self._revision, self._markdown_by_default), { '_options_by_id': self._options_by_id })
···
return cls(*a[0], **a[1])
def _render_option(self, name: str, option: dict[str, Any]) -> RenderedOption:
-
assert isinstance(self._md.renderer, OptionsManpageRenderer)
-
links = self._md.renderer.link_footnotes = []
+
links = self._renderer.link_footnotes = []
result = super()._render_option(name, option)
-
self._md.renderer.link_footnotes = None
+
self._renderer.link_footnotes = None
return result._replace(links=links)
def add_options(self, options: dict[str, Any]) -> None:
···
if lit := option_is(option, key, 'literalDocBook'):
raise RuntimeError("can't render manpages in the presence of docbook")
else:
-
assert isinstance(self._md.renderer, OptionsManpageRenderer)
try:
-
self._md.renderer.inline_code_is_quoted = False
+
self._renderer.inline_code_is_quoted = False
return super()._render_code(option, key)
finally:
-
self._md.renderer.inline_code_is_quoted = True
+
self._renderer.inline_code_is_quoted = True
def _render_description(self, desc: str | dict[str, Any]) -> list[str]:
if isinstance(desc, str) and not self._markdown_by_default:
···
class OptionsCommonMarkRenderer(OptionDocsRestrictions, CommonMarkRenderer):
pass
-
class CommonMarkConverter(BaseConverter):
-
__renderer__ = OptionsCommonMarkRenderer
+
class CommonMarkConverter(BaseConverter[OptionsCommonMarkRenderer]):
__option_block_separator__ = ""
+
def __init__(self, manpage_urls: Mapping[str, str], revision: str, markdown_by_default: bool):
+
super().__init__(revision, markdown_by_default)
+
self._renderer = OptionsCommonMarkRenderer(manpage_urls)
+
def _parallel_render_prepare(self) -> Any:
-
return (self._manpage_urls, self._revision, self._markdown_by_default)
+
return (self._renderer._manpage_urls, self._revision, self._markdown_by_default)
@classmethod
def _parallel_render_init_worker(cls, a: Any) -> CommonMarkConverter:
return cls(*a)
···
class OptionsAsciiDocRenderer(OptionDocsRestrictions, AsciiDocRenderer):
pass
-
class AsciiDocConverter(BaseConverter):
-
__renderer__ = AsciiDocRenderer
+
class AsciiDocConverter(BaseConverter[OptionsAsciiDocRenderer]):
__option_block_separator__ = ""
+
def __init__(self, manpage_urls: Mapping[str, str], revision: str, markdown_by_default: bool):
+
super().__init__(revision, markdown_by_default)
+
self._renderer = OptionsAsciiDocRenderer(manpage_urls)
+
def _parallel_render_prepare(self) -> Any:
-
return (self._manpage_urls, self._revision, self._markdown_by_default)
+
return (self._renderer._manpage_urls, self._revision, self._markdown_by_default)
@classmethod
def _parallel_render_init_worker(cls, a: Any) -> AsciiDocConverter:
return cls(*a)
···
result.append(f"== {asciidoc_escape(name)}\n")
result += opt.lines
result.append("\n\n")
+
+
return "\n".join(result)
+
+
class OptionsHTMLRenderer(OptionDocsRestrictions, HTMLRenderer):
+
# TODO docbook compat. must be removed together with the matching docbook handlers.
+
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
token.meta['compact'] = False
+
return super().ordered_list_open(token, tokens, i)
+
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
token.meta['compact'] = False
+
return super().bullet_list_open(token, tokens, i)
+
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+
# TODO use token.info. docbook doesn't so we can't yet.
+
return f'<pre class="programlisting">{html.escape(token.content)}</pre>'
+
+
class HTMLConverter(BaseConverter[OptionsHTMLRenderer]):
+
__option_block_separator__ = ""
+
+
def __init__(self, manpage_urls: Mapping[str, str], revision: str, markdown_by_default: bool,
+
varlist_id: str, id_prefix: str, xref_targets: Mapping[str, XrefTarget]):
+
super().__init__(revision, markdown_by_default)
+
self._xref_targets = xref_targets
+
self._varlist_id = varlist_id
+
self._id_prefix = id_prefix
+
self._renderer = OptionsHTMLRenderer(manpage_urls, self._xref_targets)
+
+
def _parallel_render_prepare(self) -> Any:
+
return (self._renderer._manpage_urls, self._revision, self._markdown_by_default,
+
self._varlist_id, self._id_prefix, self._xref_targets)
+
@classmethod
+
def _parallel_render_init_worker(cls, a: Any) -> HTMLConverter:
+
return cls(*a)
+
+
def _render_code(self, option: dict[str, Any], key: str) -> list[str]:
+
if lit := option_is(option, key, 'literalDocBook'):
+
raise RuntimeError("can't render html in the presence of docbook")
+
else:
+
return super()._render_code(option, key)
+
+
def _render_description(self, desc: str | dict[str, Any]) -> list[str]:
+
if isinstance(desc, str) and not self._markdown_by_default:
+
raise RuntimeError("can't render html in the presence of docbook")
+
else:
+
return super()._render_description(desc)
+
+
def _related_packages_header(self) -> list[str]:
+
return [
+
'<p><span class="emphasis"><em>Related packages:</em></span></p>',
+
]
+
+
def _decl_def_header(self, header: str) -> list[str]:
+
return [
+
f'<p><span class="emphasis"><em>{header}:</em></span></p>',
+
'<table border="0" summary="Simple list" class="simplelist">'
+
]
+
+
def _decl_def_entry(self, href: Optional[str], name: str) -> list[str]:
+
if href is not None:
+
href = f' href="{html.escape(href, True)}"'
+
return [
+
"<tr><td>",
+
f'<code class="filename"><a class="filename" {href} target="_top">',
+
f'{html.escape(name)}',
+
'</a></code>',
+
"</td></tr>"
+
]
+
+
def _decl_def_footer(self) -> list[str]:
+
return [ "</table>" ]
+
+
def finalize(self) -> str:
+
result = []
+
+
result += [
+
'<div class="variablelist">',
+
f'<a id="{html.escape(self._varlist_id, True)}"></a>',
+
' <dl class="variablelist">',
+
]
+
+
for (name, opt) in self._sorted_options():
+
id = make_xml_id(self._id_prefix + name)
+
target = self._xref_targets[id]
+
result += [
+
'<dt>',
+
' <span class="term">',
+
# docbook compat, these could be one tag
+
f' <a id="{html.escape(id, True)}"></a><a class="term" href="{target.href()}">'
+
# no spaces here (and string merging) for docbook output compat
+
f'<code class="option">{html.escape(name)}</code>',
+
' </a>',
+
' </span>',
+
'</dt>',
+
'<dd>',
+
]
+
result += opt.lines
+
result += [
+
"</dd>",
+
]
+
+
result += [
+
" </dl>",
+
"</div>"
+
]
return "\n".join(result)
+2 -3
pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/types.py
···
-
from collections.abc import Sequence, MutableMapping
+
from collections.abc import Sequence
from typing import Any, Callable, Optional, Tuple, NamedTuple
from markdown_it.token import Token
-
from markdown_it.utils import OptionsDict
OptionLoc = str | dict[str, str]
Option = dict[str, str | dict[str, str] | list[OptionLoc]]
···
lines: list[str]
links: Optional[list[str]] = None
-
RenderFn = Callable[[Token, Sequence[Token], int, OptionsDict, MutableMapping[str, Any]], str]
+
RenderFn = Callable[[Token, Sequence[Token], int], str]
+21
pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/utils.py
···
+
from typing import Any
+
+
_frozen_classes: dict[type, type] = {}
+
+
# make a derived class freezable (ie, disallow modifications).
+
# we do this by changing the class of an instance at runtime when freeze()
+
# is called, providing a derived class that is exactly the same except
+
# for a __setattr__ that raises an error when called. this beats having
+
# a field for frozenness and an unconditional __setattr__ that checks this
+
# field because it does not insert anything into the class dict.
+
class Freezeable:
+
def freeze(self) -> None:
+
cls = type(self)
+
if not (frozen := _frozen_classes.get(cls)):
+
def __setattr__(instance: Any, n: str, v: Any) -> None:
+
raise TypeError(f'{cls.__name__} is frozen')
+
frozen = type(cls.__name__, (cls,), {
+
'__setattr__': __setattr__,
+
})
+
_frozen_classes[cls] = frozen
+
self.__class__ = frozen
+5 -3
pkgs/tools/nix/nixos-render-docs/src/tests/test_asciidoc.py
···
-
import nixos_render_docs
+
import nixos_render_docs as nrd
from sample_md import sample1
-
class Converter(nixos_render_docs.md.Converter):
-
__renderer__ = nixos_render_docs.asciidoc.AsciiDocRenderer
+
class Converter(nrd.md.Converter[nrd.asciidoc.AsciiDocRenderer]):
+
def __init__(self, manpage_urls: dict[str, str]):
+
super().__init__()
+
self._renderer = nrd.asciidoc.AsciiDocRenderer(manpage_urls)
def test_lists() -> None:
c = Converter({})
+5 -3
pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py
···
-
import nixos_render_docs
+
import nixos_render_docs as nrd
from sample_md import sample1
···
import markdown_it
-
class Converter(nixos_render_docs.md.Converter):
-
__renderer__ = nixos_render_docs.commonmark.CommonMarkRenderer
+
class Converter(nrd.md.Converter[nrd.commonmark.CommonMarkRenderer]):
+
def __init__(self, manpage_urls: Mapping[str, str]):
+
super().__init__()
+
self._renderer = nrd.commonmark.CommonMarkRenderer(manpage_urls)
# NOTE: in these tests we represent trailing spaces by ` ` and replace them with real space later,
# since a number of editors will strip trailing whitespace on save and that would break the tests.
+5 -3
pkgs/tools/nix/nixos-render-docs/src/tests/test_headings.py
···
-
import nixos_render_docs
+
import nixos_render_docs as nrd
from markdown_it.token import Token
-
class Converter(nixos_render_docs.md.Converter):
+
class Converter(nrd.md.Converter[nrd.docbook.DocBookRenderer]):
# actual renderer doesn't matter, we're just parsing.
-
__renderer__ = nixos_render_docs.docbook.DocBookRenderer
+
def __init__(self, manpage_urls: dict[str, str]) -> None:
+
super().__init__()
+
self._renderer = nrd.docbook.DocBookRenderer(manpage_urls)
def test_heading_id_absent() -> None:
c = Converter({})
+179
pkgs/tools/nix/nixos-render-docs/src/tests/test_html.py
···
+
import nixos_render_docs as nrd
+
import pytest
+
+
from sample_md import sample1
+
+
class Converter(nrd.md.Converter[nrd.html.HTMLRenderer]):
+
def __init__(self, manpage_urls: dict[str, str], xrefs: dict[str, nrd.manual_structure.XrefTarget]):
+
super().__init__()
+
self._renderer = nrd.html.HTMLRenderer(manpage_urls, xrefs)
+
+
def unpretty(s: str) -> str:
+
return "".join(map(str.strip, s.splitlines())).replace('␣', ' ').replace('↵', '\n')
+
+
def test_lists_styles() -> None:
+
# nested lists rotate through a number of list style
+
c = Converter({}, {})
+
assert c._render("- - - - foo") == unpretty("""
+
<div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: disc;">
+
<li class="listitem">
+
<div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: circle;">
+
<li class="listitem">
+
<div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: square;">
+
<li class="listitem">
+
<div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: disc;">
+
<li class="listitem"><p>foo</p></li>
+
</ul></div>
+
</li>
+
</ul></div>
+
</li>
+
</ul></div>
+
</li>
+
</ul></div>
+
""")
+
assert c._render("1. 1. 1. 1. 1. 1. foo") == unpretty("""
+
<div class="orderedlist"><ol class="orderedlist compact" type="1">
+
<li class="listitem">
+
<div class="orderedlist"><ol class="orderedlist compact" type="a">
+
<li class="listitem">
+
<div class="orderedlist"><ol class="orderedlist compact" type="i">
+
<li class="listitem">
+
<div class="orderedlist"><ol class="orderedlist compact" type="A">
+
<li class="listitem">
+
<div class="orderedlist"><ol class="orderedlist compact" type="I">
+
<li class="listitem">
+
<div class="orderedlist"><ol class="orderedlist compact" type="1">
+
<li class="listitem"><p>foo</p></li>
+
</ol></div>
+
</li>
+
</ol></div>
+
</li>
+
</ol></div>
+
</li>
+
</ol></div>
+
</li>
+
</ol></div>
+
</li>
+
</ol></div>
+
""")
+
+
def test_xrefs() -> None:
+
# nested lists rotate through a number of list style
+
c = Converter({}, {
+
'foo': nrd.manual_structure.XrefTarget('foo', '<hr/>', 'toc1', 'title1', 'index.html'),
+
'bar': nrd.manual_structure.XrefTarget('bar', '<br/>', 'toc2', 'title2', 'index.html', True),
+
})
+
assert c._render("[](#foo)") == '<p><a class="xref" href="index.html#foo" title="title1" ><hr/></a></p>'
+
assert c._render("[](#bar)") == '<p><a class="xref" href="index.html" title="title2" ><br/></a></p>'
+
with pytest.raises(nrd.html.UnresolvedXrefError) as exc:
+
c._render("[](#baz)")
+
assert exc.value.args[0] == 'bad local reference, id #baz not known'
+
+
def test_full() -> None:
+
c = Converter({ 'man(1)': 'http://example.org' }, {})
+
assert c._render(sample1) == unpretty("""
+
<div class="warning">
+
<h3 class="title">Warning</h3>
+
<p>foo</p>
+
<div class="note">
+
<h3 class="title">Note</h3>
+
<p>nested</p>
+
</div>
+
</div>
+
<p>
+
<a class="link" href="link" target="_top">↵
+
multiline↵
+
</a>
+
</p>
+
<p>
+
<a class="link" href="http://example.org" target="_top">
+
<span class="citerefentry"><span class="refentrytitle">man</span>(1)</span>
+
</a> reference
+
</p>
+
<p><a id="b" />some <a id="a" />nested anchors</p>
+
<p>
+
<span class="emphasis"><em>emph</em></span>␣
+
<span class="strong"><strong>strong</strong></span>␣
+
<span class="emphasis"><em>nesting emph <span class="strong"><strong>and strong</strong></span>␣
+
and <code class="literal">code</code></em></span>
+
</p>
+
<div class="itemizedlist">
+
<ul class="itemizedlist " style="list-style-type: disc;">
+
<li class="listitem"><p>wide bullet</p></li>
+
<li class="listitem"><p>list</p></li>
+
</ul>
+
</div>
+
<div class="orderedlist">
+
<ol class="orderedlist " type="1">
+
<li class="listitem"><p>wide ordered</p></li>
+
<li class="listitem"><p>list</p></li>
+
</ol>
+
</div>
+
<div class="itemizedlist">
+
<ul class="itemizedlist compact" style="list-style-type: disc;">
+
<li class="listitem"><p>narrow bullet</p></li>
+
<li class="listitem"><p>list</p></li>
+
</ul>
+
</div>
+
<div class="orderedlist">
+
<ol class="orderedlist compact" type="1">
+
<li class="listitem"><p>narrow ordered</p></li>
+
<li class="listitem"><p>list</p></li>
+
</ol>
+
</div>
+
<div class="blockquote">
+
<blockquote class="blockquote">
+
<p>quotes</p>
+
<div class="blockquote">
+
<blockquote class="blockquote">
+
<p>with <span class="emphasis"><em>nesting</em></span></p>
+
<pre class="programlisting">↵
+
nested code block↵
+
</pre>
+
</blockquote>
+
</div>
+
<div class="itemizedlist">
+
<ul class="itemizedlist compact" style="list-style-type: disc;">
+
<li class="listitem"><p>and lists</p></li>
+
<li class="listitem">
+
<pre class="programlisting">↵
+
containing code↵
+
</pre>
+
</li>
+
</ul>
+
</div>
+
<p>and more quote</p>
+
</blockquote>
+
</div>
+
<div class="orderedlist">
+
<ol class="orderedlist compact" start="100" type="1">
+
<li class="listitem"><p>list starting at 100</p></li>
+
<li class="listitem"><p>goes on</p></li>
+
</ol>
+
</div>
+
<div class="variablelist">
+
<dl class="variablelist">
+
<dt><span class="term">deflist</span></dt>
+
<dd>
+
<div class="blockquote">
+
<blockquote class="blockquote">
+
<p>
+
with a quote↵
+
and stuff
+
</p>
+
</blockquote>
+
</div>
+
<pre class="programlisting">↵
+
code block↵
+
</pre>
+
<pre class="programlisting">↵
+
fenced block↵
+
</pre>
+
<p>text</p>
+
</dd>
+
<dt><span class="term">more stuff in same deflist</span></dt>
+
<dd>
+
<p>foo</p>
+
</dd>
+
</dl>
+
</div>""")
+5 -3
pkgs/tools/nix/nixos-render-docs/src/tests/test_lists.py
···
-
import nixos_render_docs
+
import nixos_render_docs as nrd
import pytest
from markdown_it.token import Token
-
class Converter(nixos_render_docs.md.Converter):
+
class Converter(nrd.md.Converter[nrd.docbook.DocBookRenderer]):
# actual renderer doesn't matter, we're just parsing.
-
__renderer__ = nixos_render_docs.docbook.DocBookRenderer
+
def __init__(self, manpage_urls: dict[str, str]) -> None:
+
super().__init__()
+
self._renderer = nrd.docbook.DocBookRenderer(manpage_urls)
@pytest.mark.parametrize("ordered", [True, False])
def test_list_wide(ordered: bool) -> None:
+8 -15
pkgs/tools/nix/nixos-render-docs/src/tests/test_manpage.py
···
-
import nixos_render_docs
+
import nixos_render_docs as nrd
from sample_md import sample1
···
import markdown_it
-
class Converter(nixos_render_docs.md.Converter):
-
def __renderer__(self, manpage_urls: Mapping[str, str],
-
parser: Optional[markdown_it.MarkdownIt] = None
-
) -> nixos_render_docs.manpage.ManpageRenderer:
-
return nixos_render_docs.manpage.ManpageRenderer(manpage_urls, self.options_by_id, parser)
-
+
class Converter(nrd.md.Converter[nrd.manpage.ManpageRenderer]):
def __init__(self, manpage_urls: Mapping[str, str], options_by_id: dict[str, str] = {}):
-
self.options_by_id = options_by_id
-
super().__init__(manpage_urls)
+
super().__init__()
+
self._renderer = nrd.manpage.ManpageRenderer(manpage_urls, options_by_id)
def test_inline_code() -> None:
c = Converter({})
···
def test_collect_links() -> None:
c = Converter({}, { '#foo': "bar" })
-
assert isinstance(c._md.renderer, nixos_render_docs.manpage.ManpageRenderer)
-
c._md.renderer.link_footnotes = []
+
c._renderer.link_footnotes = []
assert c._render("[a](link1) [b](link2)") == "\\fBa\\fR[1]\\fR \\fBb\\fR[2]\\fR"
-
assert c._md.renderer.link_footnotes == ['link1', 'link2']
+
assert c._renderer.link_footnotes == ['link1', 'link2']
def test_dedup_links() -> None:
c = Converter({}, { '#foo': "bar" })
-
assert isinstance(c._md.renderer, nixos_render_docs.manpage.ManpageRenderer)
-
c._md.renderer.link_footnotes = []
+
c._renderer.link_footnotes = []
assert c._render("[a](link) [b](link)") == "\\fBa\\fR[1]\\fR \\fBb\\fR[1]\\fR"
-
assert c._md.renderer.link_footnotes == ['link']
+
assert c._renderer.link_footnotes == ['link']
def test_full() -> None:
c = Converter({ 'man(1)': 'http://example.org' })
+5 -3
pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py
···
-
import nixos_render_docs
+
import nixos_render_docs as nrd
from markdown_it.token import Token
-
class Converter(nixos_render_docs.md.Converter):
+
class Converter(nrd.md.Converter[nrd.docbook.DocBookRenderer]):
# actual renderer doesn't matter, we're just parsing.
-
__renderer__ = nixos_render_docs.docbook.DocBookRenderer
+
def __init__(self, manpage_urls: dict[str, str]) -> None:
+
super().__init__()
+
self._renderer = nrd.docbook.DocBookRenderer(manpage_urls)
def test_attr_span_parsing() -> None:
c = Converter({})
+2 -2
pkgs/tools/package-management/libdnf/default.nix
···
stdenv.mkDerivation rec {
pname = "libdnf";
-
version = "0.69.0";
+
version = "0.70.0";
src = fetchFromGitHub {
owner = "rpm-software-management";
repo = pname;
rev = version;
-
sha256 = "sha256-Mc9yI18D4OYv8l4axQ8W0XZ8HfmEZ5IhHC6/uKkv0Ec=";
+
sha256 = "sha256-tuHrkL3tL+sCLPxNElVgnb4zQ6OTu65X9pb/cX6vD/w=";
};
nativeBuildInputs = [
+5 -4
pkgs/tools/security/waf-tester/default.nix
···
buildGoModule rec {
pname = "waf-tester";
-
version = "0.6.12";
+
version = "0.6.13";
src = fetchFromGitHub {
owner = "jreisinger";
repo = pname;
-
rev = "v${version}";
-
hash = "sha256-baj9JuC4PF5c50K2aY+xwdE9t4aTzOu+isqJ6r1pWuc=";
+
rev = "refs/tags/v${version}";
+
hash = "sha256-UPviooQNGRVwf/bTz9ApedJDAGeCvh9iD1HXFOQXPcw=";
};
-
vendorSha256 = "sha256-qVzgZX4HVXZ3qgYAu3a46vcGl4Pk2D1Zx/giEmPEG88=";
+
vendorHash = "sha256-HOYHrR1LtVcXMKFHPaA7PYH4Fp9nhqal2oxYTq/i4/8=";
ldflags = [
"-s"
···
meta = with lib; {
description = "Tool to test Web Application Firewalls (WAFs)";
homepage = "https://github.com/jreisinger/waf-tester";
+
changelog = "https://github.com/jreisinger/waf-tester/releases/tag/v${version}";
license = licenses.gpl3Only;
maintainers = with maintainers; [ fab ];
};
+3 -3
pkgs/tools/system/automatic-timezoned/default.nix
···
rustPlatform.buildRustPackage rec {
pname = "automatic-timezoned";
-
version = "1.0.62";
+
version = "1.0.68";
src = fetchFromGitHub {
owner = "maxbrunet";
repo = pname;
rev = "v${version}";
-
sha256 = "sha256-3T9/VAr/ZrGTZZK3rsIpnOeKdp9WxPO0JkGamDi3hyM=";
+
sha256 = "sha256-wtmyUlkruFE3dQmsb9x2683gwEVjsBCQJ8VW4b0IdkU=";
};
-
cargoHash = "sha256-rNMEXvAGpKxn2t6uvgTx3sc3tpGCXmzOM/iPWwWq2JM=";
+
cargoHash = "sha256-nQx70KtWzvg6w8UNJqTrqzBc5SZKwCiHx2jhoBbmNP4=";
meta = with lib; {
description = "Automatically update system timezone based on location";
+3 -3
pkgs/tools/system/zenith/default.nix
···
rustPlatform.buildRustPackage rec {
pname = "zenith";
-
version = "0.13.1";
+
version = "0.14.0";
src = fetchFromGitHub {
owner = "bvaisvil";
repo = pname;
rev = version;
-
sha256 = "sha256-N/DvPVYGM/DjTvKvOlR60q6rvNyfAQlnvFnFG5nbUmQ=";
+
sha256 = "sha256-GrrdE9Ih8x8N2HN+1NfxfthfHbufLAT/Ac+ZZWW5Zg8=";
};
# remove cargo config so it can find the linker on aarch64-linux
···
rm .cargo/config
'';
-
cargoSha256 = "sha256-Y/vvRJpv82Uc+Bu3lbZxRsu4TL6sAjz5AWHAHkwh98Y=";
+
cargoHash = "sha256-2VgyUVBcmSlmPSqAWrzWjH5J6Co/rAC9EQCckYzfW2o=";
nativeBuildInputs = [ llvmPackages.clang ] ++ lib.optional nvidiaSupport makeWrapper;
buildInputs = [ llvmPackages.libclang ] ++ lib.optionals stdenv.isDarwin [ IOKit ];
+2
pkgs/top-level/python-packages.nix
···
ukrainealarm = callPackage ../development/python-modules/ukrainealarm { };
+
ulid-transform = callPackage ../development/python-modules/ulid-transform { };
+
ultraheat-api = callPackage ../development/python-modules/ultraheat-api { };
umalqurra = callPackage ../development/python-modules/umalqurra { };