Diffstat (limited to 'pkgs')
-rw-r--r--  pkgs/applications/audio/audacity/default.nix | 4
-rw-r--r--  pkgs/applications/audio/ft2-clone/default.nix | 4
-rw-r--r--  pkgs/applications/audio/jmusicbot/default.nix | 4
-rw-r--r--  pkgs/applications/audio/monkeys-audio/default.nix | 39
-rw-r--r--  pkgs/applications/audio/mpdevil/default.nix | 10
-rw-r--r--  pkgs/applications/audio/mympd/default.nix | 6
-rw-r--r--  pkgs/applications/blockchains/clightning/default.nix | 4
-rw-r--r--  pkgs/applications/emulators/wine/sources.nix | 4
-rw-r--r--  pkgs/applications/emulators/yapesdl/default.nix | 27
-rw-r--r--  pkgs/applications/graphics/gscan2pdf/default.nix | 8
-rw-r--r--  pkgs/applications/graphics/gscan2pdf/ffmpeg5-compat.patch | 15
-rw-r--r--  pkgs/applications/graphics/pdfcpu/default.nix | 6
-rw-r--r--  pkgs/applications/misc/cotp/default.nix | 6
-rw-r--r--  pkgs/applications/misc/hugo/default.nix | 6
-rw-r--r--  pkgs/applications/misc/mwic/default.nix | 4
-rw-r--r--  pkgs/applications/misc/octoprint/default.nix | 6
-rw-r--r--  pkgs/applications/misc/phoc/default.nix | 2
-rw-r--r--  pkgs/applications/misc/process-compose/default.nix | 6
-rw-r--r--  pkgs/applications/misc/pueue/default.nix | 6
-rw-r--r--  pkgs/applications/misc/qt-box-editor/default.nix | 4
-rw-r--r--  pkgs/applications/misc/syncthingtray/default.nix | 22
-rw-r--r--  pkgs/applications/misc/syncthingtray/use-nix-path-in-autostart.patch | 13
-rw-r--r--  pkgs/applications/networking/cluster/argo-rollouts/default.nix | 4
-rw-r--r--  pkgs/applications/networking/cluster/cloudfoundry-cli/default.nix | 4
-rw-r--r--  pkgs/applications/networking/cluster/clusterctl/default.nix | 4
-rw-r--r--  pkgs/applications/networking/cluster/k3sup/default.nix | 6
-rw-r--r--  pkgs/applications/networking/cluster/kubecfg/default.nix | 4
-rw-r--r--  pkgs/applications/networking/cluster/ocm/default.nix | 6
-rw-r--r--  pkgs/applications/networking/cluster/terraform-providers/providers.json | 62
-rw-r--r--  pkgs/applications/networking/feedreaders/castget/default.nix | 24
-rw-r--r--  pkgs/applications/networking/instant-messengers/deltachat-desktop/default.nix | 12
-rw-r--r--  pkgs/applications/networking/irc/ircdog/default.nix | 10
-rw-r--r--  pkgs/applications/networking/sniffers/wireshark/default.nix | 2
-rw-r--r--  pkgs/applications/science/biology/diamond/default.nix | 4
-rw-r--r--  pkgs/applications/science/biology/picard-tools/default.nix | 4
-rw-r--r--  pkgs/applications/science/logic/cubicle/default.nix | 13
-rw-r--r--  pkgs/applications/science/math/maxima/default.nix | 4
-rw-r--r--  pkgs/applications/science/math/wxmaxima/default.nix | 4
-rw-r--r--  pkgs/applications/science/misc/root/default.nix | 9
-rw-r--r--  pkgs/applications/system/monitor/default.nix | 4
-rw-r--r--  pkgs/applications/version-management/git-machete/default.nix | 4
-rw-r--r--  pkgs/applications/version-management/mercurial/default.nix | 6
-rw-r--r--  pkgs/applications/video/mkvtoolnix/default.nix | 4
-rw-r--r--  pkgs/applications/virtualization/lkl/default.nix | 13
-rw-r--r--  pkgs/applications/virtualization/nixpacks/default.nix | 6
-rw-r--r--  pkgs/applications/virtualization/pods/default.nix | 6
-rw-r--r--  pkgs/applications/virtualization/virtualbox/default.nix | 16
-rw-r--r--  pkgs/applications/virtualization/virtualbox/extpack.nix | 2
-rw-r--r--  pkgs/applications/virtualization/virtualbox/guest-additions/default.nix | 2
-rw-r--r--  pkgs/applications/virtualization/virtualbox/qt-dependency-paths.patch (renamed from pkgs/applications/virtualization/virtualbox/qtx11extras.patch) | 4
-rw-r--r--  pkgs/applications/window-managers/phosh/default.nix | 2
-rw-r--r--  pkgs/data/fonts/nerdfonts/default.nix | 1
-rwxr-xr-x  pkgs/data/fonts/nerdfonts/update.sh | 11
-rw-r--r--  pkgs/data/misc/v2ray-geoip/default.nix | 6
-rw-r--r--  pkgs/desktops/pantheon/desktop/wingpanel-indicators/network/default.nix | 14
-rw-r--r--  pkgs/desktops/pantheon/services/xdg-desktop-portal-pantheon/default.nix | 20
-rw-r--r--  pkgs/development/interpreters/erlang/R24.nix | 4
-rw-r--r--  pkgs/development/interpreters/python/cpython/default.nix | 22
-rw-r--r--  pkgs/development/libraries/fizz/default.nix | 4
-rw-r--r--  pkgs/development/libraries/hipblas/default.nix | 2
-rw-r--r--  pkgs/development/libraries/libdatovka/default.nix | 4
-rw-r--r--  pkgs/development/libraries/libsodium/default.nix | 13
-rw-r--r--  pkgs/development/libraries/mpdecimal/default.nix | 10
-rw-r--r--  pkgs/development/libraries/qrupdate/default.nix | 62
-rw-r--r--  pkgs/development/libraries/rccl/default.nix | 2
-rw-r--r--  pkgs/development/libraries/rocksdb/default.nix | 4
-rw-r--r--  pkgs/development/libraries/rocprofiler/default.nix | 2
-rw-r--r--  pkgs/development/libraries/wxsqlite3/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/adb-enhanced/default.nix | 9
-rw-r--r--  pkgs/development/python-modules/angrop/default.nix | 23
-rw-r--r--  pkgs/development/python-modules/ansible-lint/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/beancount-black/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/ciscoconfparse/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/crate/default.nix | 20
-rw-r--r--  pkgs/development/python-modules/dask-awkward/default.nix | 60
-rw-r--r--  pkgs/development/python-modules/dtschema/default.nix | 52
-rw-r--r--  pkgs/development/python-modules/duckdb-engine/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/execnb/default.nix | 33
-rw-r--r--  pkgs/development/python-modules/fakeredis/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/fastai/default.nix | 53
-rw-r--r--  pkgs/development/python-modules/fastdownload/default.nix | 32
-rw-r--r--  pkgs/development/python-modules/google-cloud-speech/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/homematicip/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/lsprotocol/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/mypy-boto3-builder/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/nbdev/default.nix | 36
-rw-r--r--  pkgs/development/python-modules/openapi-core/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/peaqevcore/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/pyfibaro/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/pyfritzhome/default.nix | 18
-rw-r--r--  pkgs/development/python-modules/pykeyatome/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/pyoverkiz/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/pypykatz/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/pysma/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/pysml/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/pytest-md-report/default.nix | 43
-rw-r--r--  pkgs/development/python-modules/python-velbus/default.nix | 20
-rw-r--r--  pkgs/development/python-modules/pyvizio/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/reolink-aio/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/snscrape/default.nix | 8
-rw-r--r--  pkgs/development/python-modules/tesserocr/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/textnets/default.nix | 75
-rw-r--r--  pkgs/development/python-modules/thinc/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/ulid-transform/default.nix | 51
-rw-r--r--  pkgs/development/python-modules/youless-api/default.nix | 4
-rw-r--r--  pkgs/development/tools/analysis/cargo-tarpaulin/default.nix | 7
-rw-r--r--  pkgs/development/tools/build-managers/corrosion/default.nix | 6
-rw-r--r--  pkgs/development/tools/continuous-integration/cirrus-cli/default.nix | 6
-rw-r--r--  pkgs/development/tools/datree/default.nix | 4
-rw-r--r--  pkgs/development/tools/dt-schema/default.nix | 37
-rw-r--r--  pkgs/development/tools/esbuild/default.nix | 4
-rw-r--r--  pkgs/development/tools/ginkgo/default.nix | 6
-rw-r--r--  pkgs/development/tools/icr/default.nix | 6
-rw-r--r--  pkgs/development/tools/icr/shards.nix | 3
-rw-r--r--  pkgs/development/tools/language-servers/millet/default.nix | 6
-rw-r--r--  pkgs/development/tools/oh-my-posh/default.nix | 4
-rw-r--r--  pkgs/development/tools/pscale/default.nix | 6
-rw-r--r--  pkgs/development/tools/rgp/default.nix | 8
-rw-r--r--  pkgs/development/tools/richgo/default.nix | 6
-rw-r--r--  pkgs/development/tools/rover/default.nix | 33
-rw-r--r--  pkgs/development/tools/rover/schema/etag.id | 2
-rw-r--r--  pkgs/development/tools/rover/schema/hash.id | 1
-rw-r--r--  pkgs/development/tools/rover/schema/schema.graphql | 2108
-rw-r--r--  pkgs/development/tools/rust/typeshare/default.nix | 29
-rw-r--r--  pkgs/games/osu-lazer/bin.nix | 4
-rw-r--r--  pkgs/games/osu-lazer/default.nix | 4
-rw-r--r--  pkgs/games/osu-lazer/deps.nix | 81
-rw-r--r--  pkgs/games/unciv/default.nix | 4
-rw-r--r--  pkgs/misc/fastly/default.nix | 4
-rw-r--r--  pkgs/os-specific/linux/firmware/system76-firmware/default.nix | 6
-rw-r--r--  pkgs/os-specific/linux/fnotifystat/default.nix | 4
-rw-r--r--  pkgs/os-specific/linux/kernel/zen-kernels.nix | 10
-rw-r--r--  pkgs/servers/dns/bind/default.nix | 17
-rw-r--r--  pkgs/servers/gotify/source-sha.nix | 2
-rwxr-xr-x  pkgs/servers/gotify/update.sh | 2
-rw-r--r--  pkgs/servers/gotify/version.nix | 2
-rw-r--r--  pkgs/servers/misc/gobgpd/default.nix | 6
-rw-r--r--  pkgs/servers/monitoring/grafana/default.nix | 6
-rw-r--r--  pkgs/servers/monitoring/grafana/plugins/doitintl-bigquery-datasource/default.nix | 4
-rw-r--r--  pkgs/servers/monitoring/grafana/plugins/grafadruid-druid-datasource/default.nix | 4
-rw-r--r--  pkgs/servers/monitoring/grafana/plugins/grafana-clock-panel/default.nix | 4
-rw-r--r--  pkgs/servers/monitoring/grafana/plugins/grafana-piechart-panel/default.nix | 4
-rw-r--r--  pkgs/servers/monitoring/grafana/plugins/grafana-polystat-panel/default.nix | 4
-rw-r--r--  pkgs/servers/monitoring/grafana/plugins/grafana-worldmap-panel/default.nix | 4
-rw-r--r--  pkgs/servers/monitoring/prometheus/redis-exporter.nix | 6
-rw-r--r--  pkgs/servers/nosql/redis/default.nix | 4
-rw-r--r--  pkgs/servers/piping-server-rust/default.nix | 6
-rw-r--r--  pkgs/servers/plex/raw.nix | 6
-rw-r--r--  pkgs/servers/roon-server/default.nix | 4
-rw-r--r--  pkgs/servers/sql/postgresql/ext/temporal_tables.nix | 9
-rw-r--r--  pkgs/servers/syncstorage-rs/default.nix | 9
-rw-r--r--  pkgs/servers/web-apps/5etools/default.nix | 1
-rw-r--r--  pkgs/servers/x11/xorg/default.nix | 6
-rw-r--r--  pkgs/servers/x11/xorg/tarballs.list | 2
-rw-r--r--  pkgs/tools/admin/aliyun-cli/default.nix | 6
-rw-r--r--  pkgs/tools/admin/aws-rotate-key/default.nix | 6
-rw-r--r--  pkgs/tools/admin/coldsnap/default.nix | 6
-rw-r--r--  pkgs/tools/admin/eksctl/default.nix | 6
-rw-r--r--  pkgs/tools/admin/pgadmin/default.nix | 4
-rw-r--r--  pkgs/tools/admin/pgadmin/package.json | 5
-rw-r--r--  pkgs/tools/admin/pgadmin/yarn.lock | 23
-rw-r--r--  pkgs/tools/admin/pgadmin/yarn.nix | 32
-rw-r--r--  pkgs/tools/admin/qovery-cli/default.nix | 6
-rw-r--r--  pkgs/tools/archivers/snzip/default.nix | 6
-rw-r--r--  pkgs/tools/cd-dvd/vobsub2srt/default.nix | 4
-rw-r--r--  pkgs/tools/graphics/gmic-qt/default.nix | 28
-rw-r--r--  pkgs/tools/graphics/gmic/default.nix | 9
-rw-r--r--  pkgs/tools/misc/atuin/default.nix | 4
-rw-r--r--  pkgs/tools/misc/bash_unit/default.nix | 4
-rw-r--r--  pkgs/tools/misc/fortune/default.nix | 8
-rw-r--r--  pkgs/tools/misc/vector/default.nix | 6
-rw-r--r--  pkgs/tools/misc/yt-dlp/default.nix | 4
-rw-r--r--  pkgs/tools/networking/libreswan/default.nix | 4
-rw-r--r--  pkgs/tools/networking/ookla-speedtest/default.nix | 8
-rw-r--r--  pkgs/tools/networking/pacparser/default.nix | 4
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py | 144
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py | 139
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py | 166
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py | 245
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py | 148
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py | 696
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py | 186
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py | 198
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py | 201
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/types.py | 5
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/utils.py | 21
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/tests/test_asciidoc.py | 8
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py | 8
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/tests/test_headings.py | 8
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/tests/test_html.py | 179
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/tests/test_lists.py | 8
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/tests/test_manpage.py | 23
-rw-r--r--  pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py | 8
-rw-r--r--  pkgs/tools/package-management/libdnf/default.nix | 4
-rw-r--r--  pkgs/tools/package-management/nix-update/default.nix | 4
-rw-r--r--  pkgs/tools/security/aiodnsbrute/default.nix | 36
-rw-r--r--  pkgs/tools/security/cloudfox/default.nix | 6
-rw-r--r--  pkgs/tools/security/grype/default.nix | 15
-rw-r--r--  pkgs/tools/security/otpauth/default.nix | 6
-rw-r--r--  pkgs/tools/security/rekor/default.nix | 2
-rw-r--r--  pkgs/tools/security/trufflehog/default.nix | 4
-rw-r--r--  pkgs/tools/security/vault/default.nix | 6
-rw-r--r--  pkgs/tools/security/waf-tester/default.nix | 9
-rw-r--r--  pkgs/tools/system/automatic-timezoned/default.nix | 6
-rw-r--r--  pkgs/tools/system/nsc/default.nix | 6
-rw-r--r--  pkgs/tools/system/zenith/default.nix | 6
-rw-r--r--  pkgs/tools/text/miller/default.nix | 6
-rw-r--r--  pkgs/tools/text/ripgrep-all/default.nix | 4
-rw-r--r--  pkgs/tools/text/tuc/default.nix | 6
-rw-r--r--  pkgs/tools/wayland/wl-mirror/default.nix | 4
-rw-r--r--  pkgs/top-level/all-packages.nix | 6
-rw-r--r--  pkgs/top-level/python-packages.nix | 22
-rw-r--r--  pkgs/top-level/stage.nix | 6
213 files changed, 4886 insertions, 1620 deletions
diff --git a/pkgs/applications/audio/audacity/default.nix b/pkgs/applications/audio/audacity/default.nix
index 4a652e1ac474d..6d8f279dadbd1 100644
--- a/pkgs/applications/audio/audacity/default.nix
+++ b/pkgs/applications/audio/audacity/default.nix
@@ -61,13 +61,13 @@
 
 stdenv.mkDerivation rec {
   pname = "audacity";
-  version = "3.2.4";
+  version = "3.2.5";
 
   src = fetchFromGitHub {
     owner = pname;
     repo = pname;
     rev = "Audacity-${version}";
-    hash = "sha256-gz2o0Rj4364nJAvJmMQzwIQycoQmqz2/43DBvd3qbho=";
+    hash = "sha256-tMz55fZh+TfvLEyApDqC0QMd2hEQLJsNQ6y2Xy0xgaQ=";
   };
 
   postPatch = ''
diff --git a/pkgs/applications/audio/ft2-clone/default.nix b/pkgs/applications/audio/ft2-clone/default.nix
index 886662faf473c..e5d132ac6a69b 100644
--- a/pkgs/applications/audio/ft2-clone/default.nix
+++ b/pkgs/applications/audio/ft2-clone/default.nix
@@ -13,13 +13,13 @@
 
 stdenv.mkDerivation rec {
   pname = "ft2-clone";
-  version = "1.63";
+  version = "1.65";
 
   src = fetchFromGitHub {
     owner = "8bitbubsy";
     repo = "ft2-clone";
     rev = "v${version}";
-    sha256 = "sha256-uDAW97lTeL15PPpR5vlIS371EZ7BBNd86ETPEB8joSU=";
+    sha256 = "sha256-Jo1qs0d8/o9FWR7jboWCJ7ntawBGTlm7yPzxxUnZLsI=";
   };
 
   # Adapt the linux-only CMakeLists to darwin (more reliable than make-macos.sh)
diff --git a/pkgs/applications/audio/jmusicbot/default.nix b/pkgs/applications/audio/jmusicbot/default.nix
index 7a1676e7873c5..590c695577cff 100644
--- a/pkgs/applications/audio/jmusicbot/default.nix
+++ b/pkgs/applications/audio/jmusicbot/default.nix
@@ -2,11 +2,11 @@
 
 stdenv.mkDerivation rec {
   pname = "JMusicBot";
-  version = "0.3.8";
+  version = "0.3.9";
 
   src = fetchurl {
     url = "https://github.com/jagrosh/MusicBot/releases/download/${version}/JMusicBot-${version}.jar";
-    sha256 = "sha256-wzmrh9moY6oo3RqOy9Zl1X70BZlvbJkQmz8BaBIFtIM=";
+    sha256 = "sha256-2A1yo2e1MawGLMTM6jWwpQJJuKOmljxFriORv90Jqg8=";
   };
 
   dontUnpack = true;
diff --git a/pkgs/applications/audio/monkeys-audio/default.nix b/pkgs/applications/audio/monkeys-audio/default.nix
index 05647af2201aa..01bb8b6253e2a 100644
--- a/pkgs/applications/audio/monkeys-audio/default.nix
+++ b/pkgs/applications/audio/monkeys-audio/default.nix
@@ -1,33 +1,30 @@
-{lib, gcc10Stdenv, fetchurl}:
+{ lib
+, stdenv
+, fetchzip
+, cmake
+}:
 
-gcc10Stdenv.mkDerivation rec {
-  version = "3.99-u4-b5";
-  pname = "monkeys-audio-old";
+stdenv.mkDerivation rec {
+  version = "9.20";
+  pname = "monkeys-audio";
 
-  patches = [ ./buildfix.diff ];
-
-  src = fetchurl {
-    /*
-    The real homepage is <https://monkeysaudio.com/>, but in fact we are
-    getting an old, ported to Linux version of the sources, made by (quoting
-    from the AUTHORS file found in the source):
-
-    Frank Klemm : First port to linux (with makefile)
-
-    SuperMMX <SuperMMX AT GMail DOT com> : Package the source, include the frontend and shared lib,
-         porting to Big Endian platform and adding other non-win32 enhancement.
-    */
-    url = "https://deb-multimedia.org/pool/main/m/${pname}/${pname}_${version}.orig.tar.gz";
-    sha256 = "0kjfwzfxfx7f958b2b1kf8yj655lp0ppmn0sh57gbkjvj8lml7nz";
+  src = fetchzip {
+    url = "https://monkeysaudio.com/files/MAC_${
+      builtins.concatStringsSep "" (lib.strings.splitString "." version)}_SDK.zip";
+    sha256 = "sha256-8cJ88plR9jrrLdzRHzRotGBrn6qIqOWvl+oOTXxY/TE=";
+    stripRoot = false;
   };
+  nativeBuildInputs = [
+    cmake
+  ];
 
   meta = with lib; {
-    description = "Lossless audio codec";
+    description = "APE codec and decompressor";
     platforms = platforms.linux;
     # This is not considered a GPL license, but it seems rather free although
     # it's not standard, see a quote of it:
     # https://github.com/NixOS/nixpkgs/pull/171682#issuecomment-1120260551
     license = licenses.free;
-    maintainers = [ ];
+    maintainers = with maintainers; [ doronbehar ];
   };
 }
diff --git a/pkgs/applications/audio/mpdevil/default.nix b/pkgs/applications/audio/mpdevil/default.nix
index f8fac7d14a9f5..394812a586b8a 100644
--- a/pkgs/applications/audio/mpdevil/default.nix
+++ b/pkgs/applications/audio/mpdevil/default.nix
@@ -1,23 +1,25 @@
 { lib, fetchFromGitHub
+, pkg-config, meson ,ninja
 , python3Packages
 , gdk-pixbuf, glib, gobject-introspection, gtk3
 , libnotify
-, intltool
 , wrapGAppsHook }:
 
 python3Packages.buildPythonApplication rec {
   pname = "mpdevil";
-  version = "1.4.1";
+  version = "1.10.1";
 
   src = fetchFromGitHub {
     owner = "SoongNoonien";
     repo = pname;
     rev = "v${version}";
-    sha256 = "1a5nhlbgi3ahnkcq16c2vgiaghgswy5lxg64pcrlbqssg1pj5gma";
+    sha256 = "sha256-w31e8cJvdep/ZzmDBCfdCZotrPunQBl1cTTWjs3sE1w=";
   };
 
+  format = "other";
+
   nativeBuildInputs = [
-    glib.dev gobject-introspection gtk3 intltool wrapGAppsHook
+    glib.dev gobject-introspection gtk3 pkg-config meson ninja wrapGAppsHook
   ];
 
   buildInputs = [
diff --git a/pkgs/applications/audio/mympd/default.nix b/pkgs/applications/audio/mympd/default.nix
index 2e52d8232e469..a1a958c3d558f 100644
--- a/pkgs/applications/audio/mympd/default.nix
+++ b/pkgs/applications/audio/mympd/default.nix
@@ -11,17 +11,18 @@
 , pcre2
 , gzip
 , perl
+, jq
 }:
 
 stdenv.mkDerivation rec {
   pname = "mympd";
-  version = "9.5.4";
+  version = "10.2.4";
 
   src = fetchFromGitHub {
     owner = "jcorporation";
     repo = "myMPD";
     rev = "v${version}";
-    sha256 = "sha256-0X/rEVfJ6zzX75R72xVntOfuCt8srp9PkiYOq3XbWPs=";
+    sha256 = "sha256-12hCIAwrLQkwiU9t9nNPBdIiHfMidfErSWOA0FPfhBQ=";
   };
 
   nativeBuildInputs = [
@@ -29,6 +30,7 @@ stdenv.mkDerivation rec {
     cmake
     gzip
     perl
+    jq
   ];
   preConfigure = ''
     env MYMPD_BUILDDIR=$PWD/build ./build.sh createassets
diff --git a/pkgs/applications/blockchains/clightning/default.nix b/pkgs/applications/blockchains/clightning/default.nix
index 45854ddcd631f..700a65f3a58d4 100644
--- a/pkgs/applications/blockchains/clightning/default.nix
+++ b/pkgs/applications/blockchains/clightning/default.nix
@@ -22,11 +22,11 @@ let
 in
 stdenv.mkDerivation rec {
   pname = "clightning";
-  version = "22.11.1";
+  version = "23.02";
 
   src = fetchurl {
     url = "https://github.com/ElementsProject/lightning/releases/download/v${version}/clightning-v${version}.zip";
-    sha256 = "sha256-F48jmG9voNp6+IMRVkJi6O0DXVQxKyYkOA0UBCKktIw=";
+    sha256 = "sha256-uvk7sApIwlrkH8eERBetf/nsAkN2d35T/IEtICFflzY=";
   };
 
   # when building on darwin we need dawin.cctools to provide the correct libtool
diff --git a/pkgs/applications/emulators/wine/sources.nix b/pkgs/applications/emulators/wine/sources.nix
index 0d7ac403ad198..19c230dc8572b 100644
--- a/pkgs/applications/emulators/wine/sources.nix
+++ b/pkgs/applications/emulators/wine/sources.nix
@@ -145,8 +145,8 @@ in rec {
 
   winetricks = fetchFromGitHub rec {
     # https://github.com/Winetricks/winetricks/releases
-    version = "20220411";
-    hash = "sha256-FjH10nZDYbqXI6/vKpZJKfv2maXSVkahNDf5UTU3eyU=";
+    version = "20230212";
+    hash = "sha256-pd37QTcqY5ZaVBssGecuqziOIq1p0JH0ZDB+oLmp9JU=";
     owner = "Winetricks";
     repo = "winetricks";
     rev = version;
diff --git a/pkgs/applications/emulators/yapesdl/default.nix b/pkgs/applications/emulators/yapesdl/default.nix
index 5f4b7771fb5ff..2ea3583edd76e 100644
--- a/pkgs/applications/emulators/yapesdl/default.nix
+++ b/pkgs/applications/emulators/yapesdl/default.nix
@@ -5,20 +5,21 @@
 , SDL2
 }:
 
-stdenv.mkDerivation rec {
+stdenv.mkDerivation (self: {
   pname = "yapesdl";
-  version = "0.70.2";
+  version = "0.71.2";
 
   src = fetchFromGitHub {
     owner = "calmopyrin";
-    repo = pname;
-    rev = "v${version}";
-    hash = "sha256-51P6wNaSfVA3twu+yRUKXguEmVBvuuEnHxH1Zl1vsCc=";
+    repo = "yapesdl";
+    rev = "v${self.version}";
+    hash = "sha256-QGF3aS/YSzdGxHONKyA/iTewEVYsjBAsKARVMXkFV2k=";
   };
 
   nativeBuildInputs = [
     pkg-config
   ];
+
   buildInputs = [
     SDL2
   ];
@@ -27,17 +28,17 @@ stdenv.mkDerivation rec {
 
   installPhase = ''
     runHook preInstall
-    install --directory $out/bin $out/share/doc/$pname
-    install yapesdl $out/bin/
-    install README.SDL $out/share/doc/$pname/
+    install -Dm755 yapesdl -t $out/bin/
+    install -Dm755 README.SDL -t $out/share/doc/yapesdl/
     runHook postInstall
   '';
 
-  meta = with lib; {
+  meta = {
     homepage = "http://yape.plus4.net/";
     description = "Multiplatform Commodore 64 and 264 family emulator";
-    license = licenses.gpl2Plus;
-    maintainers = with maintainers; [ AndersonTorres ];
-    platforms = platforms.unix;
+    license = lib.licenses.gpl2Plus;
+    maintainers = with lib.maintainers; [ AndersonTorres ];
+    platforms = lib.platforms.unix;
+    broken = stdenv.isDarwin;
   };
-}
+})
diff --git a/pkgs/applications/graphics/gscan2pdf/default.nix b/pkgs/applications/graphics/gscan2pdf/default.nix
index 9da51083c5ab6..9e746cfbd7a6f 100644
--- a/pkgs/applications/graphics/gscan2pdf/default.nix
+++ b/pkgs/applications/graphics/gscan2pdf/default.nix
@@ -10,17 +10,13 @@ with lib;
 
 perlPackages.buildPerlPackage rec {
   pname = "gscan2pdf";
-  version = "2.12.8";
+  version = "2.13.2";
 
   src = fetchurl {
     url = "mirror://sourceforge/gscan2pdf/gscan2pdf-${version}.tar.xz";
-    hash = "sha256-dmN2fMBDZqgvdHQryQgjmBHeH/h2dihRH8LkflFYzTk=";
+    hash = "sha256-NGz6DUa7TdChpgwmD9pcGdvYr3R+Ft3jPPSJpybCW4Q=";
   };
 
-  patches = [
-    ./ffmpeg5-compat.patch
-  ];
-
   nativeBuildInputs = [ wrapGAppsHook ];
 
   buildInputs =
diff --git a/pkgs/applications/graphics/gscan2pdf/ffmpeg5-compat.patch b/pkgs/applications/graphics/gscan2pdf/ffmpeg5-compat.patch
deleted file mode 100644
index ff522735fe350..0000000000000
--- a/pkgs/applications/graphics/gscan2pdf/ffmpeg5-compat.patch
+++ /dev/null
@@ -1,15 +0,0 @@
---- a/t/351_unpaper.t
-+++ b/t/351_unpaper.t
-@@ -88,8 +88,10 @@
- 
-                         # if we use unlike, we no longer
-                         # know how many tests there will be
--                        if ( $msg !~
--/(deprecated|Encoder did not produce proper pts, making some up)/
-+                        if ( $msg !~ /( deprecated |
-+                            \Qdoes not contain an image sequence pattern\E |
-+                            \QEncoder did not produce proper pts, making some up\E |
-+                            \Quse the -update option\E )/x
-                           )
-                         {
-                             fail 'no warnings';
diff --git a/pkgs/applications/graphics/pdfcpu/default.nix b/pkgs/applications/graphics/pdfcpu/default.nix
index 4952ecf84f23c..171f4a2c5b121 100644
--- a/pkgs/applications/graphics/pdfcpu/default.nix
+++ b/pkgs/applications/graphics/pdfcpu/default.nix
@@ -2,16 +2,16 @@
 
 buildGoModule rec {
   pname = "pdfcpu";
-  version = "0.3.13";
+  version = "0.4.0";
 
   src = fetchFromGitHub {
     owner = "pdfcpu";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-CFKo8YEAXAniX+jL2A0naJUOn3KAWwcrPsabdiZevhI=";
+    sha256 = "sha256-l3vJDF2c6h/trfnAGxu7XEoDoj7bB4tATBUlxKFYfUs=";
   };
 
-  vendorSha256 = "sha256-3y42rbhurGhCI9PuSayxmLem0tv/nTjBwYxF3Dk6/yM=";
+  vendorSha256 = "sha256-611eLYm+OPIdmax2KwYNjuQEGqyZd6SXvhUHzRdLzaI=";
 
   # No tests
   doCheck = false;
diff --git a/pkgs/applications/misc/cotp/default.nix b/pkgs/applications/misc/cotp/default.nix
index fb3e149bc8e84..db18ec5d131de 100644
--- a/pkgs/applications/misc/cotp/default.nix
+++ b/pkgs/applications/misc/cotp/default.nix
@@ -8,16 +8,16 @@
 
 rustPlatform.buildRustPackage rec {
   name = "cotp";
-  version = "1.2.1";
+  version = "1.2.3";
 
   src = fetchFromGitHub {
     owner = "replydev";
     repo = "cotp";
     rev = "v${version}";
-    hash = "sha256-DIb/lgJxwg+QuqzN/0YoUV1iZwRqh6PAN0KRK7TbWDs=";
+    hash = "sha256-Pg07iq2jj8cUA4iQsY52cujmUZLYrbTG5Zj+lITxpls=";
   };
 
-  cargoHash = "sha256-uvH4mdI8ya/MJJngXQ98oXjG7JjUdvPwIzvJrdwlOEE=";
+  cargoHash = "sha256-gH9axiM0Qgl2TdJUpnDONHtU2I5l03SrKEe+2l5V21Y=";
 
   buildInputs = lib.optionals stdenv.isLinux [ libxcb ]
     ++ lib.optionals stdenv.isDarwin [ AppKit ];
diff --git a/pkgs/applications/misc/hugo/default.nix b/pkgs/applications/misc/hugo/default.nix
index f4f4f579ef56b..5fc8dd5e9d4cf 100644
--- a/pkgs/applications/misc/hugo/default.nix
+++ b/pkgs/applications/misc/hugo/default.nix
@@ -2,16 +2,16 @@
 
 buildGoModule rec {
   pname = "hugo";
-  version = "0.110.0";
+  version = "0.111.1";
 
   src = fetchFromGitHub {
     owner = "gohugoio";
     repo = pname;
     rev = "v${version}";
-    hash = "sha256-7B0C8191lUGsv81+0eKDrBm+5hLlFjID3RTuajSg/RM=";
+    hash = "sha256-3bg7cmM05ekR5gtJCEJk3flplw8MRc9hVqlZx3ZUIaw=";
   };
 
-  vendorHash = "sha256-GtywXjtAF5Q4jUz2clfseUJVqiU+eSguG/ZoKy2TzuA=";
+  vendorHash = "sha256-xiysjJi3bL0xIoEEo7xXQbznFzwKJrCT6l/bxEbDRUI=";
 
   doCheck = false;
 
diff --git a/pkgs/applications/misc/mwic/default.nix b/pkgs/applications/misc/mwic/default.nix
index e1f9baa608832..a31d6d5a90ab3 100644
--- a/pkgs/applications/misc/mwic/default.nix
+++ b/pkgs/applications/misc/mwic/default.nix
@@ -1,12 +1,12 @@
 { lib, stdenv, fetchurl, pythonPackages }:
 
 stdenv.mkDerivation rec {
-  version = "0.7.9";
+  version = "0.7.10";
   pname = "mwic";
 
   src = fetchurl {
     url = "https://github.com/jwilk/mwic/releases/download/${version}/${pname}-${version}.tar.gz";
-    sha256 = "sha256-i7DSvUBUMOvn2aYpwYOCDHKq0nkleknD7k2xopo+C5s=";
+    sha256 = "sha256-dmIHPehkxpSb78ymVpcPCu4L41coskrHQOg067dprOo=";
   };
 
   makeFlags=["PREFIX=\${out}"];
diff --git a/pkgs/applications/misc/octoprint/default.nix b/pkgs/applications/misc/octoprint/default.nix
index 4ca54221c2fca..c1a4f6aec9228 100644
--- a/pkgs/applications/misc/octoprint/default.nix
+++ b/pkgs/applications/misc/octoprint/default.nix
@@ -16,7 +16,7 @@ let
     packageOverrides = lib.foldr lib.composeExtensions (self: super: { }) (
       [
         (
-          # with version 3 of flask-limiter octoprint 1.8.6 fails to start with
+          # with version 3 of flask-limiter octoprint 1.8.7 fails to start with
           #  TypeError: Limiter.__init__() got multiple values for argument 'key_func'
           self: super: {
             flask-limiter = super.flask-limiter.overridePythonAttrs (oldAttrs: rec {
@@ -105,13 +105,13 @@ let
           self: super: {
             octoprint = self.buildPythonPackage rec {
               pname = "OctoPrint";
-              version = "1.8.6";
+              version = "1.8.7";
 
               src = fetchFromGitHub {
                 owner = "OctoPrint";
                 repo = "OctoPrint";
                 rev = version;
-                hash = "sha256-DCUesPy4/g7DYN/9CDRvwAWHcv4dFsF+gsysg5UWThQ=";
+                hash = "sha256-g4PYB9YbkX0almRPgMFlb8D633Y5fc3H+Boa541suqc=";
               };
 
               propagatedBuildInputs = with self; [
diff --git a/pkgs/applications/misc/phoc/default.nix b/pkgs/applications/misc/phoc/default.nix
index 0fae80fc14e4b..79ffb3fdbf9b3 100644
--- a/pkgs/applications/misc/phoc/default.nix
+++ b/pkgs/applications/misc/phoc/default.nix
@@ -95,7 +95,7 @@ in stdenv.mkDerivation rec {
     description = "Wayland compositor for mobile phones like the Librem 5";
     homepage = "https://gitlab.gnome.org/World/Phosh/phoc";
     license = licenses.gpl3Plus;
-    maintainers = with maintainers; [ masipcat zhaofengli ];
+    maintainers = with maintainers; [ masipcat tomfitzhenry zhaofengli ];
     platforms = platforms.linux;
   };
 }
diff --git a/pkgs/applications/misc/process-compose/default.nix b/pkgs/applications/misc/process-compose/default.nix
index 67c1c7e815c16..967904c6e194e 100644
--- a/pkgs/applications/misc/process-compose/default.nix
+++ b/pkgs/applications/misc/process-compose/default.nix
@@ -8,13 +8,13 @@ let config-module = "github.com/f1bonacc1/process-compose/src/config";
 in
 buildGoModule rec {
   pname = "process-compose";
-  version = "0.40.2";
+  version = "0.43.1";
 
   src = fetchFromGitHub {
     owner = "F1bonacc1";
     repo = pname;
     rev = "v${version}";
-    hash = "sha256-+09gLeifEFwG2Ou1tQP29hYHhr0Qn0hOKj7p7PB8Jfc=";
+    hash = "sha256-yNYoVz6vITKkAkqH/0p7D4sifTpjtEZS4syFSwN4v98=";
     # populate values that require us to use git. By doing this in postFetch we
     # can delete .git afterwards and maintain better reproducibility of the src.
     leaveDotGit = true;
@@ -43,7 +43,7 @@ buildGoModule rec {
     installShellFiles
   ];
 
-  vendorHash = "sha256-g82JRmfbKH/XEZx2aLZOcyen23vOxQXR7VyeAYxCSi4=";
+  vendorHash = "sha256-iiGn0dYHNEp5Bs54X44sHbsG3HD92Xs4oah4iZXqqvQ=";
 
   doCheck = false;
 
diff --git a/pkgs/applications/misc/pueue/default.nix b/pkgs/applications/misc/pueue/default.nix
index 8e4d5cdc0a557..d1d7aff156a7b 100644
--- a/pkgs/applications/misc/pueue/default.nix
+++ b/pkgs/applications/misc/pueue/default.nix
@@ -10,16 +10,16 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "pueue";
-  version = "3.1.1";
+  version = "3.1.2";
 
   src = fetchFromGitHub {
     owner = "Nukesor";
     repo = "pueue";
     rev = "v${version}";
-    hash = "sha256-5xHY8DOQnOdYqNyfAS2kMuW2vxAuoSe6RaOItnAJCkQ=";
+    hash = "sha256-Q9NHkVOWVWAty6iIhN0GmUkKB+nDqmxiPVnhbQvup9M=";
   };
 
-  cargoHash = "sha256-3IOtx1aeth6QBjY6aILtzxhjZddovD7KIKzNhVCabfU=";
+  cargoHash = "sha256-YSD7RXU3eBlELx76gU5eNOGkSoK9SRQZOV+7lil1fyQ=";
 
   nativeBuildInputs = [
     installShellFiles
diff --git a/pkgs/applications/misc/qt-box-editor/default.nix b/pkgs/applications/misc/qt-box-editor/default.nix
index 7987208b885a4..7a84f85a9e21e 100644
--- a/pkgs/applications/misc/qt-box-editor/default.nix
+++ b/pkgs/applications/misc/qt-box-editor/default.nix
@@ -5,7 +5,7 @@
 , qtsvg
 , qmake
 , leptonica
-, tesseract
+, tesseract4
 }:
 
 mkDerivation {
@@ -19,7 +19,7 @@ mkDerivation {
     hash = "sha256-3dWnAu0CLO3atjbC1zJEnL3vzsIEecDDDhW3INMfCv4=";
   };
 
-  buildInputs = [ qtbase qtsvg leptonica tesseract ];
+  buildInputs = [ qtbase qtsvg leptonica tesseract4 ];
 
   nativeBuildInputs = [ qmake ];
 
diff --git a/pkgs/applications/misc/syncthingtray/default.nix b/pkgs/applications/misc/syncthingtray/default.nix
index 5d583181ce65e..99404d38efeee 100644
--- a/pkgs/applications/misc/syncthingtray/default.nix
+++ b/pkgs/applications/misc/syncthingtray/default.nix
@@ -19,30 +19,25 @@
 , kioPluginSupport ? true
 , plasmoidSupport  ? true
 , systemdSupport ? true
+/* It is possible to set via this option an absolute exec path that will be
+written to the `~/.config/autostart/syncthingtray.desktop` file generated
+during runtime. Alternatively, one can edit the desktop file themselves after
+it is generated See:
+https://github.com/NixOS/nixpkgs/issues/199596#issuecomment-1310136382 */
+, autostartExecPath ? "syncthingtray"
 }:
 
 mkDerivation rec {
-  version = "1.3.1";
+  version = "1.3.2";
   pname = "syncthingtray";
 
   src = fetchFromGitHub {
     owner = "Martchus";
     repo = "syncthingtray";
     rev = "v${version}";
-    sha256 = "sha256-0rmfDkPvgubVqfbIOZ+mnv/x1p2sb88zGeg/Q2JCy3I=";
+    sha256 = "sha256-zLZw6ltdgO66dvKdLXhr/a6r8UhbSAx06jXrgMARHyw=";
   };
 
-  patches = [
-    # Fix Exec= path in runtime-generated
-    # ~/.config/autostart/syncthingtray.desktop file - this is required because
-    # we are wrapping the executable. We can't use `substituteAll` because we
-    # can't use `${placeholder "out"}` because that will produce the $out of
-    # the patch derivation itself, and not of syncthing's "out" placeholder.
-    # Hence we use a C definition with NIX_CFLAGS_COMPILE
-    ./use-nix-path-in-autostart.patch
-  ];
-  env.NIX_CFLAGS_COMPILE = "-DEXEC_NIX_PATH=\"${placeholder "out"}/bin/syncthingtray\"";
-
   buildInputs = [
     qtbase
     cpp-utilities
@@ -70,6 +65,7 @@ mkDerivation rec {
   '';
 
   cmakeFlags = [
+    "-DAUTOSTART_EXEC_PATH=${autostartExecPath}"
     # See https://github.com/Martchus/syncthingtray/issues/42
     "-DQT_PLUGIN_DIR:STRING=${placeholder "out"}/lib/qt-5"
   ] ++ lib.optionals (!plasmoidSupport) ["-DNO_PLASMOID=ON"]
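
Editor's note: the new autostartExecPath argument introduced in the hunk above lets users pick the Exec= path written to the runtime-generated ~/.config/autostart/syncthingtray.desktop entry. A minimal overlay sketch of how one might override it; the attribute name and the chosen absolute path are illustrative assumptions only, not part of this change:

  # overlay.nix (hypothetical): write a system-wide path into the autostart entry
  final: prev: {
    syncthingtray = prev.syncthingtray.override {
      autostartExecPath = "/run/current-system/sw/bin/syncthingtray";  # assumed path
    };
  }
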
diff --git a/pkgs/applications/misc/syncthingtray/use-nix-path-in-autostart.patch b/pkgs/applications/misc/syncthingtray/use-nix-path-in-autostart.patch
deleted file mode 100644
index a0907496ff9a7..0000000000000
--- a/pkgs/applications/misc/syncthingtray/use-nix-path-in-autostart.patch
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git i/widgets/settings/settingsdialog.cpp w/widgets/settings/settingsdialog.cpp
-index 4deff1f..16845b5 100644
---- i/widgets/settings/settingsdialog.cpp
-+++ w/widgets/settings/settingsdialog.cpp
-@@ -802,7 +802,7 @@ bool setAutostartEnabled(bool enabled)
-         desktopFile.write("[Desktop Entry]\n"
-                           "Name=" APP_NAME "\n"
-                           "Exec=\"");
--        desktopFile.write(qEnvironmentVariable("APPIMAGE", QCoreApplication::applicationFilePath()).toUtf8().data());
-+        desktopFile.write(qEnvironmentVariable("APPIMAGE", EXEC_NIX_PATH).toUtf8().data());
-         desktopFile.write("\" qt-widgets-gui --single-instance\nComment=" APP_DESCRIPTION "\n"
-                           "Icon=" PROJECT_NAME "\n"
-                           "Type=Application\n"
diff --git a/pkgs/applications/networking/cluster/argo-rollouts/default.nix b/pkgs/applications/networking/cluster/argo-rollouts/default.nix
index 266ec0e6a4574..09d31ffcb2afd 100644
--- a/pkgs/applications/networking/cluster/argo-rollouts/default.nix
+++ b/pkgs/applications/networking/cluster/argo-rollouts/default.nix
@@ -2,13 +2,13 @@
 
 buildGoModule rec {
   pname = "argo-rollouts";
-  version = "1.4.0";
+  version = "1.4.1";
 
   src = fetchFromGitHub {
     owner = "argoproj";
     repo = "argo-rollouts";
     rev = "v${version}";
-    sha256 = "sha256-HdYbrcz1uumwfUleDayc7obv4Grpg3TiUxKr8aF5bXM=";
+    sha256 = "sha256-MpiKdPjQRF1LzNxBvPucoeRkDfboJdStfQ6X+d2jiwk=";
   };
 
   vendorHash = "sha256-ZIFZCMyhpfKK/Irq2/MvkXuXX1jExDaSK/nXZgzCZgU=";
diff --git a/pkgs/applications/networking/cluster/cloudfoundry-cli/default.nix b/pkgs/applications/networking/cluster/cloudfoundry-cli/default.nix
index 7ff0a0101c970..f8543f3dc0b97 100644
--- a/pkgs/applications/networking/cluster/cloudfoundry-cli/default.nix
+++ b/pkgs/applications/networking/cluster/cloudfoundry-cli/default.nix
@@ -2,13 +2,13 @@
 
 buildGoModule rec {
   pname = "cloudfoundry-cli";
-  version = "8.6.0";
+  version = "8.6.1";
 
   src = fetchFromGitHub {
     owner = "cloudfoundry";
     repo = "cli";
     rev = "v${version}";
-    sha256 = "sha256-SApjp6rdyOyZD6onE0aZ1AeiR6XyPUT9SVJJe/ROEQQ=";
+    sha256 = "sha256-wW2oemRz+NI/+ke+4MFc8qCIiGIBYlb1KCEHvRbhZ/U=";
   };
   vendorHash = "sha256-xydewlruZvtWHm0IvVWuvv31+Z7/PLVC9gTZcQLaowk=";
 
diff --git a/pkgs/applications/networking/cluster/clusterctl/default.nix b/pkgs/applications/networking/cluster/clusterctl/default.nix
index 3f614815f7a59..e27ebb58bd455 100644
--- a/pkgs/applications/networking/cluster/clusterctl/default.nix
+++ b/pkgs/applications/networking/cluster/clusterctl/default.nix
@@ -2,13 +2,13 @@
 
 buildGoModule rec {
   pname = "clusterctl";
-  version = "1.3.4";
+  version = "1.3.5";
 
   src = fetchFromGitHub {
     owner = "kubernetes-sigs";
     repo = "cluster-api";
     rev = "v${version}";
-    hash = "sha256-bkjtJidG+UHma15axlLcXtqtWTqesOdHHmH4db5hoAY=";
+    hash = "sha256-e6rs7cCSZiklMtPiFozea6EqRylepD2gfoDqQaUuly4=";
   };
 
   vendorHash = "sha256-VPeaT4vPhBa6V+Ir+vNRIWgyVBzEgTDSnDtGrxxdZ0c=";
diff --git a/pkgs/applications/networking/cluster/k3sup/default.nix b/pkgs/applications/networking/cluster/k3sup/default.nix
index 555f1e69c1794..4ee0a4fd97bb6 100644
--- a/pkgs/applications/networking/cluster/k3sup/default.nix
+++ b/pkgs/applications/networking/cluster/k3sup/default.nix
@@ -9,18 +9,18 @@
 
 buildGoModule rec {
   pname = "k3sup";
-  version = "0.12.12";
+  version = "0.12.13";
 
   src = fetchFromGitHub {
     owner = "alexellis";
     repo = "k3sup";
     rev = version;
-    sha256 = "sha256-Fp52PL0oNqpvisiLLqZ+iQ4RSzLaL3UH7S/CAR5h9LA=";
+    sha256 = "sha256-lnr2zMp6gpOM1DtUFIniDd38zR1qnXCmcftlt7dL6P4=";
   };
 
   nativeBuildInputs = [ makeWrapper installShellFiles ];
 
-  vendorSha256 = "sha256-97m8xz46lvTtZoxO2+pjWmZyZnB2atPuVzYgS9DV+gI=";
+  vendorHash = "sha256-97m8xz46lvTtZoxO2+pjWmZyZnB2atPuVzYgS9DV+gI=";
 
   postConfigure = ''
     substituteInPlace vendor/github.com/alexellis/go-execute/pkg/v1/exec.go \
diff --git a/pkgs/applications/networking/cluster/kubecfg/default.nix b/pkgs/applications/networking/cluster/kubecfg/default.nix
index 461b29aed69c2..46747974dd2f4 100644
--- a/pkgs/applications/networking/cluster/kubecfg/default.nix
+++ b/pkgs/applications/networking/cluster/kubecfg/default.nix
@@ -6,13 +6,13 @@
 
 buildGoModule rec {
   pname = "kubecfg";
-  version = "0.29.0";
+  version = "0.29.1";
 
   src = fetchFromGitHub {
     owner = "kubecfg";
     repo = "kubecfg";
     rev = "v${version}";
-    hash = "sha256-41hctulZdFSBc+Yw4p2haR2VNIpa0bwntPCz3WUJyZg=";
+    hash = "sha256-lHpXmJPOjyzlNl7fLQH6Ufj20YRzeGz4NGxd3Bgr3mA=";
   };
 
   vendorHash = "sha256-VGLGa1/8sdVC3H4hxpvF/t2YgbRlbeNTJMJb5zwknPw=";
diff --git a/pkgs/applications/networking/cluster/ocm/default.nix b/pkgs/applications/networking/cluster/ocm/default.nix
index 4c4ececb71efc..e7f01e9916e1d 100644
--- a/pkgs/applications/networking/cluster/ocm/default.nix
+++ b/pkgs/applications/networking/cluster/ocm/default.nix
@@ -2,16 +2,16 @@
 
 buildGoModule rec {
   pname = "ocm";
-  version = "0.1.65";
+  version = "0.1.66";
 
   src = fetchFromGitHub {
     owner = "openshift-online";
     repo = "ocm-cli";
     rev = "v${version}";
-    sha256 = "sha256-UzHGVK/HZ5eH8nO4+G92NunOQi9AWnqv4vgcHjtoPDw=";
+    sha256 = "sha256-iOgDWqP9sFd5/0e5/+WP6R3PpJa8AiUE4EjI39HwWX8=";
   };
 
-  vendorSha256 = "sha256-4pqXap1WayqdXuwwLktE71D7x6Ao9MkIKSzIKtVyP84=";
+  vendorHash = "sha256-yY/X0LVIH1ULegx8MIZyUxD1wPNxxISSCBxj9aY2wtA=";
 
   # Strip the final binary.
   ldflags = [ "-s" "-w" ];
diff --git a/pkgs/applications/networking/cluster/terraform-providers/providers.json b/pkgs/applications/networking/cluster/terraform-providers/providers.json
index e96f2e0b8c6a0..8f58a0473ef73 100644
--- a/pkgs/applications/networking/cluster/terraform-providers/providers.json
+++ b/pkgs/applications/networking/cluster/terraform-providers/providers.json
@@ -46,11 +46,11 @@
     "vendorHash": "sha256-JOaw8rKH7eb3RiP/FD+M7VEXCRfVuarTjfEusz1yGmQ="
   },
   "alicloud": {
-    "hash": "sha256-Cf3plUhdewlq3MvOqZGcICP0j9R3vg0nZdBMrk/Et7k=",
+    "hash": "sha256-QefplcJVXduBbado4Ykg2Ngybb/oxf6/ulCgRqJGm0A=",
     "homepage": "https://registry.terraform.io/providers/aliyun/alicloud",
     "owner": "aliyun",
     "repo": "terraform-provider-alicloud",
-    "rev": "v1.199.0",
+    "rev": "v1.200.0",
     "spdx": "MPL-2.0",
     "vendorHash": null
   },
@@ -82,13 +82,13 @@
     "vendorHash": "sha256-99PwwxVHfRGC0QCQGhifRzqWFOHZ1R7Ge2ou7OjiggQ="
   },
   "auth0": {
-    "hash": "sha256-3hAfDzK7iO4D68OsCvuXQx5Gk0VOtoBiw21tBJjDJtQ=",
+    "hash": "sha256-d5zM6FKFT9UFUyrm+5aF2wRvGsdtkq3Z8NvlsvZib7c=",
     "homepage": "https://registry.terraform.io/providers/auth0/auth0",
     "owner": "auth0",
     "repo": "terraform-provider-auth0",
-    "rev": "v0.44.0",
+    "rev": "v0.44.1",
     "spdx": "MPL-2.0",
-    "vendorHash": "sha256-UP9A0lcW5QbTuur1MMjKMlvC8S3nenqs0WjpoqvwEQI="
+    "vendorHash": "sha256-vcKw8G9SqbP0wBnhLKJUz9ua1nGdP5ioZ+5ACxkeCZk="
   },
   "avi": {
     "hash": "sha256-mBLdIL4mUI4zA3c9gB4DL1QY0xHW15Q1rO/v1gVYKYU=",
@@ -110,20 +110,20 @@
     "vendorHash": null
   },
   "aws": {
-    "hash": "sha256-qopoHOcCdOAkgpZq3AvCnsq00sjvNSFdUKzn7SQ0G5k=",
+    "hash": "sha256-U9mzz/r3xb6bl9n1Go6JiM6CemB2Nwsu6LEhc5ypV3c=",
     "homepage": "https://registry.terraform.io/providers/hashicorp/aws",
     "owner": "hashicorp",
     "repo": "terraform-provider-aws",
-    "rev": "v4.56.0",
+    "rev": "v4.57.0",
     "spdx": "MPL-2.0",
-    "vendorHash": "sha256-OBWwMNDpeoPR6NLIgsjiYGQdePEWDMWGN1Y0nHsecYs="
+    "vendorHash": "sha256-dK1NGOpX8h4XvcDtp4DEaVrxHaGmzTXldKbsfFoVWu4="
   },
   "azuread": {
-    "hash": "sha256-vfkheaRQoDpItMEFzuDkkOOoVvj07MyCkAaybef71nc=",
+    "hash": "sha256-MGCGfocs16qmJnvMRRD7TRHnPkS17h+oNUkMARAQhLs=",
     "homepage": "https://registry.terraform.io/providers/hashicorp/azuread",
     "owner": "hashicorp",
     "repo": "terraform-provider-azuread",
-    "rev": "v2.35.0",
+    "rev": "v2.36.0",
     "spdx": "MPL-2.0",
     "vendorHash": null
   },
@@ -355,11 +355,11 @@
     "vendorHash": "sha256-oVTanZpCWs05HwyIKW2ajiBPz1HXOFzBAt5Us+EtTRw="
   },
   "equinix": {
-    "hash": "sha256-aah3f/5Bd+IgXbyJpDhcyklIYHlK3yy16UkYlOprh0c=",
+    "hash": "sha256-zyRPpAaDgjRafn5RcrzmbVTzO6gGS1HMmvLR8VFdKow=",
     "homepage": "https://registry.terraform.io/providers/equinix/equinix",
     "owner": "equinix",
     "repo": "terraform-provider-equinix",
-    "rev": "v1.12.0",
+    "rev": "v1.13.0",
     "spdx": "MIT",
     "vendorHash": "sha256-Zi2e/Vg9iKTrU8Mb37Y8xHYIBL+IfDnWMUUg5Vqrbfo="
   },
@@ -391,13 +391,13 @@
     "vendorHash": null
   },
   "flexibleengine": {
-    "hash": "sha256-uT8BmACMMJKVPAhL/7rudCXG9AOb4kS1Lswr5ZxY6M4=",
+    "hash": "sha256-0wpyi397+5YAa3epZZII312rK1SnPU5k9a1/iVTbqmU=",
     "homepage": "https://registry.terraform.io/providers/FlexibleEngineCloud/flexibleengine",
     "owner": "FlexibleEngineCloud",
     "repo": "terraform-provider-flexibleengine",
-    "rev": "v1.36.0",
+    "rev": "v1.36.1",
     "spdx": "MPL-2.0",
-    "vendorHash": "sha256-obBN7Q/gKbvERJIUVz+GgPjn7/OKjXCiFI6WuOd0hic="
+    "vendorHash": "sha256-HcyUGKbgj322fU7keN/lBEn6UJhV3QXScBJHZHJkCII="
   },
   "fortios": {
     "deleteVendor": true,
@@ -540,11 +540,11 @@
     "vendorHash": "sha256-rxh8Me+eOKPCbfHFT3tRsbM7JU67dBqv2JOiWArI/2Y="
   },
   "huaweicloud": {
-    "hash": "sha256-oZUPfhndpht9EuBiltLknblGaMX2M/dD1iOiwDJKgWY=",
+    "hash": "sha256-x/5jt31yPTJRHSHRZqSrrjNdERWho6l71jvS7x6dR0c=",
     "homepage": "https://registry.terraform.io/providers/huaweicloud/huaweicloud",
     "owner": "huaweicloud",
     "repo": "terraform-provider-huaweicloud",
-    "rev": "v1.44.2",
+    "rev": "v1.45.0",
     "spdx": "MPL-2.0",
     "vendorHash": null
   },
@@ -567,13 +567,13 @@
     "vendorHash": null
   },
   "ibm": {
-    "hash": "sha256-Qdb5HpamjCNGlqSf3etFv0++Skrk/jm6UVBFsKGU+jw=",
+    "hash": "sha256-7TuvaeCRtQcYkJe6KbinGdK3JvmEbT4yxwHbzLR6jfE=",
     "homepage": "https://registry.terraform.io/providers/IBM-Cloud/ibm",
     "owner": "IBM-Cloud",
     "repo": "terraform-provider-ibm",
-    "rev": "v1.50.0",
+    "rev": "v1.51.0",
     "spdx": "MPL-2.0",
-    "vendorHash": "sha256-JkmfZ9yz3r26j1SHIwnyNA+nYWAy4DoaWEMfFUTzD3Y="
+    "vendorHash": "sha256-l+Q4ix50ItXI/i5aDvqSC2kTk3tDBPZgO/6aok+P0hQ="
   },
   "icinga2": {
     "hash": "sha256-Y/Oq0aTzP+oSKPhHiHY9Leal4HJJm7TNDpcdqkUsCmk=",
@@ -621,11 +621,11 @@
     "vendorHash": "sha256-nDvnLEOtXkUJFY22pKogOzkWrj4qjyQbdlJ5pa/xnK8="
   },
   "ksyun": {
-    "hash": "sha256-F/A+hDjYTQS0NT0rslE792qNINghfdiQHRNnbMpyBdM=",
+    "hash": "sha256-mq0wE9jkn67HFyg0MgtD9lY7lk0+4/rnPLJ4mXX0xwY=",
     "homepage": "https://registry.terraform.io/providers/kingsoftcloud/ksyun",
     "owner": "kingsoftcloud",
     "repo": "terraform-provider-ksyun",
-    "rev": "v1.3.64",
+    "rev": "v1.3.66",
     "spdx": "MPL-2.0",
     "vendorHash": "sha256-miHKAz+ONXtuC1DNukcyZbbaYReY69dz9Zk6cJdORdQ="
   },
@@ -783,13 +783,13 @@
     "vendorHash": "sha256-3t8pUAwuVeZN5cYGs72YsdRvJunudSmKSldFWEFVA/4="
   },
   "ns1": {
-    "hash": "sha256-2w9x/FTtieWB88CIEkP7BH5saC6dt4IxdROBucczios=",
+    "hash": "sha256-fPeWs1VMsCY+OywHdwP9EUyjpoTYquBqP8W08Z/0DAA=",
     "homepage": "https://registry.terraform.io/providers/ns1-terraform/ns1",
     "owner": "ns1-terraform",
     "repo": "terraform-provider-ns1",
-    "rev": "v1.13.4",
+    "rev": "v2.0.0",
     "spdx": "MPL-2.0",
-    "vendorHash": "sha256-/Rgerbd8c6Owo79LrYsR9O0JNBrDOODFD+k1Yd5G6cY="
+    "vendorHash": "sha256-R4q9ASqTdKv4BG4zNktKsLxa6UU42UzWTLYHuRnJ4Zg="
   },
   "null": {
     "hash": "sha256-ExXDbAXMVCTZBlYmi4kD/7JFB1fCFAoPL637+1N6rEI=",
@@ -1027,11 +1027,11 @@
     "vendorHash": null
   },
   "snowflake": {
-    "hash": "sha256-nNv2lo7I5+eFmw+BvRB/DmgNE6iuR3Aq0kxyOeQdiqU=",
+    "hash": "sha256-MMWObJRS7FKvOfor2j0QywRMRbGsE5QcyDGbY2CXjo4=",
     "homepage": "https://registry.terraform.io/providers/Snowflake-Labs/snowflake",
     "owner": "Snowflake-Labs",
     "repo": "terraform-provider-snowflake",
-    "rev": "v0.57.0",
+    "rev": "v0.58.0",
     "spdx": "MIT",
     "vendorHash": "sha256-yFk5ap28JluaKkUPfePBuRUEg6/Ma5MrRkmWK6iAGNg="
   },
@@ -1045,11 +1045,11 @@
     "vendorHash": "sha256-NO1r/EWLgH1Gogru+qPeZ4sW7FuDENxzNnpLSKstnE8="
   },
   "spotinst": {
-    "hash": "sha256-pm6KFPIMPq9jFUf9wYYA3mWMl7+pJLv53wegFmPyBZY=",
+    "hash": "sha256-bUFX6Ok7cYyaoYHElUIcEwl6DRGK8q+opKBCABo8CVM=",
     "homepage": "https://registry.terraform.io/providers/spotinst/spotinst",
     "owner": "spotinst",
     "repo": "terraform-provider-spotinst",
-    "rev": "v1.102.0",
+    "rev": "v1.103.0",
     "spdx": "MPL-2.0",
     "vendorHash": "sha256-IlztpEAI/Z7DUQ5pMaGSQnXWPedAIJlS773nxsRMCYg="
   },
@@ -1099,11 +1099,11 @@
     "vendorHash": "sha256-tltQNtTsPoT5CTrKM7vLDVkmmW2FTd6MBubfXZveGxI="
   },
   "tencentcloud": {
-    "hash": "sha256-aqi6lEGVj0PhIMwUfU/4lu5uGgbU4+R42UhINbHgMjY=",
+    "hash": "sha256-91efifPY9ErjqtNPzm3+XSy1Jy+eQs2znxYzez74J/0=",
     "homepage": "https://registry.terraform.io/providers/tencentcloudstack/tencentcloud",
     "owner": "tencentcloudstack",
     "repo": "terraform-provider-tencentcloud",
-    "rev": "v1.79.12",
+    "rev": "v1.79.13",
     "spdx": "MPL-2.0",
     "vendorHash": null
   },
diff --git a/pkgs/applications/networking/feedreaders/castget/default.nix b/pkgs/applications/networking/feedreaders/castget/default.nix
index f0f09c8de7e6e..e79a387e07f57 100644
--- a/pkgs/applications/networking/feedreaders/castget/default.nix
+++ b/pkgs/applications/networking/feedreaders/castget/default.nix
@@ -1,5 +1,6 @@
-{ lib, stdenv, fetchFromGitHub
-, autoreconfHook
+{ lib
+, stdenv
+, fetchurl
 , pkg-config
 , glib
 , ronn
@@ -11,14 +12,11 @@
 
 stdenv.mkDerivation rec {
   pname = "castget";
-  version = "2.0.0";
+  version = "2.0.1";
 
-  src = fetchFromGitHub {
-    owner = "mlj";
-    repo = pname;
-    # Upstream uses `_` instead of `.` for the version
-    rev = "rel_${lib.replaceStrings ["."] ["_"] version}";
-    sha256 = "1129x64rw587q3sdpa3lrgs0gni5f0siwbvmfz8ya4zkbhgi2ik7";
+  src = fetchurl {
+    url = "http://savannah.nongnu.org/download/castget/castget-${version}.tar.bz2";
+    hash = "sha256-Q4tffsfjGkXtN1ZjD+RH9CAVrNpT7AkgL0hihya16HU=";
   };
 
   # without this, the build fails because of an encoding issue with the manual page.
@@ -29,12 +27,16 @@ stdenv.mkDerivation rec {
     export LC_ALL="en_US.UTF-8";
   '';
 
-  buildInputs = [ glib curl id3lib libxml2 ];
+  buildInputs = [
+    glib
+    curl
+    id3lib
+    libxml2
+  ];
   nativeBuildInputs = [
     ronn
     # See comment on locale above
     glibcLocales
-    autoreconfHook
     pkg-config
   ];
 
diff --git a/pkgs/applications/networking/instant-messengers/deltachat-desktop/default.nix b/pkgs/applications/networking/instant-messengers/deltachat-desktop/default.nix
index f4eb47428b5bb..b7280468b7544 100644
--- a/pkgs/applications/networking/instant-messengers/deltachat-desktop/default.nix
+++ b/pkgs/applications/networking/instant-messengers/deltachat-desktop/default.nix
@@ -21,17 +21,17 @@
 
 let
   libdeltachat' = libdeltachat.overrideAttrs (old: rec {
-    version = "1.107.1";
+    version = "1.110.0";
     src = fetchFromGitHub {
       owner = "deltachat";
       repo = "deltachat-core-rust";
       rev = version;
-      hash = "sha256-ISAUZyFrp86ILtRrlowceBQNJ7+tbJReIAe6+u4wwQI=";
+      hash = "sha256-SPBuStrBp9fnrLfFT2ec9yYItZsvQF9BHdJxi+plbgw=";
     };
     cargoDeps = rustPlatform.fetchCargoTarball {
       inherit src;
       name = "${old.pname}-${version}";
-      hash = "sha256-B4BMxiI3GhsjeD3gYrq5ZpbZ7l77ycrIMWu2sUzZiz4=";
+      hash = "sha256-Y4+CkaV9njHqmmiZnDtfZ5OwMVk583FtncxOgAqACkA=";
     };
   });
   esbuild' = esbuild.override {
@@ -48,16 +48,16 @@ let
   };
 in buildNpmPackage rec {
   pname = "deltachat-desktop";
-  version = "1.34.4";
+  version = "1.34.5";
 
   src = fetchFromGitHub {
     owner = "deltachat";
     repo = "deltachat-desktop";
     rev = "v${version}";
-    hash = "sha256-LV8/r6psUZuCEGbaH1nWlrkeNbEYG8R5O1aCxECPH1E=";
+    hash = "sha256-gNcYcxyztUrcxbOO7kaTSCyxqdykjp7Esm3jPJ/d4gc=";
   };
 
-  npmDepsHash = "sha256-rdZVvsyCo/6C4+gjytCCn9Qcl+chc6U+6orkcM59I8U=";
+  npmDepsHash = "sha256-I0PhE+GXFgOdvH5aLZRyn3lVmXgATX2kmltXYC9chog=";
 
   nativeBuildInputs = [
     makeWrapper
diff --git a/pkgs/applications/networking/irc/ircdog/default.nix b/pkgs/applications/networking/irc/ircdog/default.nix
index ebb9d2ad15a61..6d1f534877e9b 100644
--- a/pkgs/applications/networking/irc/ircdog/default.nix
+++ b/pkgs/applications/networking/irc/ircdog/default.nix
@@ -5,15 +5,15 @@
 
 buildGoModule rec {
   pname = "ircdog";
-  version = "0.3.0";
+  version = "0.4.0";
 
   src = fetchFromGitHub {
     owner = "goshuirc";
-    repo = pname;
-    rev = "v${version}";
-    sha256 = "sha256-x3ihWLgVYu17vG1xQTgIr4TSkeZ467TZBV1fPTPnZgw=";
-    fetchSubmodules = true;
+    repo = "ircdog";
+    rev = "refs/tags/v${version}";
+    hash = "sha256-uqqgXmEpGEJHnd1mtgpp13jFhKP5fbhE5wtcZNZL8t4=";
   };
+
   vendorSha256 = null;
 
   meta = with lib; {
diff --git a/pkgs/applications/networking/sniffers/wireshark/default.nix b/pkgs/applications/networking/sniffers/wireshark/default.nix
index 47687bb544c26..5d69667f1bfb2 100644
--- a/pkgs/applications/networking/sniffers/wireshark/default.nix
+++ b/pkgs/applications/networking/sniffers/wireshark/default.nix
@@ -44,7 +44,7 @@ in stdenv.mkDerivation {
   buildInputs = [
     gettext pcre2 libpcap lua5 libssh nghttp2 openssl libgcrypt
     libgpg-error gnutls geoip c-ares glib zlib
-  ] ++ lib.optionals withQt  (with qt5; [ qtbase qtmultimedia qtsvg qttools ])
+  ] ++ lib.optionals withQt  (with qt5; [ qtbase qtmultimedia qtsvg qttools qtwayland ])
     ++ lib.optionals stdenv.isLinux  [ libcap libnl ]
     ++ lib.optionals stdenv.isDarwin [ SystemConfiguration ApplicationServices gmp ]
     ++ lib.optionals (withQt && stdenv.isDarwin) (with qt5; [ qtmacextras ]);
diff --git a/pkgs/applications/science/biology/diamond/default.nix b/pkgs/applications/science/biology/diamond/default.nix
index 241585b3579ed..2b34f61ab2cfb 100644
--- a/pkgs/applications/science/biology/diamond/default.nix
+++ b/pkgs/applications/science/biology/diamond/default.nix
@@ -2,13 +2,13 @@
 
 stdenv.mkDerivation rec {
   pname = "diamond";
-  version = "2.1.3";
+  version = "2.1.4";
 
   src = fetchFromGitHub {
     owner = "bbuchfink";
     repo = "diamond";
     rev = "v${version}";
-    sha256 = "sha256-gvPftUSH+Gnn8LQeORpv7jjHewUKSeo2FVNcoaE2GKU=";
+    sha256 = "sha256-Og1cxEMJ24cncNDD2dXwy58OZ/nJmGdqrMRr5Y6YmHo=";
   };
 
 
diff --git a/pkgs/applications/science/biology/picard-tools/default.nix b/pkgs/applications/science/biology/picard-tools/default.nix
index f6f2f3c43bb28..8bd1af4b11d7c 100644
--- a/pkgs/applications/science/biology/picard-tools/default.nix
+++ b/pkgs/applications/science/biology/picard-tools/default.nix
@@ -2,11 +2,11 @@
 
 stdenv.mkDerivation rec {
   pname = "picard-tools";
-  version = "2.27.5";
+  version = "3.0.0";
 
   src = fetchurl {
     url = "https://github.com/broadinstitute/picard/releases/download/${version}/picard.jar";
-    sha256 = "sha256-Exyj4GJqPvEug5l5XnpJ+Cm7ToXXG0ib9PIx0hpsMZk=";
+    sha256 = "sha256-DV4oqzAfrTsCAw0BkjiIEpuoLF9yKsXMstQYq3asVJk=";
   };
 
   nativeBuildInputs = [ makeWrapper ];
diff --git a/pkgs/applications/science/logic/cubicle/default.nix b/pkgs/applications/science/logic/cubicle/default.nix
index 67f70e7165d4c..c9382c5d0f1a7 100644
--- a/pkgs/applications/science/logic/cubicle/default.nix
+++ b/pkgs/applications/science/logic/cubicle/default.nix
@@ -14,13 +14,6 @@ stdenv.mkDerivation rec {
     hash = "sha256-/EtbXpyXqRm0jGcMfGLAEwdr92061edjFys1V7/w6/Y=";
   };
 
-  # https://github.com/cubicle-model-checker/cubicle/issues/1
-  postPatch = ''
-    substituteInPlace Makefile.in \
-      --replace "@OCAMLC@" "ocamlfind ocamlc -package num" \
-      --replace "@OCAMLOPT@" "ocamlfind ocamlopt -package num"
-  '';
-
   strictDeps = true;
 
   nativeBuildInputs = [
@@ -36,6 +29,12 @@ stdenv.mkDerivation rec {
     num
   ];
 
+  # https://github.com/cubicle-model-checker/cubicle/issues/1
+  env = {
+    OCAMLC = "ocamlfind ocamlc -package num";
+    OCAMLOPT = "ocamlfind ocamlopt -package num";
+  };
+
   meta = with lib; {
     description = "An open source model checker for verifying safety properties of array-based systems";
     homepage = "https://cubicle.lri.fr/";
diff --git a/pkgs/applications/science/math/maxima/default.nix b/pkgs/applications/science/math/maxima/default.nix
index dffc212176b30..b42587596fa87 100644
--- a/pkgs/applications/science/math/maxima/default.nix
+++ b/pkgs/applications/science/math/maxima/default.nix
@@ -20,11 +20,11 @@ let
 in
 stdenv.mkDerivation rec {
   pname = "maxima";
-  version = "5.45.1";
+  version = "5.46.0";
 
   src = fetchurl {
     url = "mirror://sourceforge/${pname}/${pname}-${version}.tar.gz";
-    sha256 = "sha256-/pAWJ2lwvvIUoaJENIVYZEUU1/36pPyLnQ6Hr8u059w=";
+    sha256 = "sha256-c5Dwa0jaZckDPosvYpuXi5AFZFSlQCLbfecOIiWqiwc=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/applications/science/math/wxmaxima/default.nix b/pkgs/applications/science/math/wxmaxima/default.nix
index 205fe3ebed5de..ee2f62317c0cd 100644
--- a/pkgs/applications/science/math/wxmaxima/default.nix
+++ b/pkgs/applications/science/math/wxmaxima/default.nix
@@ -12,13 +12,13 @@
 
 stdenv.mkDerivation rec {
   pname = "wxmaxima";
-  version = "22.12.0";
+  version = "23.02.1";
 
   src = fetchFromGitHub {
     owner = "wxMaxima-developers";
     repo = "wxmaxima";
     rev = "Version-${version}";
-    sha256 = "sha256-RT6y4M6LQD1fXJcjtdSXnDmoJvv160g2asdV4WtTcok=";
+    sha256 = "sha256-Lrj/oJNmKlCkNbnCGY2TewCospwajKdWgmKkreHzEIU=";
   };
 
   buildInputs = [
diff --git a/pkgs/applications/science/misc/root/default.nix b/pkgs/applications/science/misc/root/default.nix
index cfda819a42ca7..a33af57951089 100644
--- a/pkgs/applications/science/misc/root/default.nix
+++ b/pkgs/applications/science/misc/root/default.nix
@@ -261,6 +261,15 @@ stdenv.mkDerivation rec {
     ]}"
   '';
 
+  # To use the debug information on the fly (without installation)
+  # add the outPath of root.debug into NIX_DEBUG_INFO_DIRS (in PATH-like format)
+  # and make sure that gdb from Nixpkgs can be found in PATH.
+  #
+  # Darwin currently fails to support it (#203380)
+  # we set it to true hoping to benefit from the future fix.
+  # Before that, please make sure if root.debug exists before using it.
+  separateDebugInfo = true;
+
   setupHook = ./setup-hook.sh;
 
   meta = with lib; {
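
Editor's note: the separateDebugInfo comment in the hunk above describes consuming ROOT's debug symbols without installing them, via NIX_DEBUG_INFO_DIRS and a Nixpkgs gdb. A minimal shell.nix sketch under those assumptions (that gdb from Nixpkgs honours NIX_DEBUG_INFO_DIRS and that the split symbols live under lib/debug of the debug output):

  # shell.nix (hypothetical): let gdb find root's split debug info on the fly
  with import <nixpkgs> { };
  mkShell {
    packages = [ root gdb ];
    # root.debug is produced because the derivation now sets separateDebugInfo = true;
    # check that the output actually exists before relying on it (see the comment above).
    NIX_DEBUG_INFO_DIRS = "${root.debug}/lib/debug";
  }
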
diff --git a/pkgs/applications/system/monitor/default.nix b/pkgs/applications/system/monitor/default.nix
index eb8d892dd62e3..e3917ce0e0144 100644
--- a/pkgs/applications/system/monitor/default.nix
+++ b/pkgs/applications/system/monitor/default.nix
@@ -27,13 +27,13 @@
 
 stdenv.mkDerivation rec {
   pname = "monitor";
-  version = "0.16.0";
+  version = "0.16.1";
 
   src = fetchFromGitHub {
     owner = "stsdc";
     repo = "monitor";
     rev = version;
-    sha256 = "sha256-+B3h7ydN+ISElpOMMCcKORYnq1MaMhvr+4I2qHJ26As=";
+    sha256 = "sha256-ZTsb1xcJ7eeCEPebZW0anmG1SUPAzZakw4WzJql9VTQ=";
     fetchSubmodules = true;
   };
 
diff --git a/pkgs/applications/version-management/git-machete/default.nix b/pkgs/applications/version-management/git-machete/default.nix
index 885f6d1355e93..455b7497aa96a 100644
--- a/pkgs/applications/version-management/git-machete/default.nix
+++ b/pkgs/applications/version-management/git-machete/default.nix
@@ -12,13 +12,13 @@
 
 buildPythonApplication rec {
   pname = "git-machete";
-  version = "3.15.2";
+  version = "3.16.0";
 
   src = fetchFromGitHub {
     owner = "virtuslab";
     repo = pname;
     rev = "v${version}";
-    hash = "sha256-hIm3JDLXUTwjuVfAHvZBWFBJNOAVWyfl/X4A6B0OoXg=";
+    hash = "sha256-94qYCyWqVwMMptlJIe4o4/mEHnhcMubcupd+Qs2SYH0=";
   };
 
   nativeBuildInputs = [ installShellFiles ];
diff --git a/pkgs/applications/version-management/mercurial/default.nix b/pkgs/applications/version-management/mercurial/default.nix
index 467359d9c901a..0ddabbb12d8a9 100644
--- a/pkgs/applications/version-management/mercurial/default.nix
+++ b/pkgs/applications/version-management/mercurial/default.nix
@@ -21,11 +21,11 @@ let
 
   self = python3Packages.buildPythonApplication rec {
     pname = "mercurial${lib.optionalString fullBuild "-full"}";
-    version = "6.3.2";
+    version = "6.3.3";
 
     src = fetchurl {
       url = "https://mercurial-scm.org/release/mercurial-${version}.tar.gz";
-      sha256 = "sha256-z+butd2JOrMsC3nBUxqsQgdz4PyDejXbPU2ScD30Wpg=";
+      sha256 = "sha256-E8l/9YnHYF6ApIjzNoUs4dU4xdQUPPszvmm9rd2RV70=";
     };
 
     format = "other";
@@ -35,7 +35,7 @@ let
     cargoDeps = if rustSupport then rustPlatform.fetchCargoTarball {
       inherit src;
       name = "mercurial-${version}";
-      sha256 = "sha256-pJNX0j/Rg3cQanveD7z2x5wixo/jLvHai8Ib2Akmmgk";
+      sha256 = "sha256-ZQYNFEbvSwiJ/BSQ0ZxpjFrmyXkKjVJciwz45Br7Rl8=";
       sourceRoot = "mercurial-${version}/rust";
     } else null;
     cargoRoot = if rustSupport then "rust" else null;
diff --git a/pkgs/applications/video/mkvtoolnix/default.nix b/pkgs/applications/video/mkvtoolnix/default.nix
index bce50b43a07c4..3befb8534916e 100644
--- a/pkgs/applications/video/mkvtoolnix/default.nix
+++ b/pkgs/applications/video/mkvtoolnix/default.nix
@@ -47,13 +47,13 @@ let
 in
 stdenv.mkDerivation rec {
   pname = "mkvtoolnix";
-  version = "73.0.0";
+  version = "74.0.0";
 
   src = fetchFromGitLab {
     owner = "mbunkus";
     repo = "mkvtoolnix";
     rev = "release-${version}";
-    sha256 = "HGoT3t/ooRMiyjUkHnvVGOB04IU5U8VEKDixhE57kR8=";
+    sha256 = "sha256-p8rIAHSqYCOlNbuxisQlIkMh2OArc+MOYn1kgC5kJsc=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/applications/virtualization/lkl/default.nix b/pkgs/applications/virtualization/lkl/default.nix
index 0247cd433b048..839022a8d5512 100644
--- a/pkgs/applications/virtualization/lkl/default.nix
+++ b/pkgs/applications/virtualization/lkl/default.nix
@@ -6,15 +6,20 @@
 
 stdenv.mkDerivation rec {
   pname = "lkl";
-  version = "2023-01-27";
+
+  # NOTE: pinned to the last known version that doesn't have a hang in cptofs.
+  # Please verify `nix build -f nixos/release-combined.nix nixos.ova` works
+  # before attempting to update again.
+  # ref: https://github.com/NixOS/nixpkgs/pull/219434
+  version = "2022-08-08";
 
   outputs = [ "dev" "lib" "out" ];
 
   src = fetchFromGitHub {
     owner  = "lkl";
     repo   = "linux";
-    rev  = "b00f0fbcd5ae24636a9315fea3af32f411cf93be";
-    sha256 = "sha256-GZpnTVdcnS5uAUHsVre539+0Qlv36Fui0WGjOPwvWrE=";
+    rev  = "ffbb4aa67b3e0a64f6963f59385a200d08cb2d8b";
+    sha256 = "sha256-24sNREdnhkF+P+3P0qEh2tF1jHKF7KcbFSn/rPK2zWs=";
   };
 
   nativeBuildInputs = [ bc bison flex python3 ];
@@ -72,6 +77,6 @@ stdenv.mkDerivation rec {
     homepage    = "https://github.com/lkl/linux/";
     platforms   = platforms.linux; # Darwin probably works too but I haven't tested it
     license     = licenses.gpl2;
-    maintainers = with maintainers; [ copumpkin ];
+    maintainers = with maintainers; [ copumpkin raitobezarius ];
   };
 }
diff --git a/pkgs/applications/virtualization/nixpacks/default.nix b/pkgs/applications/virtualization/nixpacks/default.nix
index c8072c903d260..695cd970b7bb7 100644
--- a/pkgs/applications/virtualization/nixpacks/default.nix
+++ b/pkgs/applications/virtualization/nixpacks/default.nix
@@ -2,16 +2,16 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "nixpacks";
-  version = "1.4.0";
+  version = "1.4.1";
 
   src = fetchFromGitHub {
     owner = "railwayapp";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-v9ycluLfkrPDzjsMXtv7w9UHgMaGzTsJw4lT/KfRAu4=";
+    sha256 = "sha256-zxgNHzKXekZnk0OsHw30u4L9U2mIT/MryZuAQ2EBEYg=";
   };
 
-  cargoHash = "sha256-wVQEa1qS+JF6PHKvRrRFbSvj2qp6j14ErOQPkxP0uuA=";
+  cargoHash = "sha256-tsGyrU/5yp5PJ2d5HUoaw/jhGgYyDt6qBK+DvC79kmY=";
 
   # skip test due to FHS dependency
   doCheck = false;
diff --git a/pkgs/applications/virtualization/pods/default.nix b/pkgs/applications/virtualization/pods/default.nix
index 648183654701e..5e68eeb2c827f 100644
--- a/pkgs/applications/virtualization/pods/default.nix
+++ b/pkgs/applications/virtualization/pods/default.nix
@@ -17,19 +17,19 @@
 
 stdenv.mkDerivation rec {
   pname = "pods";
-  version = "1.0.5";
+  version = "1.0.6";
 
   src = fetchFromGitHub {
     owner = "marhkb";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-V/4atbYG3jP0o1Bfn/dZBDXEk+Yi4cSJAY8HnTmpHRI=";
+    sha256 = "sha256-ZryzNlEj/2JTp5FJiDzXN9v1DvczfebqEOrJP+dKaRw=";
   };
 
   cargoDeps = rustPlatform.fetchCargoTarball {
     inherit src;
     name = "${pname}-${version}";
-    sha256 = "sha256-gJZ3z6xDgWwOPjCLZg3LRMk3KoTXGaotXgO/xDUwAvk=";
+    sha256 = "sha256-OgvlRnii4T4HcFPiGkcLcagyHCg+lWXCXQ9XdXjHDbQ=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/applications/virtualization/virtualbox/default.nix b/pkgs/applications/virtualization/virtualbox/default.nix
index d65f101b3774d..fc3303baf339b 100644
--- a/pkgs/applications/virtualization/virtualbox/default.nix
+++ b/pkgs/applications/virtualization/virtualbox/default.nix
@@ -3,6 +3,7 @@
 , libpng, glib, lvm2, libXrandr, libXinerama, libopus, qtbase, qtx11extras
 , qttools, qtsvg, qtwayland, pkg-config, which, docbook_xsl, docbook_xml_dtd_43
 , alsa-lib, curl, libvpx, nettools, dbus, substituteAll, gsoap, zlib
+, yasm, glslang
 # If open-watcom-bin is not passed, VirtualBox will fall back to use
 # the shipped alternative sources (assembly).
 , open-watcom-bin
@@ -23,19 +24,19 @@ let
   buildType = "release";
   # Use maintainers/scripts/update.nix to update the version and all related hashes or
   # change the hashes in extpack.nix and guest-additions/default.nix as well manually.
-  version = "6.1.40";
+  version = "7.0.6";
 in stdenv.mkDerivation {
   pname = "virtualbox";
   inherit version;
 
   src = fetchurl {
     url = "https://download.virtualbox.org/virtualbox/${version}/VirtualBox-${version}.tar.bz2";
-    sha256 = "bc857555d3e836ad9350a8f7b03bb54d2fdc04dddb2043d09813f4634bca4814";
+    sha256 = "f146d9a86a35af0abb010e628636fd800cb476cc2ce82f95b0c0ca876e1756ff";
   };
 
   outputs = [ "out" "modsrc" ];
 
-  nativeBuildInputs = [ pkg-config which docbook_xsl docbook_xml_dtd_43 ]
+  nativeBuildInputs = [ pkg-config which docbook_xsl docbook_xml_dtd_43 yasm glslang ]
     ++ optional (!headless) wrapQtAppsHook;
 
   # Wrap manually because we wrap just a small number of executables.
@@ -94,7 +95,7 @@ in stdenv.mkDerivation {
       qtPluginPath = "${qtbase.bin}/${qtbase.qtPluginPrefix}:${qtsvg.bin}/${qtbase.qtPluginPrefix}:${qtwayland.bin}/${qtbase.qtPluginPrefix}";
     })
   ++ [
-    ./qtx11extras.patch
+    ./qt-dependency-paths.patch
     # https://github.com/NixOS/nixpkgs/issues/123851
     ./fix-audio-driver-loading.patch
   ];
@@ -130,14 +131,17 @@ in stdenv.mkDerivation {
     VBOX_JAVA_HOME                 := ${jdk}
     ''}
     ${optionalString (!headless) ''
+    VBOX_WITH_VBOXSDL              := 1
     PATH_QT5_X11_EXTRAS_LIB        := ${getLib qtx11extras}/lib
     PATH_QT5_X11_EXTRAS_INC        := ${getDev qtx11extras}/include
-    TOOL_QT5_LRC                   := ${getDev qttools}/bin/lrelease
+    PATH_QT5_TOOLS_LIB             := ${getLib qttools}/lib
+    PATH_QT5_TOOLS_INC             := ${getDev qttools}/include
     ''}
     ${optionalString enableWebService ''
     # fix gsoap missing zlib include and produce errors with --as-needed
     VBOX_GSOAP_CXX_LIBS := gsoapssl++ z
     ''}
+    TOOL_QT5_LRC                   := ${getDev qttools}/bin/lrelease
     LOCAL_CONFIG
 
     ./configure \
@@ -174,7 +178,7 @@ in stdenv.mkDerivation {
       -name src -o -exec cp -avt "$libexec" {} +
 
     mkdir -p $out/bin
-    for file in ${optionalString (!headless) "VirtualBox VBoxSDL rdesktop-vrdp"} ${optionalString enableWebService "vboxwebsrv"} VBoxManage VBoxBalloonCtrl VBoxHeadless; do
+    for file in ${optionalString (!headless) "VirtualBox VBoxSDL"} ${optionalString enableWebService "vboxwebsrv"} VBoxManage VBoxBalloonCtrl VBoxHeadless; do
         echo "Linking $file to /bin"
         test -x "$libexec/$file"
         ln -s "$libexec/$file" $out/bin/$file
diff --git a/pkgs/applications/virtualization/virtualbox/extpack.nix b/pkgs/applications/virtualization/virtualbox/extpack.nix
index 7092ffb33dee4..7e27e79d5dd73 100644
--- a/pkgs/applications/virtualization/virtualbox/extpack.nix
+++ b/pkgs/applications/virtualization/virtualbox/extpack.nix
@@ -12,7 +12,7 @@ fetchurl rec {
     # Manually sha256sum the extensionPack file, must be hex!
     # Thus do not use `nix-prefetch-url` but instead plain old `sha256sum`.
     # Checksums can also be found at https://www.virtualbox.org/download/hashes/${version}/SHA256SUMS
-    let value = "29cf8410e2514ea4393f63f5e955b8311787873679fc23ae9a897fb70ef3f84a";
+    let value = "292961aa8723b54f96f89f6d8abf7d8e29259d94b7de831dbffb9ae15d346434";
     in assert (builtins.stringLength value) == 64; value;
 
   meta = {
diff --git a/pkgs/applications/virtualization/virtualbox/guest-additions/default.nix b/pkgs/applications/virtualization/virtualbox/guest-additions/default.nix
index 1ff7f0abebaa4..0601aa3e44a20 100644
--- a/pkgs/applications/virtualization/virtualbox/guest-additions/default.nix
+++ b/pkgs/applications/virtualization/virtualbox/guest-additions/default.nix
@@ -23,7 +23,7 @@ in stdenv.mkDerivation rec {
 
   src = fetchurl {
     url = "http://download.virtualbox.org/virtualbox/${version}/VBoxGuestAdditions_${version}.iso";
-    sha256 = "d456c559926f1a8fdd7259056e0a50f12339fd494122cf30db7736e2032970c6";
+    sha256 = "21e0f407d2a4f5c286084a70718aa20235ea75969eca0cab6cfab43a3499a010";
   };
 
   KERN_DIR = "${kernel.dev}/lib/modules/${kernel.modDirVersion}/build";
diff --git a/pkgs/applications/virtualization/virtualbox/qtx11extras.patch b/pkgs/applications/virtualization/virtualbox/qt-dependency-paths.patch
index a3aa98b081d16..ae5493a327d6e 100644
--- a/pkgs/applications/virtualization/virtualbox/qtx11extras.patch
+++ b/pkgs/applications/virtualization/virtualbox/qt-dependency-paths.patch
@@ -7,10 +7,10 @@ index 71b96a3..73391f0 100644
    endif
   else
 -  $(eval $(target)_LIBS    += $(foreach module,$(qt_modules), $(PATH_SDK_QT5_LIB)/lib$(qt_prefix)Qt5$(module)$(qt_infix)$(SUFF_DLL)) )
-+  $(eval $(target)_LIBS    += $(foreach module,$(qt_modules), $(if $(filter X11Extras,$(module)),$(PATH_QT5_X11_EXTRAS_LIB),$(PATH_SDK_QT5_LIB))/lib$(qt_prefix)Qt5$(module)$(qt_infix)$(SUFF_DLL)) )
++  $(eval $(target)_LIBS    += $(foreach module,$(qt_modules), $(if $(filter Help,$(module)),$(PATH_QT5_TOOLS_LIB),$(if $(filter X11Extras,$(module)),$(PATH_QT5_X11_EXTRAS_LIB),$(PATH_SDK_QT5_LIB)))/lib$(qt_prefix)Qt5$(module)$(qt_infix)$(SUFF_DLL)) )
   endif
 - $(eval $(target)_INCS     += $(addprefix $(PATH_SDK_QT5_INC)/Qt,$(qt_modules)) $(PATH_SDK_QT5_INC) )
-+ $(eval $(target)_INCS     += $(addprefix $(PATH_SDK_QT5_INC)/Qt,$(qt_modules)) $(PATH_SDK_QT5_INC) $(PATH_QT5_X11_EXTRAS_INC)/QtX11Extras )
++ $(eval $(target)_INCS     += $(addprefix $(PATH_SDK_QT5_INC)/Qt,$(qt_modules)) $(PATH_SDK_QT5_INC) $(PATH_QT5_X11_EXTRAS_INC)/QtX11Extras $(PATH_QT5_TOOLS_INC))
  endif
  $(eval $(target)_DEFS      += $(foreach module,$(toupper $(qt_modules)), QT_$(module)_LIB) )
 
diff --git a/pkgs/applications/window-managers/phosh/default.nix b/pkgs/applications/window-managers/phosh/default.nix
index 5b4480499d95d..adb22269f04c3 100644
--- a/pkgs/applications/window-managers/phosh/default.nix
+++ b/pkgs/applications/window-managers/phosh/default.nix
@@ -141,7 +141,7 @@ stdenv.mkDerivation rec {
     homepage = "https://gitlab.gnome.org/World/Phosh/phosh";
     changelog = "https://gitlab.gnome.org/World/Phosh/phosh/-/blob/v${version}/debian/changelog";
     license = licenses.gpl3Plus;
-    maintainers = with maintainers; [ masipcat zhaofengli ];
+    maintainers = with maintainers; [ masipcat tomfitzhenry zhaofengli ];
     platforms = platforms.linux;
   };
 }
diff --git a/pkgs/data/fonts/nerdfonts/default.nix b/pkgs/data/fonts/nerdfonts/default.nix
index 7c4db29467fbb..dbbb26971baa6 100644
--- a/pkgs/data/fonts/nerdfonts/default.nix
+++ b/pkgs/data/fonts/nerdfonts/default.nix
@@ -59,6 +59,7 @@ stdenv.mkDerivation rec {
       rm -rfv $out/share/fonts/truetype/NerdFonts/*Windows\ Compatible.*
     ''}
   '';
+  passthru.updateScript = ./update.sh;
 
   meta = with lib; {
     description = "Iconic font aggregator, collection, & patcher. 3,600+ icons, 50+ patched fonts";
diff --git a/pkgs/data/fonts/nerdfonts/update.sh b/pkgs/data/fonts/nerdfonts/update.sh
index b4c4aaa7cf2b7..53d240feddd8e 100755
--- a/pkgs/data/fonts/nerdfonts/update.sh
+++ b/pkgs/data/fonts/nerdfonts/update.sh
@@ -5,9 +5,14 @@ latest_release=$(curl --silent https://api.github.com/repos/ryanoasis/nerd-fonts
 version=$(jq -r '.tag_name' <<<"$latest_release")
 
 dirname="$(dirname "$0")"
-echo \""${version#v}"\" >"$dirname/version.nix"
-
-echo Using version "$version"
+echo \""${version#v}"\" >"$dirname/version-new.nix"
+if diff -q "$dirname/version-new.nix" "$dirname/version.nix"; then
+    echo No new version available, current: $version
+    exit 0
+else
+    echo Updated to version "$version"
+    mv "$dirname/version-new.nix" "$dirname/version.nix"
+fi
 
 printf '{\n' > "$dirname/shas.nix"
 
diff --git a/pkgs/data/misc/v2ray-geoip/default.nix b/pkgs/data/misc/v2ray-geoip/default.nix
index a6b3aa6ec302f..ab972e0ce9949 100644
--- a/pkgs/data/misc/v2ray-geoip/default.nix
+++ b/pkgs/data/misc/v2ray-geoip/default.nix
@@ -2,13 +2,13 @@
 
 stdenv.mkDerivation rec {
   pname = "v2ray-geoip";
-  version = "202302230047";
+  version = "202303020053";
 
   src = fetchFromGitHub {
     owner = "v2fly";
     repo = "geoip";
-    rev = "8ae031e49fecaa0ef8d3e2501741cf2cb12e3eac";
-    sha256 = "sha256-5p3u9/v32bMEhAXgu/v5ooiis0Nt4peVPeCA9o0AKv8=";
+    rev = "c002daa9332a673ce2fabe61eb9c45dc6a54f3fa";
+    sha256 = "sha256-Pfny59HitidyXgXaI/1V3UNdM18X0+vVCyKP4qkL1rY=";
   };
 
   installPhase = ''
diff --git a/pkgs/desktops/pantheon/desktop/wingpanel-indicators/network/default.nix b/pkgs/desktops/pantheon/desktop/wingpanel-indicators/network/default.nix
index 58844ccad7832..d5bb01cecb9b8 100644
--- a/pkgs/desktops/pantheon/desktop/wingpanel-indicators/network/default.nix
+++ b/pkgs/desktops/pantheon/desktop/wingpanel-indicators/network/default.nix
@@ -1,6 +1,7 @@
 { lib
 , stdenv
 , fetchFromGitHub
+, fetchpatch
 , nix-update-script
 , substituteAll
 , pkg-config
@@ -17,15 +18,24 @@
 
 stdenv.mkDerivation rec {
   pname = "wingpanel-indicator-network";
-  version = "2.3.4";
+  version = "7.0.1";
 
   src = fetchFromGitHub {
     owner = "elementary";
     repo = pname;
     rev = version;
-    sha256 = "sha256-j6USKTpiktDKH7jBV1FFg8f3HeNwEkMp3HxG/MeL41g=";
+    sha256 = "sha256-pz2sWN33d20/fMByR+XrNz2lxPdgCA6vxism3E/Fh/I=";
   };
 
+  patches = [
+    # PopoverWidget: fix flowbox child focus
+    # https://github.com/elementary/wingpanel-indicator-network/pull/288
+    (fetchpatch {
+      url = "https://github.com/elementary/wingpanel-indicator-network/commit/88db9004249334e1316321e0373a3065900fe6f1.patch";
+      sha256 = "sha256-rpAULo4qVPO3yr7cBVeKyT7L43zHVEdYLJD4x0ukBs4=";
+    })
+  ];
+
   nativeBuildInputs = [
     meson
     ninja
diff --git a/pkgs/desktops/pantheon/services/xdg-desktop-portal-pantheon/default.nix b/pkgs/desktops/pantheon/services/xdg-desktop-portal-pantheon/default.nix
index a7e0fae873f7a..8be2adad40f4f 100644
--- a/pkgs/desktops/pantheon/services/xdg-desktop-portal-pantheon/default.nix
+++ b/pkgs/desktops/pantheon/services/xdg-desktop-portal-pantheon/default.nix
@@ -6,25 +6,23 @@
 , ninja
 , pkg-config
 , vala
-, wrapGAppsHook
+, wrapGAppsHook4
 , glib
-, granite
-, gtk3
-, libhandy
+, granite7
+, gtk4
 , systemd
-, vte
 , xorg
 }:
 
 stdenv.mkDerivation rec {
   pname = "xdg-desktop-portal-pantheon";
-  version = "1.2.0";
+  version = "7.0.0";
 
   src = fetchFromGitHub {
     owner = "elementary";
     repo = "portals";
     rev = version;
-    sha256 = "sha256-DPCBC3/MJxy9d77ZYzK68FwN8kbyo7guYrkZC+onRBw=";
+    sha256 = "sha256-Rfo9Z5rCJgk36Db3ce8dYBJswy8owjvRMrJVB/RfwyI=";
   };
 
   nativeBuildInputs = [
@@ -32,16 +30,14 @@ stdenv.mkDerivation rec {
     ninja
     pkg-config
     vala
-    wrapGAppsHook
+    wrapGAppsHook4
   ];
 
   buildInputs = [
     glib
-    granite
-    gtk3
-    libhandy
+    granite7
+    gtk4
     systemd
-    vte
     xorg.libX11
   ];
 
diff --git a/pkgs/development/interpreters/erlang/R24.nix b/pkgs/development/interpreters/erlang/R24.nix
index 58df443f96ac3..747c2a90cb7f2 100644
--- a/pkgs/development/interpreters/erlang/R24.nix
+++ b/pkgs/development/interpreters/erlang/R24.nix
@@ -1,6 +1,6 @@
 { mkDerivation }:
 
 mkDerivation {
-  version = "24.3.4.8";
-  sha256 = "sha256-NwnGqFEhVi96fEPLKoAqJgvWNZEsRGtE/3HP0eRthuA=";
+  version = "24.3.4.9";
+  sha256 = "sha256-toM2AoPAle+eNKg0to3r/EYT2taJ9OwKvde4Jr++ZE0=";
 }
diff --git a/pkgs/development/interpreters/python/cpython/default.nix b/pkgs/development/interpreters/python/cpython/default.nix
index 11b6fcbe8cf17..0afdba9983e02 100644
--- a/pkgs/development/interpreters/python/cpython/default.nix
+++ b/pkgs/development/interpreters/python/cpython/default.nix
@@ -17,6 +17,7 @@
 , libxcrypt
 , self
 , configd
+, darwin
 , autoreconfHook
 , autoconf-archive
 , pkg-config
@@ -41,6 +42,7 @@
 , stripBytecode ? true
 , includeSiteCustomize ? true
 , static ? stdenv.hostPlatform.isStatic
+, enableFramework ? false
 , enableOptimizations ? false
 # enableNoSemanticInterposition is a subset of the enableOptimizations flag that doesn't harm reproducibility.
 # clang starts supporting `-fno-sematic-interposition` with version 10
@@ -65,6 +67,8 @@ assert x11Support -> tcl != null
 
 assert bluezSupport -> bluez != null;
 
+assert enableFramework -> stdenv.isDarwin;
+
 assert lib.assertMsg (reproducibleBuild -> stripBytecode)
   "Deterministic builds require stripping bytecode.";
 
@@ -84,6 +88,8 @@ let
   buildPackages = pkgsBuildHost;
   inherit (passthru) pythonForBuild;
 
+  inherit (darwin.apple_sdk.frameworks) Cocoa;
+
   tzdataSupport = tzdata != null && passthru.pythonAtLeast "3.9";
 
   passthru = let
@@ -125,6 +131,8 @@ let
     ++ optionals x11Support [ tcl tk libX11 xorgproto ]
     ++ optionals (bluezSupport && stdenv.isLinux) [ bluez ]
     ++ optionals stdenv.isDarwin [ configd ])
+
+    ++ optionals enableFramework [ Cocoa ]
     ++ optionals tzdataSupport [ tzdata ];  # `zoneinfo` module
 
   hasDistutilsCxxPatch = !(stdenv.cc.isGNU or false);
@@ -308,8 +316,10 @@ in with passthru; stdenv.mkDerivation {
     "--without-ensurepip"
     "--with-system-expat"
     "--with-system-ffi"
-  ] ++ optionals (!static) [
+  ] ++ optionals (!static && !enableFramework) [
     "--enable-shared"
+  ] ++ optionals enableFramework [
+    "--enable-framework=${placeholder "out"}/Library/Frameworks"
   ] ++ optionals enableOptimizations [
     "--enable-optimizations"
   ] ++ optionals enableLTO [
@@ -390,7 +400,11 @@ in with passthru; stdenv.mkDerivation {
     ] ++ optionals tzdataSupport [
       tzdata
     ]);
-  in ''
+  in lib.optionalString enableFramework ''
+    for dir in include lib share; do
+      ln -s $out/Library/Frameworks/Python.framework/Versions/Current/$dir $out/$dir
+    done
+  '' + ''
     # needed for some packages, especially packages that backport functionality
     # to 2.x from 3.x
     for item in $out/lib/${libPrefix}/test/*; do
@@ -487,7 +501,7 @@ in with passthru; stdenv.mkDerivation {
   # Enforce that we don't have references to the OpenSSL -dev package, which we
   # explicitly specify in our configure flags above.
   disallowedReferences =
-    lib.optionals (openssl' != null && !static) [ openssl'.dev ]
+    lib.optionals (openssl' != null && !static && !enableFramework) [ openssl'.dev ]
     ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
     # Ensure we don't have references to build-time packages.
     # These typically end up in shebangs.
@@ -521,7 +535,7 @@ in with passthru; stdenv.mkDerivation {
       high level dynamic data types.
     '';
     license = licenses.psfl;
-    platforms = with platforms; linux ++ darwin;
+    platforms = platforms.linux ++ platforms.darwin;
     maintainers = with maintainers; [ fridh ];
   };
 }
diff --git a/pkgs/development/libraries/fizz/default.nix b/pkgs/development/libraries/fizz/default.nix
index 182f4fba56e32..6ab107497fda2 100644
--- a/pkgs/development/libraries/fizz/default.nix
+++ b/pkgs/development/libraries/fizz/default.nix
@@ -19,13 +19,13 @@
 
 stdenv.mkDerivation rec {
   pname = "fizz";
-  version = "2023.02.20.00";
+  version = "2023.02.27.00";
 
   src = fetchFromGitHub {
     owner = "facebookincubator";
     repo = "fizz";
     rev = "v${version}";
-    hash = "sha256-qgp0E/xCbvMIndwUkqsvZuFY7333NviOkljqiMOhKtw=";
+    hash = "sha256-zb3O5YHQc+1cPcL0K3FwhMfr+/KFQU7SDVT1bEITF6E=";
   };
 
   nativeBuildInputs = [ cmake ];
diff --git a/pkgs/development/libraries/hipblas/default.nix b/pkgs/development/libraries/hipblas/default.nix
index e72c2eac1f861..e8402c0d05bd4 100644
--- a/pkgs/development/libraries/hipblas/default.nix
+++ b/pkgs/development/libraries/hipblas/default.nix
@@ -18,7 +18,7 @@
 # Can also use cuBLAS
 stdenv.mkDerivation (finalAttrs: {
   pname = "hipblas";
-  version = "5.4.2";
+  version = "5.4.3";
 
   outputs = [
     "out"
diff --git a/pkgs/development/libraries/libdatovka/default.nix b/pkgs/development/libraries/libdatovka/default.nix
index aca25f73eddb2..8cddc9a9f37a3 100644
--- a/pkgs/development/libraries/libdatovka/default.nix
+++ b/pkgs/development/libraries/libdatovka/default.nix
@@ -15,11 +15,11 @@
 
 stdenv.mkDerivation rec {
   pname = "libdatovka";
-  version = "0.2.1";
+  version = "0.3.0";
 
   src = fetchurl {
     url = "https://gitlab.nic.cz/datovka/libdatovka/-/archive/v${version}/libdatovka-v${version}.tar.gz";
-    sha256 = "sha256-687d8ZD9zfMeo62YWCW5Kc0CXkKClxtbbwXR51pPwBE=";
+    sha256 = "sha256-aG7U8jP3pvOeFDetYVOx+cE78ys0uSkKNjSgB09ste8=";
   };
 
   patches = [
diff --git a/pkgs/development/libraries/libsodium/default.nix b/pkgs/development/libraries/libsodium/default.nix
index bc8a2ced7cd5a..14e730d69e52f 100644
--- a/pkgs/development/libraries/libsodium/default.nix
+++ b/pkgs/development/libraries/libsodium/default.nix
@@ -1,11 +1,13 @@
-{ lib, stdenv, fetchurl, autoreconfHook }:
+{ lib, stdenv, fetchurl, autoreconfHook
+, testers
+}:
 
-stdenv.mkDerivation rec {
+stdenv.mkDerivation (finalAttrs: {
   pname = "libsodium";
   version = "1.0.18";
 
   src = fetchurl {
-    url = "https://download.libsodium.org/libsodium/releases/${pname}-${version}.tar.gz";
+    url = "https://download.libsodium.org/libsodium/releases/${finalAttrs.pname}-${finalAttrs.version}.tar.gz";
     sha256 = "1h9ncvj23qbbni958knzsli8dvybcswcjbx0qjjgi922nf848l3g";
   };
 
@@ -26,11 +28,14 @@ stdenv.mkDerivation rec {
 
   doCheck = true;
 
+  passthru.tests.pkg-config = testers.testMetaPkgConfig finalAttrs.finalPackage;
+
   meta = with lib; {
     description = "A modern and easy-to-use crypto library";
     homepage = "http://doc.libsodium.org/";
     license = licenses.isc;
     maintainers = with maintainers; [ raskin ];
+    pkgConfigModules = [ "libsodium" ];
     platforms = platforms.all;
   };
-}
+})
diff --git a/pkgs/development/libraries/mpdecimal/default.nix b/pkgs/development/libraries/mpdecimal/default.nix
index 5fa03b7083e2b..3a2e4b1fe7d09 100644
--- a/pkgs/development/libraries/mpdecimal/default.nix
+++ b/pkgs/development/libraries/mpdecimal/default.nix
@@ -3,7 +3,7 @@
 stdenv.mkDerivation rec {
   pname = "mpdecimal";
   version = "2.5.1";
-  outputs = [ "out" "doc" ];
+  outputs = [ "out" "cxx" "doc" "dev" ];
 
   src = fetchurl {
     url = "https://www.bytereef.org/software/mpdecimal/releases/mpdecimal-${version}.tar.gz";
@@ -12,6 +12,14 @@ stdenv.mkDerivation rec {
 
   configureFlags = [ "LD=${stdenv.cc.targetPrefix}cc" ];
 
+  postInstall = ''
+    mkdir -p $cxx/lib
+    mv $out/lib/*c++* $cxx/lib
+
+    mkdir -p $dev/nix-support
+    echo -n $cxx >> $dev/nix-support/propagated-build-inputs
+  '';
+
   meta = {
     description = "Library for arbitrary precision decimal floating point arithmetic";
 
diff --git a/pkgs/development/libraries/qrupdate/default.nix b/pkgs/development/libraries/qrupdate/default.nix
index d3d821ba1db18..bc4263928078b 100644
--- a/pkgs/development/libraries/qrupdate/default.nix
+++ b/pkgs/development/libraries/qrupdate/default.nix
@@ -1,54 +1,50 @@
 { stdenv
 , lib
-, fetchurl
+, fetchFromGitHub
 , gfortran
 , blas
+, cmake
 , lapack
 , which
 }:
 
 stdenv.mkDerivation rec {
   pname = "qrupdate";
-  version = "1.1.2";
-  src = fetchurl {
-    url = "mirror://sourceforge/qrupdate/${pname}-${version}.tar.gz";
-    sha256 = "024f601685phcm1pg8lhif3lpy5j9j0k6n0r46743g4fvh8wg8g2";
+  version = "1.1.5";
+
+  src = fetchFromGitHub {
+    owner = "mpimd-csc";
+    repo = "qrupdate-ng";
+    rev = "v${version}";
+    hash = "sha256-dHxLPrN00wwozagY2JyfZkD3sKUD2+BcnbjNgZepzFg=";
   };
 
-  preBuild =
-    # Check that blas and lapack are compatible
-    assert (blas.isILP64 == lapack.isILP64);
-  # We don't have structuredAttrs yet implemented, and we need to use space
-  # seprated values in makeFlags, so only this works.
-  ''
-    makeFlagsArray+=(
-      "LAPACK=-L${lapack}/lib -llapack"
-      "BLAS=-L${blas}/lib -lblas"
-      "PREFIX=${placeholder "out"}"
-      "FFLAGS=${toString ([
-        "-std=legacy"
-      ] ++ lib.optionals blas.isILP64 [
-        # If another application intends to use qrupdate compiled with blas with
-        # 64 bit support, it should add this to it's FFLAGS as well. See (e.g):
-        # https://savannah.gnu.org/bugs/?50339
-        "-fdefault-integer-8"
-      ])}"
-    )
-  '';
+  cmakeFlags = assert (blas.isILP64 == lapack.isILP64); [
+    "-DCMAKE_Fortran_FLAGS=${toString ([
+      "-std=legacy"
+    ] ++ lib.optionals blas.isILP64 [
+      # If another application intends to use qrupdate compiled with blas with
+      # 64-bit support, it should add this to its FFLAGS as well. See e.g.:
+      # https://savannah.gnu.org/bugs/?50339
+      "-fdefault-integer-8"
+    ])}"
+  ];
 
   doCheck = true;
 
-  checkTarget = "test";
-
-  buildFlags = [ "lib" "solib" ];
-
-  installTargets = lib.optionals stdenv.isDarwin [ "install-staticlib" "install-shlib" ];
-
-  nativeBuildInputs = [ which gfortran ];
+  nativeBuildInputs = [
+    cmake
+    which
+    gfortran
+  ];
+  buildInputs = [
+    blas
+    lapack
+  ];
 
   meta = with lib; {
     description = "Library for fast updating of qr and cholesky decompositions";
-    homepage = "https://sourceforge.net/projects/qrupdate/";
+    homepage = "https://github.com/mpimd-csc/qrupdate-ng";
     license = licenses.gpl3Plus;
     maintainers = with maintainers; [ doronbehar ];
     platforms = platforms.unix;
diff --git a/pkgs/development/libraries/rccl/default.nix b/pkgs/development/libraries/rccl/default.nix
index d941cb19998e1..b3aaaff82f080 100644
--- a/pkgs/development/libraries/rccl/default.nix
+++ b/pkgs/development/libraries/rccl/default.nix
@@ -13,7 +13,7 @@
 
 stdenv.mkDerivation (finalAttrs: {
   pname = "rccl";
-  version = "5.4.2";
+  version = "5.4.3";
 
   outputs = [
     "out"
diff --git a/pkgs/development/libraries/rocksdb/default.nix b/pkgs/development/libraries/rocksdb/default.nix
index 4b7bd0b9fd945..ffad392c6216a 100644
--- a/pkgs/development/libraries/rocksdb/default.nix
+++ b/pkgs/development/libraries/rocksdb/default.nix
@@ -19,13 +19,13 @@
 
 stdenv.mkDerivation rec {
   pname = "rocksdb";
-  version = "7.9.2";
+  version = "7.10.2";
 
   src = fetchFromGitHub {
     owner = "facebook";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-5P7IqJ14EZzDkbjaBvbix04ceGGdlWBuVFH/5dpD5VM=";
+    sha256 = "sha256-U2ReSrJwjAXUdRmwixC0DQXht/h/6rV8SOf5e2NozIs=";
   };
 
   nativeBuildInputs = [ cmake ninja ];
diff --git a/pkgs/development/libraries/rocprofiler/default.nix b/pkgs/development/libraries/rocprofiler/default.nix
index 7dbde6e1633eb..e7e0c9fed6502 100644
--- a/pkgs/development/libraries/rocprofiler/default.nix
+++ b/pkgs/development/libraries/rocprofiler/default.nix
@@ -11,7 +11,7 @@
 
 stdenv.mkDerivation (finalAttrs: {
   pname = "rocprofiler";
-  version = "5.4.2";
+  version = "5.4.3";
 
   src = fetchFromGitHub {
     owner = "ROCm-Developer-Tools";
diff --git a/pkgs/development/libraries/wxsqlite3/default.nix b/pkgs/development/libraries/wxsqlite3/default.nix
index b610e903ba303..f09b5277d6a1f 100644
--- a/pkgs/development/libraries/wxsqlite3/default.nix
+++ b/pkgs/development/libraries/wxsqlite3/default.nix
@@ -12,13 +12,13 @@
 
 stdenv.mkDerivation rec {
   pname = "wxsqlite3";
-  version = "4.9.1";
+  version = "4.9.2";
 
   src = fetchFromGitHub {
     owner = "utelle";
     repo = "wxsqlite3";
     rev = "v${version}";
-    hash = "sha256-n7m94QdQf0s5I9z8ScpCu+r2h7XOKO2F1OX44IjdBn4=";
+    hash = "sha256-LYQky0tIzZrxroa4korlE+RK2MJTVLgw3T2kGZOyY2s=";
   };
 
   nativeBuildInputs = [ autoreconfHook ];
diff --git a/pkgs/development/python-modules/adb-enhanced/default.nix b/pkgs/development/python-modules/adb-enhanced/default.nix
index 79fc34cc0ecb4..7f6c2d1098019 100644
--- a/pkgs/development/python-modules/adb-enhanced/default.nix
+++ b/pkgs/development/python-modules/adb-enhanced/default.nix
@@ -9,15 +9,16 @@
 
 buildPythonPackage rec {
   pname = "adb-enhanced";
-  version = "2.5.14";
+  version = "2.5.16";
+  format = "setuptools";
 
-  disabled = pythonOlder "3.4";
+  disabled = pythonOlder "3.7";
 
   src = fetchFromGitHub {
     owner = "ashishb";
     repo = pname;
-    rev = version;
-    sha256 = "sha256-GaPOYBQEGI40MutjjY8exABqGge2p/buk9v+NcZ5oJs=";
+    rev = "refs/tags/${version}";
+    hash = "sha256-+CMXKg3LLxEXGcFQ9zSqy/1HPZS9MsQ1fZxClJ0Vrnw=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/angrop/default.nix b/pkgs/development/python-modules/angrop/default.nix
index eb8c6706b5d55..69b06e58784d4 100644
--- a/pkgs/development/python-modules/angrop/default.nix
+++ b/pkgs/development/python-modules/angrop/default.nix
@@ -2,16 +2,15 @@
 , angr
 , buildPythonPackage
 , fetchFromGitHub
-, fetchpatch
 , progressbar
 , pythonOlder
-, pythonRelaxDepsHook
+, setuptools
 , tqdm
 }:
 
 buildPythonPackage rec {
   pname = "angrop";
-  version = "9.2.7";
+  version = "9.2.8";
   format = "pyproject";
 
   disabled = pythonOlder "3.6";
@@ -19,20 +18,12 @@ buildPythonPackage rec {
   src = fetchFromGitHub {
     owner = "angr";
     repo = pname;
-    rev = "v${version}";
-    hash = "sha256-wIPk7Cz7FSPviPFBSLrBjLr9M0o3pyoJM7wiAhHrg9Q=";
+    rev = "refs/tags/v${version}";
+    hash = "sha256-zmWdGbFzwLDP7MUqEprZcIgA7lAdCrafWYohAehJyh0=";
   };
 
-  patches = [
-    (fetchpatch {
-      name = "compatibility-with-newer-angr.patch";
-      url = "https://github.com/angr/angrop/commit/23194ee4ecdcb7a7390ec04eb133786ec3f807b1.patch";
-      hash = "sha256-n9/oPUblUHSk81qwU129rnNOjsNViaegp6454CaDo+8=";
-    })
-  ];
-
   nativeBuildInputs = [
-    pythonRelaxDepsHook
+    setuptools
   ];
 
   propagatedBuildInputs = [
@@ -41,10 +32,6 @@ buildPythonPackage rec {
     tqdm
   ];
 
-  pythonRelaxDeps = [
-    "angr"
-  ];
-
   # Tests have additional requirements, e.g., angr binaries
   # cle is executing the tests with the angr binaries already and is a requirement of angr
   doCheck = false;
diff --git a/pkgs/development/python-modules/ansible-lint/default.nix b/pkgs/development/python-modules/ansible-lint/default.nix
index 1b61ab6afc4a6..ebb30630eed05 100644
--- a/pkgs/development/python-modules/ansible-lint/default.nix
+++ b/pkgs/development/python-modules/ansible-lint/default.nix
@@ -22,14 +22,14 @@
 
 buildPythonPackage rec {
   pname = "ansible-lint";
-  version = "6.13.1";
+  version = "6.14.0";
   format = "pyproject";
 
   disabled = pythonOlder "3.8";
 
   src = fetchPypi {
     inherit pname version;
-    hash = "sha256-Q1wStP2I2oFa9oIfO/iwTrtlGBHaiaEcnRkLr/Ibrao=";
+    hash = "sha256-5VFXrBi7Pj3+9CQ4HAKZxnqsYz3lqhH3+A/gpQgEHXo=";
   };
 
   postPatch = ''
diff --git a/pkgs/development/python-modules/beancount-black/default.nix b/pkgs/development/python-modules/beancount-black/default.nix
index eb082f5b9537d..103574a243541 100644
--- a/pkgs/development/python-modules/beancount-black/default.nix
+++ b/pkgs/development/python-modules/beancount-black/default.nix
@@ -10,7 +10,7 @@
 
 buildPythonPackage rec {
   pname = "beancount-black";
-  version = "0.1.13";
+  version = "0.1.14";
 
   disabled = pythonOlder "3.9";
   format = "pyproject";
@@ -19,7 +19,7 @@ buildPythonPackage rec {
     owner = "LaunchPlatform";
     repo = "beancount-black";
     rev = version;
-    sha256 = "sha256-jhcPR+5+e8d9cbcXC//xuBwmZ14xtXNlYtmH5yNSU0E=";
+    hash = "sha256-4ooMskwPJJLJBfPikaHJ4xuwR1x478ecYWZdIE0UAK8=";
   };
 
   buildInputs = [
diff --git a/pkgs/development/python-modules/ciscoconfparse/default.nix b/pkgs/development/python-modules/ciscoconfparse/default.nix
index 52cd1a8cf28a7..803cfb7b6fa08 100644
--- a/pkgs/development/python-modules/ciscoconfparse/default.nix
+++ b/pkgs/development/python-modules/ciscoconfparse/default.nix
@@ -13,7 +13,7 @@
 
 buildPythonPackage rec {
   pname = "ciscoconfparse";
-  version = "1.7.15";
+  version = "1.7.18";
   format = "pyproject";
 
   disabled = pythonOlder "3.7";
@@ -22,7 +22,7 @@ buildPythonPackage rec {
     owner = "mpenning";
     repo = pname;
     rev = "refs/tags/${version}";
-    hash = "sha256-oGvwtaIgVvvW8Oq/dZN+Zj/PESpqWALFYPia9yeilco=";
+    hash = "sha256-jWInSqvMuwYJTPqHnrYWhMH/HvaQc2dFRqQu4RGFr28=";
   };
 
   postPatch = ''
diff --git a/pkgs/development/python-modules/crate/default.nix b/pkgs/development/python-modules/crate/default.nix
index 5ba9a0f7695a5..4da552db399b6 100644
--- a/pkgs/development/python-modules/crate/default.nix
+++ b/pkgs/development/python-modules/crate/default.nix
@@ -3,7 +3,7 @@
 , buildPythonPackage
 , urllib3
 , geojson
-, isPy3k
+, pythonOlder
 , sqlalchemy
 , pytestCheckHook
 , pytz
@@ -12,12 +12,14 @@
 
 buildPythonPackage rec {
   pname = "crate";
-  version = "0.29.0";
-  disabled = !isPy3k;
+  version = "0.30.0";
+  format = "setuptools";
+
+  disabled = pythonOlder "3.7";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "sha256-SywW/b4DnVeSzzRiHbDaKTjcuwDnkwrK6vFfaQVIZhQ=";
+    hash = "sha256-8xraDCFZbpJZsh3sO5VlSHwnEfH4u4AJZkXA+L4TB60=";
   };
 
   propagatedBuildInputs = [
@@ -32,8 +34,15 @@ buildPythonPackage rec {
   ];
 
   disabledTests = [
-    # network access
+    # the following tests require network access
     "test_layer_from_uri"
+    "test_additional_settings"
+    "test_basic"
+    "test_cluster"
+    "test_default_settings"
+    "test_dynamic_http_port"
+    "test_environment_variables"
+    "test_verbosity"
   ];
 
   disabledTestPaths = [
@@ -44,6 +53,7 @@ buildPythonPackage rec {
   meta = with lib; {
     homepage = "https://github.com/crate/crate-python";
     description = "A Python client library for CrateDB";
+    changelog = "https://github.com/crate/crate-python/blob/${version}/CHANGES.txt";
     license = licenses.asl20;
     maintainers = with maintainers; [ doronbehar ];
   };
diff --git a/pkgs/development/python-modules/dask-awkward/default.nix b/pkgs/development/python-modules/dask-awkward/default.nix
new file mode 100644
index 0000000000000..6dd42fcc3c0ad
--- /dev/null
+++ b/pkgs/development/python-modules/dask-awkward/default.nix
@@ -0,0 +1,60 @@
+{ lib
+, buildPythonPackage
+, fetchFromGitHub
+, pythonOlder
+, awkward
+, dask
+, hatch-vcs
+, hatchling
+, pyarrow
+, pytestCheckHook
+}:
+
+buildPythonPackage rec {
+  pname = "dask-awkward";
+  version = "2023.1.0";
+  format = "pyproject";
+
+  disabled = pythonOlder "3.8";
+
+  src = fetchFromGitHub {
+    owner = "dask-contrib";
+    repo = pname;
+    rev = version;
+    hash = "sha256-q0mBd4yelnNL7rMWfilituo9h/xmLLLndSCBdY2egEQ=";
+  };
+
+  nativeBuildInputs = [
+    hatch-vcs
+    hatchling
+  ];
+
+  propagatedBuildInputs = [
+    awkward
+    dask
+  ];
+
+  SETUPTOOLS_SCM_PRETEND_VERSION = version;
+
+  checkInputs = [
+    pytestCheckHook
+    pyarrow
+  ];
+
+  pythonImportsCheck = [
+    "dask_awkward"
+  ];
+
+  pytestFlagsArray = [
+    # require internet
+    "--deselect=tests/test_parquet.py::test_remote_double"
+    "--deselect=tests/test_parquet.py::test_remote_single"
+  ];
+
+  meta = with lib; {
+    description = "Native Dask collection for awkward arrays, and the library to use it";
+    homepage = "https://github.com/dask-contrib/dask-awkward";
+    license = licenses.bsd3;
+    maintainers = with maintainers; [ veprbl ];
+  };
+}
diff --git a/pkgs/development/python-modules/dtschema/default.nix b/pkgs/development/python-modules/dtschema/default.nix
new file mode 100644
index 0000000000000..f2212d8c9c787
--- /dev/null
+++ b/pkgs/development/python-modules/dtschema/default.nix
@@ -0,0 +1,52 @@
+{ lib
+, buildPythonPackage
+, fetchFromGitHub
+, jsonschema
+, pythonOlder
+, rfc3987
+, ruamel-yaml
+, setuptools-scm
+}:
+
+buildPythonPackage rec {
+  pname = "dtschema";
+  version = "2022.01";
+  format = "setuptools";
+
+  disabled = pythonOlder "3.7";
+
+  src = fetchFromGitHub {
+    owner = "devicetree-org";
+    repo = "dt-schema";
+    rev = "refs/tags/v${version}";
+    hash = "sha256-wwlXIM/eO3dII/qQpkAGLT3/15rBLi7ZiNtqYFf7Li4=";
+  };
+
+  SETUPTOOLS_SCM_PRETEND_VERSION = version;
+
+  nativeBuildInputs = [
+    setuptools-scm
+  ];
+
+  propagatedBuildInputs = [
+    jsonschema
+    rfc3987
+    ruamel-yaml
+  ];
+
+  # Module has no tests
+  doCheck = false;
+
+  pythonImportsCheck = [
+    "dtschema"
+  ];
+
+  meta = with lib; {
+    description = "Tooling for devicetree validation using YAML and jsonschema";
+    homepage = "https://github.com/devicetree-org/dt-schema/";
+    changelog = "https://github.com/devicetree-org/dt-schema/releases/tag/v${version}";
+    license = with licenses; [ bsd2 /* or */ gpl2Only ];
+    maintainers = with maintainers; [ sorki ];
+  };
+}
+
diff --git a/pkgs/development/python-modules/duckdb-engine/default.nix b/pkgs/development/python-modules/duckdb-engine/default.nix
index ea812cb9d5ea8..9ab39eabf9081 100644
--- a/pkgs/development/python-modules/duckdb-engine/default.nix
+++ b/pkgs/development/python-modules/duckdb-engine/default.nix
@@ -13,7 +13,7 @@
 
 buildPythonPackage rec {
   pname = "duckdb-engine";
-  version = "0.6.8";
+  version = "0.6.9";
   format = "pyproject";
 
   disabled = pythonOlder "3.7";
@@ -22,7 +22,7 @@ buildPythonPackage rec {
     repo = "duckdb_engine";
     owner = "Mause";
     rev = "refs/tags/v${version}";
-    hash = "sha256-Vb2sXZjhBZpZdemtGZ8dajB9Ziu/obLv80R63IH/hJg=";
+    hash = "sha256-F1Y7NXkNnCbCxc43gBN7bt+z0D0EwnzCyBKFzbq9KcA=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/development/python-modules/execnb/default.nix b/pkgs/development/python-modules/execnb/default.nix
new file mode 100644
index 0000000000000..22661c9ad07a5
--- /dev/null
+++ b/pkgs/development/python-modules/execnb/default.nix
@@ -0,0 +1,33 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, fastcore
+, traitlets
+, ipython
+, pythonOlder
+}:
+
+buildPythonPackage rec {
+  pname = "execnb";
+  version = "0.1.4";
+  format = "setuptools";
+  disabled = pythonOlder "3.6";
+
+  src = fetchPypi {
+    inherit pname version;
+    sha256 = "sha256-y9gSvzJA8Fsh56HbA8SszlozsBBfTLfgWGDXm9uSBvA=";
+  };
+
+  propagatedBuildInputs = [ fastcore traitlets ipython ];
+
+  # no real tests
+  doCheck = false;
+  pythonImportsCheck = [ "execnb" ];
+
+  meta = with lib; {
+    homepage = "https://github.com/fastai/execnb";
+    description = "Execute a jupyter notebook, fast, without needing jupyter";
+    license = licenses.asl20;
+    maintainers = with maintainers; [ rxiao ];
+  };
+}
diff --git a/pkgs/development/python-modules/fakeredis/default.nix b/pkgs/development/python-modules/fakeredis/default.nix
index d600e50d395b2..f721742a15a63 100644
--- a/pkgs/development/python-modules/fakeredis/default.nix
+++ b/pkgs/development/python-modules/fakeredis/default.nix
@@ -16,7 +16,7 @@
 
 buildPythonPackage rec {
   pname = "fakeredis";
-  version = "2.9.2";
+  version = "2.10.0";
   format = "pyproject";
 
   disabled = pythonOlder "3.7";
@@ -25,7 +25,7 @@ buildPythonPackage rec {
     owner = "dsoftwareinc";
     repo = "fakeredis-py";
     rev = "refs/tags/v${version}";
-    hash = "sha256-YwUNjEM0Lmj14fTqQXy78LRzlfffy7KZOTulufkeRZA=";
+    hash = "sha256-H1SeNlX/NqdewNY+rs5HLTK0dRnB1H+EQfzf2g/y1ek=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/development/python-modules/fastai/default.nix b/pkgs/development/python-modules/fastai/default.nix
new file mode 100644
index 0000000000000..dcb0b582ea74a
--- /dev/null
+++ b/pkgs/development/python-modules/fastai/default.nix
@@ -0,0 +1,53 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, fastprogress
+, fastcore
+, fastdownload
+, torch
+, torchvision
+, matplotlib
+, pillow
+, scikit-learn
+, scipy
+, spacy
+, pandas
+, requests
+, pythonOlder
+}:
+
+buildPythonPackage rec {
+  pname = "fastai";
+  version = "2.7.10";
+  format = "setuptools";
+  disabled = pythonOlder "3.6";
+
+  src = fetchPypi {
+    inherit pname version;
+    sha256 = "sha256-zO9qGFrjpjfvybzZ/qjki3X0VNDrrTtt9CbyL64gA50=";
+  };
+
+  propagatedBuildInputs = [
+    fastprogress
+    fastcore
+    fastdownload
+    torchvision
+    matplotlib
+    pillow
+    scikit-learn
+    scipy
+    spacy
+    pandas
+    requests
+  ];
+
+  doCheck = false;
+  pythonImportsCheck = [ "fastai" ];
+
+  meta = with lib; {
+    homepage = "https://github.com/fastai/fastai";
+    description = "The fastai deep learning library";
+    license = licenses.asl20;
+    maintainers = with maintainers; [ rxiao ];
+  };
+}
diff --git a/pkgs/development/python-modules/fastdownload/default.nix b/pkgs/development/python-modules/fastdownload/default.nix
new file mode 100644
index 0000000000000..4a60a70429868
--- /dev/null
+++ b/pkgs/development/python-modules/fastdownload/default.nix
@@ -0,0 +1,32 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, fastprogress
+, fastcore
+, pythonOlder
+}:
+
+buildPythonPackage rec {
+  pname = "fastdownload";
+  version = "0.0.6";
+  format = "setuptools";
+  disabled = pythonOlder "3.6";
+
+  src = fetchPypi {
+    inherit pname version;
+    sha256 = "sha256-1ayb0zx8rFKDgqlq/tVVLqDkh47T5jofHt53r8bWr30=";
+  };
+
+  propagatedBuildInputs = [ fastprogress fastcore ];
+
+  # no real tests
+  doCheck = false;
+  pythonImportsCheck = [ "fastdownload" ];
+
+  meta = with lib; {
+    homepage = "https://github.com/fastai/fastdownload";
+    description = "Easily download, verify, and extract archives";
+    license = licenses.asl20;
+    maintainers = with maintainers; [ rxiao ];
+  };
+}
diff --git a/pkgs/development/python-modules/google-cloud-speech/default.nix b/pkgs/development/python-modules/google-cloud-speech/default.nix
index 2de8604ccfb46..7116db6ca0c14 100644
--- a/pkgs/development/python-modules/google-cloud-speech/default.nix
+++ b/pkgs/development/python-modules/google-cloud-speech/default.nix
@@ -13,14 +13,14 @@
 
 buildPythonPackage rec {
   pname = "google-cloud-speech";
-  version = "2.17.3";
+  version = "2.18.0";
   format = "setuptools";
 
   disabled = pythonOlder "3.7";
 
   src = fetchPypi {
     inherit pname version;
-    hash = "sha256-RgGL9SXlmI4mcuL8phcPOCiGo2xhzRZbnpRw72U1KzI=";
+    hash = "sha256-MCFfkPtTc7TdN+WF4tcnHq+Kun5vQDhdSSsW97/cDzA=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/homematicip/default.nix b/pkgs/development/python-modules/homematicip/default.nix
index c929aa197642a..2f384ab5c078c 100644
--- a/pkgs/development/python-modules/homematicip/default.nix
+++ b/pkgs/development/python-modules/homematicip/default.nix
@@ -17,7 +17,7 @@
 
 buildPythonPackage rec {
   pname = "homematicip";
-  version = "1.0.13";
+  version = "1.0.14";
   format = "setuptools";
 
   disabled = pythonOlder "3.8";
@@ -26,7 +26,7 @@ buildPythonPackage rec {
     owner = "hahn-th";
     repo = "homematicip-rest-api";
     rev = "refs/tags/${version}";
-    hash = "sha256-bNVvQbwtef7Q0OBtR/8vsDDPkgGQgzdBC3QyoxLW3Wo=";
+    hash = "sha256-2tJoIknqcwEvX2mQsrSEEh45pEMpNfeefuXVKSJTwig=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/lsprotocol/default.nix b/pkgs/development/python-modules/lsprotocol/default.nix
index e2dcd7bb9a748..b447ab18cba50 100644
--- a/pkgs/development/python-modules/lsprotocol/default.nix
+++ b/pkgs/development/python-modules/lsprotocol/default.nix
@@ -13,7 +13,7 @@
 
 buildPythonPackage rec {
   pname = "lsprotocol";
-  version = "2022.0.0a9";
+  version = "2022.0.0a10";
   format = "pyproject";
 
   disabled = pythonOlder "3.7";
@@ -22,7 +22,7 @@ buildPythonPackage rec {
     owner = "microsoft";
     repo = pname;
     rev = "refs/tags/${version}";
-    hash = "sha256-6XecPKuBhwtkmZrGozzO+VEryI5wwy9hlvWE1oV6ajk=";
+    hash = "sha256-IAFNEWpBRVAGcJNIV1bog9K2nANRw/qJfCJ9+Wu/yJc=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/development/python-modules/mypy-boto3-builder/default.nix b/pkgs/development/python-modules/mypy-boto3-builder/default.nix
index 3c582230ee2e9..a02a6dce390db 100644
--- a/pkgs/development/python-modules/mypy-boto3-builder/default.nix
+++ b/pkgs/development/python-modules/mypy-boto3-builder/default.nix
@@ -17,7 +17,7 @@
 
 buildPythonPackage rec {
   pname = "mypy-boto3-builder";
-  version = "7.12.4";
+  version = "7.12.5";
   format = "pyproject";
 
   disabled = pythonOlder "3.10";
@@ -26,7 +26,7 @@ buildPythonPackage rec {
     owner = "youtype";
     repo = "mypy_boto3_builder";
     rev = "refs/tags/${version}";
-    hash = "sha256-X8ATnycG7MvzDNaMClvhyy4Qy4hvoNhn0sQ+s/JnX64=";
+    hash = "sha256-Ij01EExSc4pU8eC+JPhSB8YKXkspusMRgdPhdgbUEKk=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/development/python-modules/nbdev/default.nix b/pkgs/development/python-modules/nbdev/default.nix
new file mode 100644
index 0000000000000..c47508918a8da
--- /dev/null
+++ b/pkgs/development/python-modules/nbdev/default.nix
@@ -0,0 +1,36 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, fastprogress
+, fastcore
+, asttokens
+, astunparse
+, watchdog
+, execnb
+, ghapi
+, pythonOlder
+}:
+
+buildPythonPackage rec {
+  pname = "nbdev";
+  version = "2.3.11";
+  format = "setuptools";
+  disabled = pythonOlder "3.6";
+
+  src = fetchPypi {
+    inherit pname version;
+    sha256 = "sha256-ITMCmuAb1lXONbP5MREpk8vfNSztoTEmT87W1o+fbIU=";
+  };
+
+  propagatedBuildInputs = [ fastprogress fastcore asttokens astunparse watchdog execnb ghapi ];
+  # no real tests
+  doCheck = false;
+  pythonImportsCheck = [ "nbdev" ];
+
+  meta = with lib; {
+    homepage = "https://github.com/fastai/nbdev";
+    description = "Create delightful software with Jupyter Notebooks";
+    license = licenses.asl20;
+    maintainers = with maintainers; [ rxiao ];
+  };
+}
diff --git a/pkgs/development/python-modules/openapi-core/default.nix b/pkgs/development/python-modules/openapi-core/default.nix
index 496dceeaab487..987f5cf6b14d3 100644
--- a/pkgs/development/python-modules/openapi-core/default.nix
+++ b/pkgs/development/python-modules/openapi-core/default.nix
@@ -27,7 +27,7 @@
 
 buildPythonPackage rec {
   pname = "openapi-core";
-  version = "0.16.5";
+  version = "0.16.6";
   format = "pyproject";
 
   disabled = pythonOlder "3.7";
@@ -36,7 +36,7 @@ buildPythonPackage rec {
     owner = "p1c2u";
     repo = "openapi-core";
     rev = "refs/tags/${version}";
-    hash = "sha256-xXSZ9qxjmeIyYIWQubJbJxkXUdOu/WSSBddIWsVaH8k=";
+    hash = "sha256-cpWEZ+gX4deTxMQ5BG+Qh863jcqUkOlNSY3KtOwOcBo=";
   };
 
   postPatch = ''
diff --git a/pkgs/development/python-modules/peaqevcore/default.nix b/pkgs/development/python-modules/peaqevcore/default.nix
index 3333331e85f6c..be36f758f9415 100644
--- a/pkgs/development/python-modules/peaqevcore/default.nix
+++ b/pkgs/development/python-modules/peaqevcore/default.nix
@@ -6,14 +6,14 @@
 
 buildPythonPackage rec {
   pname = "peaqevcore";
-  version = "12.2.1";
+  version = "12.2.6";
   format = "setuptools";
 
   disabled = pythonOlder "3.7";
 
   src = fetchPypi {
     inherit pname version;
-    hash = "sha256-WOuKGVrNZzvY7F0Mvj3MjSdTu47c5Y11ySe1qorzlWE=";
+    hash = "sha256-IAqXp/d0f1khhNpkp4uQmxqJ4Xh8Nl87i+iMa3U9EDM=";
   };
 
   postPatch = ''
diff --git a/pkgs/development/python-modules/pyfibaro/default.nix b/pkgs/development/python-modules/pyfibaro/default.nix
index 759fe47221b7f..2cb78380c8fee 100644
--- a/pkgs/development/python-modules/pyfibaro/default.nix
+++ b/pkgs/development/python-modules/pyfibaro/default.nix
@@ -10,7 +10,7 @@
 
 buildPythonPackage rec {
   pname = "pyfibaro";
-  version = "0.6.8";
+  version = "0.6.9";
   format = "pyproject";
 
   disabled = pythonOlder "3.9";
@@ -19,7 +19,7 @@ buildPythonPackage rec {
     owner = "rappenze";
     repo = pname;
     rev = "refs/tags/${version}";
-    hash = "sha256-2BDVCukm2y4rZyIWozRWJ+pY2bI2A7Vpitjd8jSJoWQ=";
+    hash = "sha256-vyp+O5Oj1/OYALGb+ioXeFdlDveR8j5M9Z40QTC+sj4=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/development/python-modules/pyfritzhome/default.nix b/pkgs/development/python-modules/pyfritzhome/default.nix
index b2baa51466718..2cc602050c155 100644
--- a/pkgs/development/python-modules/pyfritzhome/default.nix
+++ b/pkgs/development/python-modules/pyfritzhome/default.nix
@@ -1,24 +1,23 @@
 { lib
 , buildPythonPackage
 , fetchFromGitHub
+, pytestCheckHook
 , pythonOlder
 , requests
-, nose
-, mock
 }:
 
 buildPythonPackage rec {
   pname = "pyfritzhome";
-  version = "0.6.7";
+  version = "0.6.8";
   format = "setuptools";
 
-  disabled = pythonOlder "3.6";
+  disabled = pythonOlder "3.7";
 
   src = fetchFromGitHub {
     owner = "hthiery";
     repo = "python-fritzhome";
-    rev = version;
-    hash = "sha256-cRG+Dm3KG6no3/OQCZkvISW1yE5azdDVTa5oTV1sRpk=";
+    rev = "refs/tags/${version}";
+    hash = "sha256-MIWRBwqVuS1iEuWxsE1yuGS2zHYVgnH2G4JJk7Yct6s=";
   };
 
   propagatedBuildInputs = [
@@ -26,14 +25,9 @@ buildPythonPackage rec {
   ];
 
   nativeCheckInputs = [
-    mock
-    nose
+    pytestCheckHook
   ];
 
-  checkPhase = ''
-    nosetests
-  '';
-
   pythonImportsCheck = [
     "pyfritzhome"
   ];
diff --git a/pkgs/development/python-modules/pykeyatome/default.nix b/pkgs/development/python-modules/pykeyatome/default.nix
index 0f6786c4c4cc6..363b078cad320 100644
--- a/pkgs/development/python-modules/pykeyatome/default.nix
+++ b/pkgs/development/python-modules/pykeyatome/default.nix
@@ -13,7 +13,7 @@
 
 buildPythonPackage rec {
   pname = "pykeyatome";
-  version = "2.1.1";
+  version = "2.1.2";
   format = "setuptools";
 
   disabled = pythonOlder "3.8";
@@ -22,7 +22,7 @@ buildPythonPackage rec {
     owner = "jugla";
     repo = "pyKeyAtome";
     rev = "refs/tags/V${version}";
-    hash = "sha256-/HfWPrpW4NowFmdmU2teIiex1O03bHemnUdhOoEDRgc=";
+    hash = "sha256-zRXUjekawf2/zTSlXqHVB02dDkb6HbU4NN6UBgl2rtg=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/pyoverkiz/default.nix b/pkgs/development/python-modules/pyoverkiz/default.nix
index 963290da2776a..b454381c47062 100644
--- a/pkgs/development/python-modules/pyoverkiz/default.nix
+++ b/pkgs/development/python-modules/pyoverkiz/default.nix
@@ -15,7 +15,7 @@
 
 buildPythonPackage rec {
   pname = "pyoverkiz";
-  version = "1.7.6";
+  version = "1.7.7";
   format = "pyproject";
 
   disabled = pythonOlder "3.7";
@@ -24,7 +24,7 @@ buildPythonPackage rec {
     owner = "iMicknl";
     repo = "python-overkiz-api";
     rev = "refs/tags/v${version}";
-    hash = "sha256-nmXOmoPH8w4Soj8lhI7wl3uYVmKw3xSuIkmCF0XI7RI=";
+    hash = "sha256-QYvnSFt0pJL3clDxN2axJUMU8M/maj3iSeUfVRgQGFg=";
   };
 
   postPatch = ''
diff --git a/pkgs/development/python-modules/pypykatz/default.nix b/pkgs/development/python-modules/pypykatz/default.nix
index 10c25dbe64367..59253178661cd 100644
--- a/pkgs/development/python-modules/pypykatz/default.nix
+++ b/pkgs/development/python-modules/pypykatz/default.nix
@@ -13,14 +13,14 @@
 
 buildPythonPackage rec {
   pname = "pypykatz";
-  version = "0.6.5";
+  version = "0.6.6";
   format = "setuptools";
 
   disabled = pythonOlder "3.7";
 
   src = fetchPypi {
     inherit pname version;
-    hash = "sha256-1cHQ05yKofZC3pz8JIZCjZmZasqSb/SSwJlg8ThIn1k=";
+    hash = "sha256-fPeEKTfRL142RIMSQxpByIAy09sXlmDjIATikc82Iuw=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/pysma/default.nix b/pkgs/development/python-modules/pysma/default.nix
index de1bba50740c4..605e1b5dfe1c1 100644
--- a/pkgs/development/python-modules/pysma/default.nix
+++ b/pkgs/development/python-modules/pysma/default.nix
@@ -9,14 +9,14 @@
 
 buildPythonPackage rec {
   pname = "pysma";
-  version = "0.7.3";
+  version = "0.7.4";
   format = "setuptools";
 
   disabled = pythonOlder "3.8";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "sha256-8LwxRiYUhKYZkrjDNcnOkssvfw/tZ6dj1GKZQ6UkqsQ=";
+    sha256 = "sha256-4u564tLk91duYv1IClHddur6t+Rbla/e9P0yWAxw2sw=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/pysml/default.nix b/pkgs/development/python-modules/pysml/default.nix
index 3ff7bee167cb7..ebada417bca3b 100644
--- a/pkgs/development/python-modules/pysml/default.nix
+++ b/pkgs/development/python-modules/pysml/default.nix
@@ -9,14 +9,14 @@
 
 buildPythonPackage rec {
   pname = "pysml";
-  version = "0.0.8";
+  version = "0.0.9";
   format = "pyproject";
 
   src = fetchFromGitHub {
     owner = "mtdcr";
     repo = pname;
     rev = version;
-    sha256 = "sha256-Qw2irvj94cBquYeVUhqOq8lw85oP5TqtA2XTT2z5/as=";
+    sha256 = "sha256-pUbRttH/ksYcE1qZJAQWhuKk4+40w5xsul0TTqq1g3s=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/development/python-modules/pytest-md-report/default.nix b/pkgs/development/python-modules/pytest-md-report/default.nix
new file mode 100644
index 0000000000000..1f9664947d2cc
--- /dev/null
+++ b/pkgs/development/python-modules/pytest-md-report/default.nix
@@ -0,0 +1,43 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, pytablewriter
+, pytest
+, tcolorpy
+, typepy
+, pytestCheckHook
+, pythonOlder
+}:
+
+buildPythonPackage rec {
+  pname = "pytest-md-report";
+  version = "0.3.0";
+  format = "setuptools";
+
+  disabled = pythonOlder "3.6";
+
+  src = fetchPypi {
+    inherit pname version;
+    hash = "sha256-ra88WXG6+xHSjOcy6tdYjvpKpNlvu6lq+sZckLadAlU=";
+  };
+
+  propagatedBuildInputs = [
+    pytablewriter
+    tcolorpy
+    typepy
+  ];
+
+  buildInputs = [ pytest ];
+
+  nativeCheckInputs = [ pytestCheckHook ];
+
+  pythonImportsCheck = [ "pytest_md_report" ];
+
+  meta = with lib; {
+    description = "A pytest plugin to make a test results report with Markdown table format";
+    homepage = "https://github.com/thombashi/pytest-md-report";
+    changelog = "https://github.com/thombashi/pytest-md-report/releases/tag/v${version}";
+    license = licenses.mit;
+    maintainers = with maintainers; [ rrbutani ];
+  };
+}
diff --git a/pkgs/development/python-modules/python-velbus/default.nix b/pkgs/development/python-modules/python-velbus/default.nix
index c83ff853f1d55..c2357f2df32a7 100644
--- a/pkgs/development/python-modules/python-velbus/default.nix
+++ b/pkgs/development/python-modules/python-velbus/default.nix
@@ -1,18 +1,20 @@
 { lib
 , buildPythonPackage
-, fetchFromGitHub
+, fetchPypi
 , pyserial
+, pythonOlder
 }:
 
 buildPythonPackage rec {
   pname = "python-velbus";
-  version = "2.1.4";
+  version = "2.1.9";
+  format = "setuptools";
 
-  src = fetchFromGitHub {
-    owner = "thomasdelaet";
-    repo = pname;
-    rev = version;
-    sha256 = "1z0a7fc9xfrcpwi9xiimxsgbzbp2iwyi1rij6vqd5z47mzi49fv9";
+  disabled = pythonOlder "3.7";
+
+  src = fetchPypi {
+    inherit pname version;
+    hash = "sha256-SbuECT6851E+QNyyPaNTnKmH54fYovemSto8gvfMIKg=";
   };
 
   propagatedBuildInputs = [
@@ -22,7 +24,9 @@ buildPythonPackage rec {
 # Project has no tests
   doCheck = false;
 
-  pythonImportsCheck = [ "velbus" ];
+  pythonImportsCheck = [
+    "velbus"
+  ];
 
   meta = with lib; {
     description = "Python library to control the Velbus home automation system";
diff --git a/pkgs/development/python-modules/pyvizio/default.nix b/pkgs/development/python-modules/pyvizio/default.nix
index 807278d967d59..4a11c8f30ba14 100644
--- a/pkgs/development/python-modules/pyvizio/default.nix
+++ b/pkgs/development/python-modules/pyvizio/default.nix
@@ -12,11 +12,11 @@
 
 buildPythonPackage rec {
   pname = "pyvizio";
-  version = "0.1.59";
+  version = "0.1.60";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "1j2zbziklx4az55m3997y7yp4xflk7i0gsbdfh7fp9k0qngb2053";
+    sha256 = "sha256-RwwZDb4mQJZw8w1sTFJ0eM3az4pDQbVLGtA+anyKEJM=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/reolink-aio/default.nix b/pkgs/development/python-modules/reolink-aio/default.nix
index 7d4ec741431be..f7d033b7ce8e8 100644
--- a/pkgs/development/python-modules/reolink-aio/default.nix
+++ b/pkgs/development/python-modules/reolink-aio/default.nix
@@ -11,7 +11,7 @@
 
 buildPythonPackage rec {
   pname = "reolink-aio";
-  version = "0.5.2";
+  version = "0.5.3";
   format = "setuptools";
 
   disabled = pythonOlder "3.9";
@@ -20,7 +20,7 @@ buildPythonPackage rec {
     owner = "starkillerOG";
     repo = "reolink_aio";
     rev = "refs/tags/${version}";
-    hash = "sha256-GLBekTsJqwzwtSYiYaJSeTt7qNs0gx2nOhlsfECFvKg=";
+    hash = "sha256-pPWnK5Xui339D10vS2QA2Og+Qg3JM8c3CbvRxksl0NY=";
   };
 
   postPatch = ''
diff --git a/pkgs/development/python-modules/snscrape/default.nix b/pkgs/development/python-modules/snscrape/default.nix
index 379159180a060..5d87fb52b2ab9 100644
--- a/pkgs/development/python-modules/snscrape/default.nix
+++ b/pkgs/development/python-modules/snscrape/default.nix
@@ -12,16 +12,16 @@
 
 buildPythonPackage rec {
   pname = "snscrape";
-  version = "0.4.3.20220106";
-  format = "setuptools";
+  version = "0.6.0.20230303";
+  format = "pyproject";
 
   disabled = pythonOlder "3.8";
 
   src = fetchFromGitHub {
     owner = "JustAnotherArchivist";
     repo = pname;
-    rev = "v${version}";
-    hash = "sha256-gphNT1IYSiAw22sqHlV8Rm4WRP4EWUvP0UkITuepmMc=";
+    rev = "refs/tags/v${version}";
+    hash = "sha256-FY8byS+0yAhNSRxWsrsQMR5kdZmnHutru5Z6SWVfpiE=";
   };
 
   SETUPTOOLS_SCM_PRETEND_VERSION = version;
diff --git a/pkgs/development/python-modules/tesserocr/default.nix b/pkgs/development/python-modules/tesserocr/default.nix
index 9e3e1560c2083..f852e29c002c2 100644
--- a/pkgs/development/python-modules/tesserocr/default.nix
+++ b/pkgs/development/python-modules/tesserocr/default.nix
@@ -6,7 +6,7 @@
 , cython
 , leptonica
 , pkg-config
-, tesseract
+, tesseract4
 
 # propagates
 , pillow
@@ -36,7 +36,7 @@ buildPythonPackage rec {
 
   buildInputs = [
     leptonica
-    tesseract
+    tesseract4
   ];
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/textnets/default.nix b/pkgs/development/python-modules/textnets/default.nix
new file mode 100644
index 0000000000000..da3da57d35d91
--- /dev/null
+++ b/pkgs/development/python-modules/textnets/default.nix
@@ -0,0 +1,75 @@
+{ lib
+, buildPythonPackage
+, cairocffi
+, cython
+, fetchFromGitHub
+, igraph
+, leidenalg
+, pandas
+, poetry-core
+, pytestCheckHook
+, pythonOlder
+, scipy
+, setuptools
+, spacy
+, en_core_web_sm
+, toolz
+, tqdm
+, wasabi
+}:
+
+buildPythonPackage rec {
+  pname = "textnets";
+  version = "0.8.7";
+  format = "pyproject";
+
+  disabled = pythonOlder "3.8";
+
+  src = fetchFromGitHub {
+    owner = "jboynyc";
+    repo = pname;
+    rev = "refs/tags/v${version}";
+    hash = "sha256-BBndY+3leJBxiImuyRL7gMD5eocE4i96+97I9hDEwec=";
+  };
+
+  nativeBuildInputs = [
+    cython
+    poetry-core
+    setuptools
+  ];
+
+  propagatedBuildInputs = [
+    cairocffi
+    igraph
+    leidenalg
+    pandas
+    scipy
+    spacy
+    toolz
+    tqdm
+    wasabi
+  ];
+
+  # Deselect a test of an experimental feature that fails because it requires
+  # an additional dependency.
+  disabledTests = [
+    "test_context"
+  ];
+
+  nativeCheckInputs = [
+    pytestCheckHook
+    en_core_web_sm
+  ];
+
+  pythonImportsCheck = [
+    "textnets"
+  ];
+
+  meta = with lib; {
+    description = "Text analysis with networks";
+    homepage = "https://textnets.readthedocs.io";
+    changelog = "https://github.com/jboynyc/textnets/blob/v${version}/HISTORY.rst";
+    license = licenses.gpl3Only;
+    maintainers = with maintainers; [ jboy ];
+  };
+}
diff --git a/pkgs/development/python-modules/thinc/default.nix b/pkgs/development/python-modules/thinc/default.nix
index e415bc17a7b3c..8deba357004ee 100644
--- a/pkgs/development/python-modules/thinc/default.nix
+++ b/pkgs/development/python-modules/thinc/default.nix
@@ -29,14 +29,14 @@
 
 buildPythonPackage rec {
   pname = "thinc";
-  version = "8.1.7";
+  version = "8.1.8";
   format = "setuptools";
 
   disabled = pythonOlder "3.7";
 
   src = fetchPypi {
     inherit pname version;
-    hash = "sha256-Dwj20fxQ4ovxiBTKKxyAfNTVmpMNcTRZpnXghsR3mvk=";
+    hash = "sha256-NcZXy+2wT8W8JHhl1mWSHOw9Ve81+/zj7hogSGtyBoM=";
   };
 
   buildInputs = [
diff --git a/pkgs/development/python-modules/ulid-transform/default.nix b/pkgs/development/python-modules/ulid-transform/default.nix
new file mode 100644
index 0000000000000..d53281f5f062b
--- /dev/null
+++ b/pkgs/development/python-modules/ulid-transform/default.nix
@@ -0,0 +1,51 @@
+{ lib
+, cython
+, buildPythonPackage
+, fetchFromGitHub
+, poetry-core
+, pytestCheckHook
+, pythonOlder
+, setuptools
+}:
+
+buildPythonPackage rec {
+  pname = "ulid-transform";
+  version = "0.4.0";
+  format = "pyproject";
+
+  disabled = pythonOlder "3.9";
+
+  src = fetchFromGitHub {
+    owner = "bdraco";
+    repo = pname;
+    rev = "refs/tags/v${version}";
+    hash = "sha256-JuTIE8FAVZkfn+byJ1z9/ep9Oih1uXpz/QTB2OfM0WU=";
+  };
+
+  nativeBuildInputs = [
+    cython
+    poetry-core
+    setuptools
+  ];
+
+  nativeCheckInputs = [
+    pytestCheckHook
+  ];
+
+  postPatch = ''
+    substituteInPlace pyproject.toml \
+      --replace " --cov=ulid_transform --cov-report=term-missing:skip-covered" ""
+  '';
+
+  pythonImportsCheck = [
+    "ulid_transform"
+  ];
+
+  meta = with lib; {
+    description = "Library to create and transform ULIDs";
+    homepage = "https://github.com/bdraco/ulid-transform";
+    changelog = "https://github.com/bdraco/ulid-transform/releases/tag/v${version}";
+    license = with licenses; [ mit ];
+    maintainers = with maintainers; [ fab ];
+  };
+}
diff --git a/pkgs/development/python-modules/youless-api/default.nix b/pkgs/development/python-modules/youless-api/default.nix
index c5f5b7f1f3d2e..3fc465d9bad10 100644
--- a/pkgs/development/python-modules/youless-api/default.nix
+++ b/pkgs/development/python-modules/youless-api/default.nix
@@ -12,7 +12,7 @@
 
 buildPythonPackage rec {
   pname = "youless-api";
-  version = "1.0";
+  version = "1.0.1";
   format = "setuptools";
 
   disabled = pythonOlder "3.7";
@@ -21,7 +21,7 @@ buildPythonPackage rec {
     owner = "jongsoftdev";
     repo = "youless-python-bridge";
     rev = version;
-    hash = "sha256-yh4ZmMn5z6aTZrhj9ZmvpmsDOF4MeDcPtSgr4fimjGM=";
+    hash = "sha256-49/HmkGr87aDhr8GEtARpXvr2RcgmLdAqhvMLI5x+vQ=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/tools/analysis/cargo-tarpaulin/default.nix b/pkgs/development/tools/analysis/cargo-tarpaulin/default.nix
index 499a424292308..0cc65b004e80b 100644
--- a/pkgs/development/tools/analysis/cargo-tarpaulin/default.nix
+++ b/pkgs/development/tools/analysis/cargo-tarpaulin/default.nix
@@ -2,13 +2,13 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "cargo-tarpaulin";
-  version = "0.25.0";
+  version = "0.25.1";
 
   src = fetchFromGitHub {
     owner = "xd009642";
     repo = "tarpaulin";
     rev = version;
-    sha256 = "sha256-9duL16AuwG3lBMq1hUAXbNrvoBF6SASCiakmT42LQ/E=";
+    sha256 = "sha256-JTkVNy2wqPIQ5mVcptI10a3Ghhdygnm9dmwUmiDqYjE=";
   };
 
   nativeBuildInputs = [
@@ -17,7 +17,7 @@ rustPlatform.buildRustPackage rec {
   buildInputs = [ openssl ]
     ++ lib.optionals stdenv.isDarwin [ curl Security ];
 
-  cargoHash = "sha256-MXnE3Fq/jzWHvmO2i8cWixRKRuwVbUU/OmBj1SUkEiY=";
+  cargoHash = "sha256-t4L3HSOGk/lAL8mOaVl/pm3kE0CVVzsYpyu0V6zeIFQ=";
   #checkFlags = [ "--test-threads" "1" ];
   doCheck = false;
 
@@ -26,6 +26,5 @@ rustPlatform.buildRustPackage rec {
     homepage = "https://github.com/xd009642/tarpaulin";
     license = with licenses; [ mit /* or */ asl20 ];
     maintainers = with maintainers; [ hugoreeves ];
-    platforms = lib.platforms.x86_64;
   };
 }
diff --git a/pkgs/development/tools/build-managers/corrosion/default.nix b/pkgs/development/tools/build-managers/corrosion/default.nix
index bee465fbfd885..1f8e04c080825 100644
--- a/pkgs/development/tools/build-managers/corrosion/default.nix
+++ b/pkgs/development/tools/build-managers/corrosion/default.nix
@@ -8,13 +8,13 @@
 
 stdenv.mkDerivation rec {
   pname = "corrosion";
-  version = "0.3.3";
+  version = "0.3.4";
 
   src = fetchFromGitHub {
     owner = "corrosion-rs";
     repo = "corrosion";
     rev = "v${version}";
-    hash = "sha256-dXUjQmKk+UdgYqdMuNh9ALaots1t0xwg6hEWwAbGPJc=";
+    hash = "sha256-g2kA1FYt6OWb0zb3pSQ46dJMsSZpT6kLYkpIIN3XZbI=";
   };
 
   cargoRoot = "generator";
@@ -23,7 +23,7 @@ stdenv.mkDerivation rec {
     inherit src;
     sourceRoot = "${src.name}/${cargoRoot}";
     name = "${pname}-${version}";
-    hash = "sha256-f+n/bjjdKar5aURkPNYKkHUll6lqNa/dlzq3dIFh+tc=";
+    hash = "sha256-088qK9meyqV93ezLlBIjdp1l/n+pv+9afaJGYlXEFQc=";
   };
 
   buildInputs = lib.optional stdenv.isDarwin libiconv;
diff --git a/pkgs/development/tools/continuous-integration/cirrus-cli/default.nix b/pkgs/development/tools/continuous-integration/cirrus-cli/default.nix
index 2a7111b352ecc..c1f08ce74d5b9 100644
--- a/pkgs/development/tools/continuous-integration/cirrus-cli/default.nix
+++ b/pkgs/development/tools/continuous-integration/cirrus-cli/default.nix
@@ -6,16 +6,16 @@
 
 buildGoModule rec {
   pname = "cirrus-cli";
-  version = "0.94.4";
+  version = "0.95.0";
 
   src = fetchFromGitHub {
     owner = "cirruslabs";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-PnO9M3CQQnZotzCuE2rlGsoBzNO0PdsoMLlY3tNyEyk=";
+    sha256 = "sha256-s+mScSSVjzCZ+9lwFdcC/F5oCdT51JNxlqP7uKlx+Y8=";
   };
 
-  vendorSha256 = "sha256-gotc9M2UkRJtE4LZPCpqDTXQ/cnN4tk+3HG243tFoss=";
+  vendorHash = "sha256-TZOBIivaoaO7EWBqV2zuL3Em5o4MButq4+TxvePu+qY=";
 
   ldflags = [
     "-X github.com/cirruslabs/cirrus-cli/internal/version.Version=v${version}"
diff --git a/pkgs/development/tools/datree/default.nix b/pkgs/development/tools/datree/default.nix
index bc0eb9332c0a1..83e9d2d9877e1 100644
--- a/pkgs/development/tools/datree/default.nix
+++ b/pkgs/development/tools/datree/default.nix
@@ -8,13 +8,13 @@
 
 buildGoModule rec {
   pname = "datree";
-  version = "1.8.27";
+  version = "1.8.33";
 
   src = fetchFromGitHub {
     owner = "datreeio";
     repo = "datree";
     rev = "refs/tags/${version}";
-    hash = "sha256-52xAm0D8yzUPvox1byHcyUTg8mILakOnVh8q0a2yR1M=";
+    hash = "sha256-pyu8umkR8ncDVkSLJbxqnLFRlZQR29GnkCSxDNHQUGg=";
   };
 
   vendorHash = "sha256-mkVguYzjNGgFUdATjGfenCx3h97LS3SEOkYo3CuP9fA=";
diff --git a/pkgs/development/tools/dt-schema/default.nix b/pkgs/development/tools/dt-schema/default.nix
deleted file mode 100644
index 20e1ed136e2b8..0000000000000
--- a/pkgs/development/tools/dt-schema/default.nix
+++ /dev/null
@@ -1,37 +0,0 @@
-{ lib
-, buildPythonPackage
-, fetchPypi
-, git
-, ruamel-yaml
-, jsonschema
-, rfc3987
-, setuptools
-, setuptools-scm
-}:
-
-buildPythonPackage rec {
-  pname = "dtschema";
-  version = "2022.1";
-
-  src = fetchPypi {
-    inherit pname version;
-    sha256 = "sha256-G5KzuaMbbkuLK+cNvzBld1UwvExS6ZGVW2e+GXQRFMU=";
-  };
-
-  nativeBuildInputs = [ setuptools-scm git ];
-  propagatedBuildInputs = [
-    setuptools
-    ruamel-yaml
-    jsonschema
-    rfc3987
-  ];
-
-  meta = with lib; {
-    description = "Tooling for devicetree validation using YAML and jsonschema";
-    homepage = "https://github.com/devicetree-org/dt-schema/";
-    # all files have SPDX tags
-    license = with licenses; [ bsd2 gpl2 ];
-    maintainers = with maintainers; [ sorki ];
-  };
-}
-
diff --git a/pkgs/development/tools/esbuild/default.nix b/pkgs/development/tools/esbuild/default.nix
index 3845f18c351da..fae82784e45fc 100644
--- a/pkgs/development/tools/esbuild/default.nix
+++ b/pkgs/development/tools/esbuild/default.nix
@@ -2,13 +2,13 @@
 
 buildGoModule rec {
   pname = "esbuild";
-  version = "0.17.10";
+  version = "0.17.11";
 
   src = fetchFromGitHub {
     owner = "evanw";
     repo = "esbuild";
     rev = "v${version}";
-    hash = "sha256-qe7YCOIwp+MSa5VkwImdOea1aMcpWdor/13PIgGEkkw=";
+    hash = "sha256-k7bXEDAmxyn2u/cniqKtr9zbrWnzwbhTZkL35/igctM=";
   };
 
   vendorHash = "sha256-+BfxCyg0KkDQpHt/wycy/8CTG6YBA/VJvJFhhzUnSiQ=";
diff --git a/pkgs/development/tools/ginkgo/default.nix b/pkgs/development/tools/ginkgo/default.nix
index 956f5dca4ffd4..9ef3a6e2099c9 100644
--- a/pkgs/development/tools/ginkgo/default.nix
+++ b/pkgs/development/tools/ginkgo/default.nix
@@ -2,15 +2,15 @@
 
 buildGoModule rec {
   pname = "ginkgo";
-  version = "2.8.3";
+  version = "2.8.4";
 
   src = fetchFromGitHub {
     owner = "onsi";
     repo = "ginkgo";
     rev = "v${version}";
-    sha256 = "sha256-V10AFqCCe+SQeMQ+bBYqrKE/wSxyhpMQg+rNOH5P1e4=";
+    sha256 = "sha256-GzsdcL5XwnT+NnEZbr8jJNx0vZEaw0TPTCO4Rf1C/2A=";
   };
-  vendorHash = "sha256-CNpmcQ623ZINYKwV0ZJi7KuEwzyGOM7t9OOCTx7JKDs=";
+  vendorHash = "sha256-Ei9U5S/jBOtQS/Y0GQX0ZknkHQGJFfc5U8eVvofxpOo=";
 
   # integration tests expect more file changes
   # types tests are missing CodeLocation
diff --git a/pkgs/development/tools/icr/default.nix b/pkgs/development/tools/icr/default.nix
index 50a496d93824b..7f6b0440b9495 100644
--- a/pkgs/development/tools/icr/default.nix
+++ b/pkgs/development/tools/icr/default.nix
@@ -13,13 +13,13 @@
 
 crystal.buildCrystalPackage rec {
   pname = "icr";
-  version = "unstable-2021-03-14";
+  version = "0.9.0";
 
   src = fetchFromGitHub {
     owner = "crystal-community";
     repo = "icr";
-    rev = "b6b335f40aff4c2c07d21250949935e8259f7d1b";
-    sha256 = "sha256-Qoy37lCdHFnMAuuqyB9uT15/RLllksFyApYAGy+RmDs=";
+    rev = "v${version}";
+    hash = "sha256-29B/i8oEjwNOYjnc78QcYTl6fC/M9VfAVCCBjLBKp0Q=";
   };
 
   shardsFile = ./shards.nix;
diff --git a/pkgs/development/tools/icr/shards.nix b/pkgs/development/tools/icr/shards.nix
index 1dddd5a42c393..8f880fa9a6f05 100644
--- a/pkgs/development/tools/icr/shards.nix
+++ b/pkgs/development/tools/icr/shards.nix
@@ -1,7 +1,6 @@
 {
   readline = {
-    owner = "crystal-lang";
-    repo = "crystal-readline";
+    url = "https://github.com/crystal-lang/crystal-readline.git";
     rev = "0fb7d186da8e1b157998d98d1c96e99699b791eb";
     sha256 = "1rk27vw3ssldgnfgprwvz2gag02v4g6d6yg56b3sk9w3fn8jyyi8";
   };
diff --git a/pkgs/development/tools/language-servers/millet/default.nix b/pkgs/development/tools/language-servers/millet/default.nix
index e39abae89a2cc..8c01c6ee20d38 100644
--- a/pkgs/development/tools/language-servers/millet/default.nix
+++ b/pkgs/development/tools/language-servers/millet/default.nix
@@ -2,16 +2,16 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "millet";
-  version = "0.7.9";
+  version = "0.8.1";
 
   src = fetchFromGitHub {
     owner = "azdavis";
     repo = pname;
     rev = "v${version}";
-    hash = "sha256-c4hNrswfYz/Wr59sWQJu8yuOqk594iVm+NzxYpG96Ys=";
+    hash = "sha256-yIOb6AeEpIbKarY4I0X4zq5Gtrv05QLrDlFaBD3x6rw=";
   };
 
-  cargoHash = "sha256-Ja5Vjt3z0pkxEMtkyWYY+lZH0AnzVzyGxlQtlmwWbS4=";
+  cargoHash = "sha256-DIRs+xhcdV74NFjsB1jJYgd8Cu/BmAUcBf58rGAp/yo=";
 
   postPatch = ''
     rm .cargo/config.toml
diff --git a/pkgs/development/tools/oh-my-posh/default.nix b/pkgs/development/tools/oh-my-posh/default.nix
index d83d5ef6f11e1..0fc9506594622 100644
--- a/pkgs/development/tools/oh-my-posh/default.nix
+++ b/pkgs/development/tools/oh-my-posh/default.nix
@@ -6,13 +6,13 @@
 
 buildGoModule rec {
   pname = "oh-my-posh";
-  version = "14.9.1";
+  version = "14.9.2";
 
   src = fetchFromGitHub {
     owner = "jandedobbeleer";
     repo = pname;
     rev = "refs/tags/v${version}";
-    hash = "sha256-oWif17MawHKiFRts9wfLA7XcSLMuogaPLziYzgKihas=";
+    hash = "sha256-9ZIMAJVVrJk8ny3TgwXHSxrg713dSbPlgQnY/b0m2Ps=";
   };
 
   vendorHash = "sha256-JZ5UiL2vGsXy/xmz+NcAKYDmp5hq7bx54/OdUyQHUp0=";
diff --git a/pkgs/development/tools/pscale/default.nix b/pkgs/development/tools/pscale/default.nix
index 42eec2fa4c93e..e6798260f94b1 100644
--- a/pkgs/development/tools/pscale/default.nix
+++ b/pkgs/development/tools/pscale/default.nix
@@ -8,16 +8,16 @@
 
 buildGoModule rec {
   pname = "pscale";
-  version = "0.129.0";
+  version = "0.130.0";
 
   src = fetchFromGitHub {
     owner = "planetscale";
     repo = "cli";
     rev = "v${version}";
-    sha256 = "sha256-GdJyOZ0FqLBRGeQZ5+qLYXLL8JxqigYCVuDUUs1KbN4=";
+    sha256 = "sha256-rrrjIyIMoCshq348bUBGzMcwYhEZMt1OA/pOoE4PEY8=";
   };
 
-  vendorHash = "sha256-iXT+xvmVDfFVV/bYq+hnmkz2ODUQ4fHR8z2lcaK93TE=";
+  vendorHash = "sha256-shs05gXqLjp+L3t5f7oh0BV8YRPtcoCzrTlmx1tOvP0=";
 
   ldflags = [
     "-s" "-w"
diff --git a/pkgs/development/tools/rgp/default.nix b/pkgs/development/tools/rgp/default.nix
index 2e5400c1cdcec..6b80dae38a55d 100644
--- a/pkgs/development/tools/rgp/default.nix
+++ b/pkgs/development/tools/rgp/default.nix
@@ -19,15 +19,15 @@
 }:
 
 let
-  buildNum = "2022-12-12-1037";
+  buildNum = "2023-02-15-1051";
 in
-stdenv.mkDerivation rec {
+stdenv.mkDerivation {
   pname = "rgp";
-  version = "1.14";
+  version = "1.14.1";
 
   src = fetchurl {
     url = "https://gpuopen.com/download/radeon-developer-tool-suite/RadeonDeveloperToolSuite-${buildNum}.tgz";
-    hash = "sha256-T13SOy+77lLxmlcczXEFZAnyx9Lm52G/WiCcC1Py4HA=";
+    hash = "sha256-1JxW6vXfOYDaCnHWEq8crjuu0QrUCwahm+ipOKVDQPA=";
   };
 
   nativeBuildInputs = [ makeWrapper autoPatchelfHook ];
diff --git a/pkgs/development/tools/richgo/default.nix b/pkgs/development/tools/richgo/default.nix
index f8468d1f175b3..cef70f62ab688 100644
--- a/pkgs/development/tools/richgo/default.nix
+++ b/pkgs/development/tools/richgo/default.nix
@@ -2,16 +2,16 @@
 
 buildGoModule rec {
   pname = "richgo";
-  version = "0.3.11";
+  version = "0.3.12";
 
   src = fetchFromGitHub {
     owner = "kyoh86";
     repo = "richgo";
     rev = "v${version}";
-    sha256 = "sha256-a8CxJKk9fKGYTDtY/mU/3gcdIeejg20sL8Tm4ozgDl4=";
+    sha256 = "sha256-pOB1exuwGwSxStodKhLLwh1xBvLjopUn0k+sEARdA9g=";
   };
 
-  vendorSha256 = "sha256-j2RZOt5IRb2oEQ6sFu+nXpVkDsnppA6h9YT4F7AiCoY=";
+  vendorHash = "sha256-jIzBN5T5+eTFCYOdS5hj3yTGOfU8NTrFmnIu+dDjVeU=";
 
   meta = with lib; {
     description = "Enrich `go test` outputs with text decorations";
diff --git a/pkgs/development/tools/rover/default.nix b/pkgs/development/tools/rover/default.nix
index 5144ecb38e888..91852f7516626 100644
--- a/pkgs/development/tools/rover/default.nix
+++ b/pkgs/development/tools/rover/default.nix
@@ -3,32 +3,46 @@
 , fetchFromGitHub
 , perl
 , rustPlatform
+, darwin
+, stdenv
 }:
 
 rustPlatform.buildRustPackage rec {
   pname = "rover";
-  version = "0.5.1";
+  version = "0.11.0";
 
   src = fetchFromGitHub {
     owner = "apollographql";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-wBHMND/xpm9o7pkWMUj9lEtEkzy3mX+E4Dt7qDn6auY=";
+    sha256 = "sha256-Ei6EeM0+b3EsMoRo38nHO79onT9Oq/cfbiCZhyDYQrc=";
   };
 
-  cargoSha256 = "sha256-n0R2MdAYGsOsYt4x1N1KdGvBZYTALyhSzCGW29bnFU4=";
+  cargoSha256 = "sha256-+iDU8LPb7P4MNQ8MB5ldbWq4wWRcnbgOmSZ93Z//5O0=";
+
+  buildInputs = lib.optionals stdenv.isDarwin [
+    darwin.apple_sdk.frameworks.Security
+    darwin.apple_sdk.frameworks.CoreServices
+  ];
 
   nativeBuildInputs = [
     perl
   ];
 
-  # The rover-client's build script (crates/rover-client/build.rs) will try to
+  # This test checks whether the plugins specified in the plugins JSON file are
+  # valid by making a network call to the repo that houses their binaries, but
+  # the build env can't make network calls (impurity).
+  cargoTestFlags = [
+    "-- --skip=latest_plugins_are_valid_versions"
+  ];
+
+  # The rover-client's build script (xtask/src/commands/prep/schema.rs) will try to
   # download the API's graphql schema at build time to our read-only filesystem.
   # To avoid this we pre-download it to a location the build script checks.
   preBuild = ''
-    mkdir crates/rover-client/.schema
-    cp ${./schema}/etag.id        crates/rover-client/.schema/
-    cp ${./schema}/schema.graphql crates/rover-client/.schema/
+    cp ${./schema}/hash.id              crates/rover-client/.schema/
+    cp ${./schema}/etag.id              crates/rover-client/.schema/
+    cp ${./schema}/schema.graphql       crates/rover-client/.schema/
   '';
 
   passthru.updateScript = ./update.sh;
@@ -41,10 +55,9 @@ rustPlatform.buildRustPackage rec {
   '';
 
   meta = with lib; {
-    description = "A CLI for managing and maintaining graphs with Apollo Studio";
+    description = "A CLI for interacting with ApolloGraphQL's developer tooling, including managing self-hosted and GraphOS graphs.";
     homepage = "https://www.apollographql.com/docs/rover";
     license = licenses.mit;
-    maintainers = [ maintainers.ivanbrennan ];
-    platforms = ["x86_64-linux"];
+    maintainers = [ maintainers.ivanbrennan maintainers.aaronarinder ];
   };
 }
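 
 The rover update above skips a test that needs network access and pre-seeds the schema
 files its build script would otherwise try to download. As a rough illustration of the
 test-skipping idiom only (hypothetical package, placeholder hashes; not part of this
 commit), a buildRustPackage derivation forwarding a --skip flag to the test harness
 could look like this:
 
 { lib, rustPlatform, fetchFromGitHub }:
 
 rustPlatform.buildRustPackage rec {
   pname = "example-tool";   # hypothetical package, for illustration only
   version = "0.1.0";
 
   src = fetchFromGitHub {
     owner = "example";
     repo = pname;
     rev = "v${version}";
     hash = lib.fakeHash;    # placeholder
   };
 
   cargoHash = lib.fakeHash; # placeholder
 
   # Arguments after "--" are passed through `cargo test` to the test binary,
   # so a single network-dependent test can be skipped by name.
   cargoTestFlags = [ "--" "--skip=needs_network" ];
 }
 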
diff --git a/pkgs/development/tools/rover/schema/etag.id b/pkgs/development/tools/rover/schema/etag.id
index a8b9f0cece3ef..59331ac0df6d3 100644
--- a/pkgs/development/tools/rover/schema/etag.id
+++ b/pkgs/development/tools/rover/schema/etag.id
@@ -1 +1 @@
-2694c7b893d44c9ad8f5d7161116deb9985a6bd05e8e0cdcd7379947430e6f89
+d35f8c48cb89329f33656944fa9e997de1e778b043b9ca4d78c8accdecfd9046
diff --git a/pkgs/development/tools/rover/schema/hash.id b/pkgs/development/tools/rover/schema/hash.id
new file mode 100644
index 0000000000000..d730728cfa77d
--- /dev/null
+++ b/pkgs/development/tools/rover/schema/hash.id
@@ -0,0 +1 @@
+ff145f12604d11312e6a2f8a61a3d226fcdb2ca79f6b7fbc24c5a22aa23ab1af
diff --git a/pkgs/development/tools/rover/schema/schema.graphql b/pkgs/development/tools/rover/schema/schema.graphql
index b20b21a91e419..8cc527f4f822d 100644
--- a/pkgs/development/tools/rover/schema/schema.graphql
+++ b/pkgs/development/tools/rover/schema/schema.graphql
@@ -1,20 +1,319 @@
-"""An organization. Can have multiple members and graphs.""" type Account{auditLogExports:[AuditLogExport!]"""These are the roles that the account is able to use""" availableRoles:[UserPermission!]!"""Get an URL to which an avatar image can be uploaded. Client uploads by sending a PUT request
-with the image data to MediaUploadInfo.url. Client SHOULD set the "Content-Type" header to the
-browser-inferred MIME type, and SHOULD set the "x-apollo-content-filename" header to the
-filename, if such information is available. Client MUST set the "x-apollo-csrf-token" header to
-MediaUploadInfo.csrfToken.""" avatarUpload:AvatarUploadResult """Get an image URL for the account's avatar. Note that CORS is not enabled for these URLs. The size
-argument is used for bandwidth reduction, and should be the size of the image as displayed in the
-application. Apollo's media server will downscale larger images to at least the requested size,
-but this will not happen for third-party media servers.""" avatarUrl(size:Int!=40):String billingInfo:BillingInfo companyUrl:String currentBillingMonth:BillingMonth currentPlan:BillingPlan!currentPlanV2:BillingPlanV2!currentSubscription:BillingSubscription currentSubscriptionV2:BillingSubscriptionV2 experimentalFeatures:AccountExperimentalFeatures!expiredTrialSubscription:BillingSubscription expiredTrialSubscriptionV2:BillingSubscriptionV2 graphIDAvailable(id:ID!):Boolean!hasBeenOnTrial:Boolean!hasBeenOnTrialV2:Boolean!"""Globally unique identifier, which isn't guaranteed stable (can be changed by administrators).""" id:ID!"""Internal immutable identifier for the account. Only visible to Apollo admins (because it really
-shouldn't be used in normal client apps).""" internalID:ID!invitations(includeAccepted:Boolean!=false):[AccountInvitation!]invoices:[Invoice!]invoicesV2:[InvoiceV2!]!isOnExpiredTrial:Boolean!isOnTrial:Boolean!legacyIsOnTrial:Boolean!memberships:[AccountMembership!]"""Name of the organization, which can change over time and isn't unique.""" name:String!provisionedAt:Timestamp recurlyEmail:String """Returns a different registry related stats pertaining to this account.""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow requests(from:Timestamp!to:Timestamp!):Long requestsInCurrentBillingPeriod:Long roles:AccountRoles """How many seats would be included in your next bill, as best estimated today""" seatCountForNextBill:Int seats:Seats secondaryIDs:[ID!]!"""Graphs belonging to this organization.""" services(includeDeleted:Boolean):[Service!]!"""If non-null, this organization tracks its members through an upstream, eg PingOne;
-invitations are not possible on SSO-synchronized account.""" sso:OrganizationSSO state:AccountState """A list of reusable invitations for the organization.""" staticInvitations:[OrganizationInviteLink!]stats(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):AccountStatsWindow!@deprecated(reason:"use Account.statsWindow instead")statsWindow(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):AccountStatsWindow subscriptions:[BillingSubscription!]subscriptionsV2:[BillingSubscriptionV2!]!"""Gets a ticket for this org, by id""" ticket(id:ID!):ZendeskTicket """List of Zendesk tickets submitted for this org""" tickets:[ZendeskTicket!]}"""Columns of AccountBillingUsageStats.""" enum AccountBillingUsageStatsColumn{OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountBillingUsageStatsDimensions{operationCountProvidedExplicitly:String schemaTag:String serviceId:ID}"""Filter for data in AccountBillingUsageStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountBillingUsageStatsFilter{and:[AccountBillingUsageStatsFilter!]in:AccountBillingUsageStatsFilterIn not:AccountBillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[AccountBillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountBillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountBillingUsageStatsFilterIn{"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountBillingUsageStatsMetrics{operationCount:Long!}input AccountBillingUsageStatsOrderBySpec{column:AccountBillingUsageStatsColumn!direction:Ordering!}type AccountBillingUsageStatsRecord{"""Dimensions of AccountBillingUsageStats that can be grouped by.""" groupBy:AccountBillingUsageStatsDimensions!"""Metrics of AccountBillingUsageStats that can be aggregated over.""" metrics:AccountBillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountChecksStatsMetrics{totalFailedChecks:Long!totalSuccessfulChecks:Long!}type AccountChecksStatsRecord{id:ID!metrics:AccountChecksStatsMetrics!timestamp:Timestamp!}"""Columns of AccountEdgeServerInfos.""" enum AccountEdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID SERVICE_ID TIMESTAMP USER_VERSION}type AccountEdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID serviceId:ID userVersion:String}"""Filter for data in AccountEdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountEdgeServerInfosFilter{and:[AccountEdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:AccountEdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:AccountEdgeServerInfosFilter or:[AccountEdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose userVersion dimension equals the given value if not null. To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in AccountEdgeServerInfos. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountEdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input AccountEdgeServerInfosOrderBySpec{column:AccountEdgeServerInfosColumn!direction:Ordering!}type AccountEdgeServerInfosRecord{"""Dimensions of AccountEdgeServerInfos that can be grouped by.""" groupBy:AccountEdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountErrorStats.""" enum AccountErrorStatsColumn{CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountErrorStatsDimensions{clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountErrorStatsFilter{and:[AccountErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:AccountErrorStatsFilterIn not:AccountErrorStatsFilter or:[AccountErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountErrorStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input AccountErrorStatsOrderBySpec{column:AccountErrorStatsColumn!direction:Ordering!}type AccountErrorStatsRecord{"""Dimensions of AccountErrorStats that can be grouped by.""" groupBy:AccountErrorStatsDimensions!"""Metrics of AccountErrorStats that can be aggregated over.""" metrics:AccountErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountExperimentalFeatures{auditLogs:Boolean!championDashboard:Boolean!federation2Preview:Boolean!preRequestPreview:Boolean!publicVariants:Boolean!variantHomepage:Boolean!webhooksPreview:Boolean!}"""Columns of AccountFieldExecutions.""" enum AccountFieldExecutionsColumn{ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldExecutionsDimensions{fieldName:String parentType:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldExecutions. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldExecutionsFilter{and:[AccountFieldExecutionsFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldExecutionsFilterIn not:AccountFieldExecutionsFilter or:[AccountFieldExecutionsFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldExecutions. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldExecutionsFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldExecutionsMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input AccountFieldExecutionsOrderBySpec{column:AccountFieldExecutionsColumn!direction:Ordering!}type AccountFieldExecutionsRecord{"""Dimensions of AccountFieldExecutions that can be grouped by.""" groupBy:AccountFieldExecutionsDimensions!"""Metrics of AccountFieldExecutions that can be aggregated over.""" metrics:AccountFieldExecutionsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountFieldLatencies.""" enum AccountFieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldLatenciesFilter{and:[AccountFieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldLatenciesFilterIn not:AccountFieldLatenciesFilter or:[AccountFieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input AccountFieldLatenciesOrderBySpec{column:AccountFieldLatenciesColumn!direction:Ordering!}type AccountFieldLatenciesRecord{"""Dimensions of AccountFieldLatencies that can be grouped by.""" groupBy:AccountFieldLatenciesDimensions!"""Metrics of AccountFieldLatencies that can be aggregated over.""" metrics:AccountFieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountFieldRequestsByClientVersion.""" enum AccountFieldRequestsByClientVersionColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldRequestsByClientVersionDimensions{clientName:String clientVersion:String fieldName:String parentType:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldRequestsByClientVersion. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldRequestsByClientVersionFilter{and:[AccountFieldRequestsByClientVersionFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldRequestsByClientVersionFilterIn not:AccountFieldRequestsByClientVersionFilter or:[AccountFieldRequestsByClientVersionFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldRequestsByClientVersion. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldRequestsByClientVersionFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldRequestsByClientVersionMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input AccountFieldRequestsByClientVersionOrderBySpec{column:AccountFieldRequestsByClientVersionColumn!direction:Ordering!}type AccountFieldRequestsByClientVersionRecord{"""Dimensions of AccountFieldRequestsByClientVersion that can be grouped by.""" groupBy:AccountFieldRequestsByClientVersionDimensions!"""Metrics of AccountFieldRequestsByClientVersion that can be aggregated over.""" metrics:AccountFieldRequestsByClientVersionMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountFieldUsage.""" enum AccountFieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldUsageFilter{and:[AccountFieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldUsageFilterIn not:AccountFieldUsageFilter or:[AccountFieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input AccountFieldUsageOrderBySpec{column:AccountFieldUsageColumn!direction:Ordering!}type AccountFieldUsageRecord{"""Dimensions of AccountFieldUsage that can be grouped by.""" groupBy:AccountFieldUsageDimensions!"""Metrics of AccountFieldUsage that can be aggregated over.""" metrics:AccountFieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountInvitation{"""An accepted invitation cannot be used anymore""" acceptedAt:Timestamp """Who accepted the invitation""" acceptedBy:User """Time the invitation was created""" createdAt:Timestamp!"""Who created the invitation""" createdBy:User email:String!id:ID!"""Last time we sent an email for the invitation""" lastSentAt:Timestamp """Access role for the invitee""" role:UserPermission!}type AccountMembership{account:Account!createdAt:Timestamp!"""If this membership is a free seat (based on role)""" free:Boolean permission:UserPermission!user:User!}type AccountMutation{auditExport(id:String!):AuditLogExportMutation """Cancel a pending change from an annual team subscription to a monthly team subscription when the current period expires.""" cancelConvertAnnualTeamSubscriptionToMonthlyAtNextPeriod:Account """Cancel account subscriptions, subscriptions will remain active until the end of the paid period""" cancelSubscriptions:Account """Changes an annual team subscription to a monthly team subscription when the current period expires.""" convertAnnualTeamSubscriptionToMonthlyAtNextPeriod:Account """Changes a monthly team subscription to an annual team subscription.""" convertMonthlyTeamSubscriptionToAnnual:Account createStaticInvitation(role:UserPermission!):OrganizationInviteLink """Delete the account's avatar. 
Requires Account.canUpdateAvatar to be true.""" deleteAvatar:AvatarDeleteError """Acknowledge that a trial has expired and return to community""" dismissExpiredTrial:Account """Apollo admins only: extend an ongoing trial""" extendTrial(to:Timestamp!):Account """Hard delete an account and all associated services""" hardDelete:Void """Send an invitation to join the account by E-mail""" invite(email:String!role:UserPermission):AccountInvitation """Reactivate a canceled current subscription""" reactivateCurrentSubscription:Account """Refresh billing information from third-party billing service""" refreshBilling:Void """Delete an invitation""" removeInvitation(id:ID):Void """Remove a member of the account""" removeMember(id:ID!):Account requestAuditExport(actors:[ActorInput!]from:Timestamp!graphIds:[String!]to:Timestamp!):Account """Send a new E-mail for an existing invitation""" resendInvitation(id:ID):AccountInvitation revokeStaticInvitation(token:String!):OrganizationInviteLink """Apollo admins only: set the billing plan to an arbitrary plan""" setPlan(id:ID!):Void """Start a new team subscription with the given billing period""" startTeamSubscription(billingPeriod:BillingPeriod!):Account """Start a team trial""" startTrial:Account """This is called by the form shown to users after they cancel their team subscription.""" submitTeamCancellationFeedback(feedback:String!):Void """Apollo admins only: terminate any ongoing subscriptions in the account, without refunds""" terminateSubscriptions:Account """Update the billing address for a Recurly token""" updateBillingAddress(billingAddress:BillingAddressInput!):Account """Update the billing information from a Recurly token""" updateBillingInfo(token:String!):Void updateCompanyUrl(companyUrl:String):Account """Set the E-mail address of the account, used notably for billing""" updateEmail(email:String!):Void """Update the account ID""" updateID(id:ID!):Account """Update the company name""" updateName(name:String!):Void """Apollo admins only: enable or disable an account for PingOne SSO login""" updatePingOneSSOIDPID(idpid:String):Account """Updates the role assigned to new SSO users.""" updateSSODefaultRole(role:UserPermission!):OrganizationSSO """A (currently) internal to Apollo mutation to update a user's role within an organization""" updateUserPermission(permission:UserPermission!userID:ID!):User}"""Columns of AccountOperationCheckStats.""" enum AccountOperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_REQUESTS_COUNT}type AccountOperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String serviceId:ID}"""Filter for data in AccountOperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountOperationCheckStatsFilter{and:[AccountOperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:AccountOperationCheckStatsFilterIn not:AccountOperationCheckStatsFilter or:[AccountOperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. 
To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountOperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountOperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountOperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input AccountOperationCheckStatsOrderBySpec{column:AccountOperationCheckStatsColumn!direction:Ordering!}type AccountOperationCheckStatsRecord{"""Dimensions of AccountOperationCheckStats that can be grouped by.""" groupBy:AccountOperationCheckStatsDimensions!"""Metrics of AccountOperationCheckStats that can be aggregated over.""" metrics:AccountOperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountPublishesStatsMetrics{totalPublishes:Long!}type AccountPublishesStatsRecord{id:ID!metrics:AccountPublishesStatsMetrics!timestamp:Timestamp!}"""Columns of AccountQueryStats.""" enum AccountQueryStatsColumn{CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type AccountQueryStatsDimensions{clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountQueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountQueryStatsFilter{and:[AccountQueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. 
To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:AccountQueryStatsFilterIn not:AccountQueryStatsFilter or:[AccountQueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountQueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountQueryStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountQueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input AccountQueryStatsOrderBySpec{column:AccountQueryStatsColumn!direction:Ordering!}type AccountQueryStatsRecord{"""Dimensions of AccountQueryStats that can be grouped by.""" groupBy:AccountQueryStatsDimensions!"""Metrics of AccountQueryStats that can be aggregated over.""" metrics:AccountQueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountRoles{canAudit:Boolean!canCreateDevGraph:Boolean!canCreateService:Boolean!canDelete:Boolean!canDownloadInvoice:Boolean!@deprecated(reason:"Use canQueryBillingInfo instead")canManageMembers:Boolean!canQuery:Boolean!canQueryAudit:Boolean!canQueryBillingInfo:Boolean!canQueryInvoices:Boolean!@deprecated(reason:"Use canQueryBillingInfo instead")canQueryMembers:Boolean!canQueryStats:Boolean!canReadTickets:Boolean!canRemoveMembers:Boolean!canSetConstrainedPlan:Boolean!canUpdateBillingInfo:Boolean!canUpdateMetadata:Boolean!}enum AccountState{ACTIVE CLOSED UNKNOWN UNPROVISIONED}"""A time window with a specified granularity over a given account.""" type AccountStatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:AccountBillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountBillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountBillingUsageStatsOrderBySpec!]):[AccountBillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:AccountEdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountEdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountEdgeServerInfosOrderBySpec!]):[AccountEdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:AccountErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountErrorStatsOrderBySpec!]):[AccountErrorStatsRecord!]!fieldExecutions("""Filter to select what rows to return.""" filter:AccountFieldExecutionsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldExecutions by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldExecutionsOrderBySpec!]):[AccountFieldExecutionsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:AccountFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldLatenciesOrderBySpec!]):[AccountFieldLatenciesRecord!]!fieldRequestsByClientVersion("""Filter to select what rows to return.""" filter:AccountFieldRequestsByClientVersionFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldRequestsByClientVersion by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldRequestsByClientVersionOrderBySpec!]):[AccountFieldRequestsByClientVersionRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:AccountFieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldUsageOrderBySpec!]):[AccountFieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:AccountOperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountOperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountOperationCheckStatsOrderBySpec!]):[AccountOperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:AccountQueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountQueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountQueryStatsOrderBySpec!]):[AccountQueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:AccountTracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountTracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountTracePathErrorsRefsOrderBySpec!]):[AccountTracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:AccountTraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountTraceRefs by. 
The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountTraceRefsOrderBySpec!]):[AccountTraceRefsRecord!]!}"""Columns of AccountTracePathErrorsRefs.""" enum AccountTracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type AccountTracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in AccountTracePathErrorsRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountTracePathErrorsRefsFilter{and:[AccountTracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:AccountTracePathErrorsRefsFilterIn not:AccountTracePathErrorsRefsFilter or:[AccountTracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in AccountTracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountTracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type AccountTracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input AccountTracePathErrorsRefsOrderBySpec{column:AccountTracePathErrorsRefsColumn!direction:Ordering!}type AccountTracePathErrorsRefsRecord{"""Dimensions of AccountTracePathErrorsRefs that can be grouped by.""" groupBy:AccountTracePathErrorsRefsDimensions!"""Metrics of AccountTracePathErrorsRefs that can be aggregated over.""" metrics:AccountTracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountTraceRefs.""" enum AccountTraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type AccountTraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID traceId:ID}"""Filter for data in AccountTraceRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountTraceRefsFilter{and:[AccountTraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. 
To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:AccountTraceRefsFilterIn not:AccountTraceRefsFilter or:[AccountTraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in AccountTraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountTraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" traceId:[ID]}type AccountTraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input AccountTraceRefsOrderBySpec{column:AccountTraceRefsColumn!direction:Ordering!}type AccountTraceRefsRecord{"""Dimensions of AccountTraceRefs that can be grouped by.""" groupBy:AccountTraceRefsDimensions!"""Metrics of AccountTraceRefs that can be aggregated over.""" metrics:AccountTraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""An actor (view of Identity) that performed an action within Studio.""" type Actor{actorId:ID!type:ActorType!}input ActorInput{actorId:ID!type:ActorType!}enum ActorType{ANONYMOUS_USER BACKFILL CRON GRAPH INTERNAL_IDENTITY SYNCHRONIZATION SYSTEM USER}union AddOperationCollectionEntryResult=OperationCollectionEntry|PermissionError|ValidationError union AddOperationCollectionToVariantResult=GraphVariant|InvalidTarget|PermissionError|ValidationError type AffectedClient{"""ID, often the name, of the client set by the user and reported alongside metrics""" clientReferenceId:ID@deprecated(reason:"Unsupported.")"""version of the client set by the user and reported alongside metrics""" clientVersion:String@deprecated(reason:"Unsupported.")}type AffectedQuery{"""If the operation would be approved if the check ran again. Returns null if queried from SchemaDiff.changes.affectedQueries.alreadyApproved""" alreadyApproved:Boolean """If the operation would be ignored if the check ran again""" alreadyIgnored:Boolean """List of changes affecting this query. Returns null if queried from SchemaDiff.changes.affectedQueries.changes""" changes:[ChangeOnOperation!]"""Name to display to the user for the operation""" displayName:String id:ID!"""Determines if this query validates against the proposed schema""" isValid:Boolean """Whether this operation was ignored and its severity was downgraded for that reason""" markedAsIgnored:Boolean """Whether the changes were marked as safe and its severity was downgraded for that reason""" markedAsSafe:Boolean """Name provided for the operation, which can be empty string if it is an anonymous operation""" name:String """First 128 characters of query signature for display""" signature:String}interface ApiKey{id:ID!keyName:String token:String!}type ApiKeyProvision{apiKey:ApiKey!created:Boolean!}type AuditLogExport{actors:[Identity!]bigqueryTriggeredAt:Timestamp completedAt:Timestamp createdAt:Timestamp!exportedFiles:[String!]from:Timestamp!graphs:[Service!]id:ID!requester:User status:AuditStatus!to:Timestamp!}type AuditLogExportMutation{cancel:Account delete:Account}enum AuditStatus{CANCELLED COMPLETED EXPIRED FAILED IN_PROGRESS QUEUED}type AvatarDeleteError{clientMessage:String!code:AvatarDeleteErrorCode!serverMessage:String!}enum AvatarDeleteErrorCode{SSO_USERS_CANNOT_DELETE_SELF_AVATAR}type AvatarUploadError{clientMessage:String!code:AvatarUploadErrorCode!serverMessage:String!}enum AvatarUploadErrorCode{SSO_USERS_CANNOT_UPLOAD_SELF_AVATAR}union AvatarUploadResult=AvatarUploadError|MediaUploadInfo type BillingAddress{address1:String address2:String city:String country:String state:String zip:String}"""Billing address inpnut""" input BillingAddressInput{address1:String!address2:String city:String!country:String!state:String!zip:String!}type BillingInfo{address:BillingAddress!cardType:String firstName:String lastFour:Int lastName:String month:Int vatNumber:String year:Int}enum BillingModel{REQUEST_BASED SEAT_BASED}type 
BillingMonth{end:Timestamp!requests:Long!start:Timestamp!}enum BillingPeriod{MONTHLY QUARTERLY SEMI_ANNUALLY YEARLY}type BillingPlan{addons:[BillingPlanAddon!]!billingModel:BillingModel!billingPeriod:BillingPeriod capabilities:BillingPlanCapabilities!description:String id:ID!isTrial:Boolean!kind:BillingPlanKind!name:String!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of subscribing to this plan with a quantity of 1 (currently always the case)""" pricePerUnitInUsdCents:Int!"""Whether the plan is accessible by all users in QueryRoot.allPlans, QueryRoot.plan, or AccountMutation.setPlan""" public:Boolean!tier:BillingPlanTier!}type BillingPlanAddon{id:ID!pricePerUnitInUsdCents:Int!}type BillingPlanAddonV2{id:ID!pricePerUnitInUsdCents:Int!}type BillingPlanCapabilities{clients:Boolean!contracts:Boolean!datadog:Boolean!errors:Boolean!federation:Boolean!launches:Boolean!maxAuditInDays:Int!maxRangeInDays:Int maxRequestsPerMonth:Long metrics:Boolean!notifications:Boolean!operationRegistry:Boolean!ranges:[String!]!schemaValidation:Boolean!traces:Boolean!userRoles:Boolean!webhooks:Boolean!}enum BillingPlanKind{COMMUNITY ENTERPRISE_INTERNAL ENTERPRISE_PAID ENTERPRISE_PILOT TEAM_PAID TEAM_TRIAL}enum BillingPlanKindV2{COMMUNITY ENTERPRISE_INTERNAL ENTERPRISE_PAID ENTERPRISE_PILOT TEAM_PAID TEAM_TRIAL UNKNOWN}enum BillingPlanTier{COMMUNITY ENTERPRISE TEAM}enum BillingPlanTierV2{COMMUNITY ENTERPRISE TEAM UNKNOWN}type BillingPlanV2{addons:[BillingPlanAddonV2!]!billingModel:BillingModel!billingPeriod:BillingPeriod clients:Boolean!contracts:Boolean!datadog:Boolean!description:String errors:Boolean!federation:Boolean!id:ID!isTrial:Boolean!kind:BillingPlanKindV2!launches:Boolean!maxAuditInDays:Int!maxRangeInDays:Int maxRequestsPerMonth:Long metrics:Boolean!name:String!notifications:Boolean!operationRegistry:Boolean!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of subscribing to this plan with a quantity of 1 (currently always the case)""" pricePerUnitInUsdCents:Int!"""Whether the plan is accessible by all users in QueryRoot.allPlans, QueryRoot.plan, or AccountMutation.setPlan""" public:Boolean!ranges:[String!]!schemaValidation:Boolean!tier:BillingPlanTierV2!traces:Boolean!userRoles:Boolean!webhooks:Boolean!}type BillingSubscription{activatedAt:Timestamp!addons:[BillingSubscriptionAddon!]!autoRenew:Boolean!"""The price of the subscription when ignoring add-ons (such as seats), ie quantity * pricePerUnitInUsdCents""" basePriceInUsdCents:Long!canceledAt:Timestamp currentPeriodEndsAt:Timestamp!currentPeriodStartedAt:Timestamp!expiresAt:Timestamp plan:BillingPlan!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of every unit in the subscription (hence multiplied by quantity to get to the basePriceInUsdCents)""" pricePerUnitInUsdCents:Int!quantity:Int!"""Total price of the subscription when it next renews, including add-ons (such as seats)""" renewalTotalPriceInUsdCents:Long!state:SubscriptionState!"""Total price of the subscription, including add-ons (such as seats)""" totalPriceInUsdCents:Long!"""When this subscription's trial period expires (if it is a trial). Not the same as the
-subscription's Recurly expiration).""" trialExpiresAt:Timestamp uuid:ID!}type BillingSubscriptionAddon{id:ID!pricePerUnitInUsdCents:Int!quantity:Int!}type BillingSubscriptionAddonV2{id:ID!pricePerUnitInUsdCents:Int!quantity:Int!}type BillingSubscriptionV2{"""The price of every unit in the subscription (hence multiplied by quantity to get to the basePriceInUsdCents)""" activatedAt:Timestamp!addons:[BillingSubscriptionAddonV2!]!autoRenew:Boolean!canceledAt:Timestamp currentPeriodEndsAt:Timestamp!currentPeriodStartedAt:Timestamp!expiresAt:Timestamp plan:BillingPlanV2!"""The price of every seat""" pricePerSeatInUsdCents:Int quantity:Int!state:SubscriptionStateV2!"""When this subscription's trial period expires (if it is a trial). Not the same as the
-subscription's Recurly expiration).""" trialExpiresAt:Timestamp uuid:ID!}"""Columns of BillingUsageStats.""" enum BillingUsageStatsColumn{ACCOUNT_ID OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG SERVICE_ID TIMESTAMP}type BillingUsageStatsDimensions{accountId:ID operationCountProvidedExplicitly:String schemaTag:String serviceId:ID}"""Filter for data in BillingUsageStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input BillingUsageStatsFilter{"""Selects rows whose accountId dimension equals the given value if not null. To query for the null value, use {in: {accountId: [null]}} instead.""" accountId:ID and:[BillingUsageStatsFilter!]in:BillingUsageStatsFilterIn not:BillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[BillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in BillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input BillingUsageStatsFilterIn{"""Selects rows whose accountId dimension is in the given list. A null value in the list means a row with null for that dimension.""" accountId:[ID]"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type BillingUsageStatsMetrics{operationCount:Long!}input BillingUsageStatsOrderBySpec{column:BillingUsageStatsColumn!direction:Ordering!}type BillingUsageStatsRecord{"""Dimensions of BillingUsageStats that can be grouped by.""" groupBy:BillingUsageStatsDimensions!"""Metrics of BillingUsageStats that can be aggregated over.""" metrics:BillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""A blob (base64'ed in JSON & GraphQL)""" scalar Blob type Build{input:BuildInput!result:BuildResult}type BuildError{code:String locations:[SourceLocation!]!message:String!}type BuildFailure{errorMessages:[BuildError!]!}union BuildInput=CompositionBuildInput|FilterBuildInput union BuildResult=BuildFailure|BuildSuccess type BuildSuccess{coreSchema:CoreSchema!}enum CacheScope{PRIVATE PUBLIC UNKNOWN UNRECOGNIZED}"""A specific change to a definition in your schema.""" type Change{affectedQueries:[AffectedQuery!]"""Target arg of change made.""" argNode:NamedIntrospectionArg """Indication of the category of the change (e.g. addition, removal, edit).""" category:ChangeCategory!"""Node related to the top level node that was changed, such as a field in an object,
-a value in an enum or the object of an interface.""" childNode:NamedIntrospectionValue """Indication of the kind of target and action of the change, e.g. 'TYPE_REMOVED'.""" code:String!"""Human-readable description of the change.""" description:String!"""Top level node affected by the change.""" parentNode:NamedIntrospectionType """Severity of the change, either failure or warning.""" severity:ChangeSeverity!"""Indication of the success of the overall change, either failure, warning, or notice.""" type:ChangeType!@deprecated(reason:"use severity instead")}"""Defines a set of categories that a schema change
-can be grouped by.""" enum ChangeCategory{ADDITION DEPRECATION EDIT REMOVAL}"""These schema change codes represent all of the possible changes that can
-occur during the schema diff algorithm.""" enum ChangeCode{"""Type of the argument was changed.""" ARG_CHANGED_TYPE """Argument was changed from nullable to non-nullable.""" ARG_CHANGED_TYPE_OPTIONAL_TO_REQUIRED """Default value added or changed for the argument.""" ARG_DEFAULT_VALUE_CHANGE """Description was added, removed, or updated for argument.""" ARG_DESCRIPTION_CHANGE """Argument to a field was removed.""" ARG_REMOVED """Argument to the directive was removed.""" DIRECTIVE_ARG_REMOVED """Location of the directive was removed.""" DIRECTIVE_LOCATION_REMOVED """Directive was removed.""" DIRECTIVE_REMOVED """Repeatable flag was removed for directive.""" DIRECTIVE_REPEATABLE_REMOVED """Enum was deprecated.""" ENUM_DEPRECATED """Reason for enum deprecation changed.""" ENUM_DEPRECATED_REASON_CHANGE """Enum deprecation was removed.""" ENUM_DEPRECATION_REMOVED """Description was added, removed, or updated for enum value.""" ENUM_VALUE_DESCRIPTION_CHANGE """Field was added to the type.""" FIELD_ADDED """Return type for the field was changed.""" FIELD_CHANGED_TYPE """Field was deprecated.""" FIELD_DEPRECATED """Reason for field deprecation changed.""" FIELD_DEPRECATED_REASON_CHANGE """Field deprecation removed.""" FIELD_DEPRECATION_REMOVED """Description was added, removed, or updated for field.""" FIELD_DESCRIPTION_CHANGE """Type of the field in the input object was changed.""" FIELD_ON_INPUT_OBJECT_CHANGED_TYPE """Field was removed from the type.""" FIELD_REMOVED """Field was removed from the input object.""" FIELD_REMOVED_FROM_INPUT_OBJECT """Non-nullable field was added to the input object. (Deprecated.)""" NON_NULLABLE_FIELD_ADDED_TO_INPUT_OBJECT """Nullable field was added to the input type. (Deprecated.)""" NULLABLE_FIELD_ADDED_TO_INPUT_OBJECT """Nullable argument was added to the field.""" OPTIONAL_ARG_ADDED """Optional field was added to the input type.""" OPTIONAL_FIELD_ADDED_TO_INPUT_OBJECT """Non-nullable argument was added to the field.""" REQUIRED_ARG_ADDED """Non-nullable argument added to directive.""" REQUIRED_DIRECTIVE_ARG_ADDED """Required field was added to the input object.""" REQUIRED_FIELD_ADDED_TO_INPUT_OBJECT """Type was added to the schema.""" TYPE_ADDED """Type now implements the interface.""" TYPE_ADDED_TO_INTERFACE """A new value was added to the enum.""" TYPE_ADDED_TO_UNION """Type was changed from one kind to another.
-Ex: scalar to object or enum to union.""" TYPE_CHANGED_KIND """Description was added, removed, or updated for type.""" TYPE_DESCRIPTION_CHANGE """Type (object or scalar) was removed from the schema.""" TYPE_REMOVED """Type no longer implements the interface.""" TYPE_REMOVED_FROM_INTERFACE """Type is no longer included in the union.""" TYPE_REMOVED_FROM_UNION """A new value was added to the enum.""" VALUE_ADDED_TO_ENUM """Value was removed from the enum.""" VALUE_REMOVED_FROM_ENUM}"""Represents the tuple of static information
-about a particular kind of schema change.""" type ChangeDefinition{category:ChangeCategory!code:ChangeCode!defaultSeverity:ChangeSeverity!}"""Info about a change in the context of an operation it affects""" type ChangeOnOperation{"""Human-readable explanation of the impact of this change on the operation""" impact:String """The semantic info about this change, i.e. info about the change that doesn't depend on the operation""" semanticChange:SemanticChange!}enum ChangeSeverity{FAILURE NOTICE}"""Summary of the changes for a schema diff, computed by placing the changes into categories and then
+schema {
+  query: Query
+  mutation: Mutation
+}
+
+"""An organization in Apollo Studio. Can have multiple members and graphs."""
+type Organization {
+  auditLogExports: [AuditLogExport!]
+  """Graphs belonging to this organization."""
+  graphs(includeDeleted: Boolean): [Graph!]!
+  """Globally unique identifier, which isn't guaranteed stable (can be changed by administrators)."""
+  id: ID!
+  """Name of the organization, which can change over time and isn't unique."""
+  name: String!
+  """Graphs belonging to this organization."""
+  services(includeDeleted: Boolean): [Graph!]! @deprecated(reason: "Use graphs field instead")
+}
+
+type OrganizationMutation {
+  """Trigger a request for an audit export"""
+  requestAuditExport(actors: [ActorInput!], from: Timestamp!, graphIds: [String!], to: Timestamp!): Organization
+}
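+
+# Usage sketch (comment only, not part of the upstream schema): the mutation field
+# above could be invoked roughly as follows. The root `organization(id:)` mutation
+# field is an assumption for illustration; the selections on Organization and
+# AuditLogExport come from the type definitions in this file.
+#
+#   mutation RequestAuditExport($from: Timestamp!, $to: Timestamp!) {
+#     organization(id: "example-org") {        # hypothetical root field
+#       requestAuditExport(
+#         from: $from
+#         to: $to
+#         actors: [{ actorId: "example-user", type: USER }]
+#       ) {
+#         id
+#         name
+#         auditLogExports { id status }
+#       }
+#     }
+#   }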
+
+"""Represents an actor that performs actions in Apollo Studio. Most actors are either a `USER` or a `GRAPH` (based on a request's provided API key), and they have the corresponding `ActorType`."""
+type Actor {
+  actorId: ID!
+  type: ActorType!
+}
+
+"""Input type to provide when specifying an `Actor` in operation arguments. See also the `Actor` object type."""
+input ActorInput {
+  actorId: ID!
+  type: ActorType!
+}
+
+enum ActorType {
+  ANONYMOUS_USER
+  BACKFILL
+  CRON
+  GRAPH
+  INTERNAL_IDENTITY
+  SYNCHRONIZATION
+  SYSTEM
+  USER
+}
+
+union AddOperationCollectionEntriesResult = AddOperationCollectionEntriesSuccess | PermissionError | ValidationError
+
+type AddOperationCollectionEntriesSuccess {
+  operationCollectionEntries: [OperationCollectionEntry!]!
+}
+
+union AddOperationCollectionEntryResult = OperationCollectionEntry | PermissionError | ValidationError
+
+input AddOperationInput {
+  """The operation's fields."""
+  document: OperationCollectionEntryStateInput!
+  """The operation's name."""
+  name: String!
+}
+
+type AffectedQuery {
+  id: ID!
+  """First 128 characters of query signature for display"""
+  signature: String
+  """Name to display to the user for the operation"""
+  displayName: String
+  """Name provided for the operation, which can be empty string if it is an anonymous operation"""
+  name: String
+  """Determines if this query validates against the proposed schema"""
+  isValid: Boolean
+  """List of changes affecting this query. Returns null if queried from SchemaDiff.changes.affectedQueries.changes"""
+  changes: [ChangeOnOperation!]
+  """Whether this operation was ignored and its severity was downgraded for that reason"""
+  markedAsIgnored: Boolean
+  """Whether the changes were marked as safe and its severity was downgraded for that reason"""
+  markedAsSafe: Boolean
+  """If the operation would be approved if the check ran again. Returns null if queried from SchemaDiff.changes.affectedQueries.alreadyApproved"""
+  alreadyApproved: Boolean
+  """If the operation would be ignored if the check ran again"""
+  alreadyIgnored: Boolean
+}
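+
+# Illustrative client-side fragment (not part of the schema itself) selecting the
+# scalar fields defined above; the nested `changes` field is omitted because it
+# requires a sub-selection on ChangeOnOperation, defined further below.
+#
+#   fragment AffectedQuerySummary on AffectedQuery {
+#     id
+#     displayName
+#     name
+#     signature
+#     isValid
+#     markedAsIgnored
+#     markedAsSafe
+#     alreadyApproved
+#     alreadyIgnored
+#   }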
+
+"""
+Represents an API key that's used to authenticate a
+particular Apollo user or graph.
+"""
+interface ApiKey {
+  """The API key's ID."""
+  id: ID!
+  """The API key's name, for distinguishing it from other keys."""
+  keyName: String
+  """The value of the API key. **This is a secret credential!**"""
+  token: String!
+}
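+
+# Sketch of a selection on the interface above (not part of the schema); `token`
+# is the secret credential called out in its description, so treat any selection
+# of it as handling a secret.
+#
+#   fragment ApiKeyDetails on ApiKey {
+#     id
+#     keyName
+#     token
+#   }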
+
+type ApiKeyProvision {
+  apiKey: ApiKey!
+  created: Boolean!
+}
+
+type AuditLogExport {
+  """The list of actors to filter the audit export"""
+  actors: [Identity!]
+  """The time when the audit export was completed"""
+  completedAt: Timestamp
+  """The time when the audit export was reqeusted"""
+  createdAt: Timestamp!
+  """List of URLs to download the audits for the requested range"""
+  downloadUrls: [String!]
+  """The starting point of audits to include in export"""
+  from: Timestamp!
+  """The list of graphs to filter the audit export"""
+  graphs: [Graph!]
+  """The id for the audit export"""
+  id: ID!
+  """The user that initiated the audit export"""
+  requester: User
+  """The status of the audit export"""
+  status: AuditStatus!
+  """The end point of audits to include in export"""
+  to: Timestamp!
+}
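+
+# Example fragment (comment only, not part of the schema) selecting the scalar and
+# enum fields defined above, e.g. for polling an export until its status is COMPLETED:
+#
+#   fragment AuditExportStatus on AuditLogExport {
+#     id
+#     status
+#     createdAt
+#     completedAt
+#     from
+#     to
+#     downloadUrls
+#   }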
+
+enum AuditStatus {
+  CANCELLED
+  COMPLETED
+  EXPIRED
+  FAILED
+  IN_PROGRESS
+  QUEUED
+}
+
+"""The building of a Studio variant (including supergraph composition and any contract filtering) as part of a launch."""
+type Build {
+  """The inputs provided to the build, including subgraph and contract details."""
+  input: BuildInput!
+  """The result of the build. This value is null until the build completes."""
+  result: BuildResult
+}
+
+"""A single error that occurred during the failed execution of a build."""
+type BuildError {
+  code: String
+  locations: [SourceLocation!]!
+  message: String!
+}
+
+"""Contains the details of an executed build that failed."""
+type BuildFailure {
+  """A list of all errors that occurred during the failed build."""
+  errorMessages: [BuildError!]!
+}
+
+union BuildInput = CompositionBuildInput | FilterBuildInput
+
+union BuildResult = BuildFailure | BuildSuccess
+
+"""Contains the details of an executed build that succeeded."""
+type BuildSuccess {
+  """Contains the supergraph and API schemas created by composition."""
+  coreSchema: CoreSchema!
+}
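+
+# Sketch of handling the BuildResult union defined above (not part of the schema).
+# The BuildError fields come from its definition above; the CoreSchema sub-selection
+# (`coreHash`) is an assumption based on the CoreSchema type defined elsewhere in
+# this schema.
+#
+#   fragment BuildOutcome on Build {
+#     result {
+#       ... on BuildFailure {
+#         errorMessages { code message }
+#       }
+#       ... on BuildSuccess {
+#         coreSchema { coreHash }
+#       }
+#     }
+#   }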
+
+"""A single change that was made to a definition in a schema."""
+type Change {
+  """The severity of the change (e.g., `FAILURE` or `NOTICE`)"""
+  severity: ChangeSeverity!
+  """Indicates the type of change that was made, and to what (e.g., 'TYPE_REMOVED')."""
+  code: String!
+  """Indication of the category of the change (e.g. addition, removal, edit)."""
+  category: ChangeCategory!
+  """A human-readable description of the change."""
+  description: String!
+  affectedQueries: [AffectedQuery!]
+  """Top level node affected by the change."""
+  parentNode: NamedIntrospectionType
+  """
+  Node related to the top level node that was changed, such as a field in an object,
+  a value in an enum or the object of an interface.
+  """
+  childNode: NamedIntrospectionValue
+  """Target arg of change made."""
+  argNode: NamedIntrospectionArg
+}
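+
+# Minimal fragment (not part of the schema) over the scalar fields of Change defined
+# above; `parentNode`, `childNode`, and `argNode` are omitted because they need
+# sub-selections on the NamedIntrospection* types defined elsewhere.
+#
+#   fragment ChangeOverview on Change {
+#     severity
+#     code
+#     category
+#     description
+#   }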
+
+"""
+Defines a set of categories that a schema change
+can be grouped by.
+"""
+enum ChangeCategory {
+  ADDITION
+  EDIT
+  REMOVAL
+  DEPRECATION
+}
+
+"""
+These schema change codes represent all of the possible changes that can
+occur during the schema diff algorithm.
+"""
+enum ChangeCode {
+  """Field was removed from the type."""
+  FIELD_REMOVED
+  """Type (object or scalar) was removed from the schema."""
+  TYPE_REMOVED
+  """Argument to a field was removed."""
+  ARG_REMOVED
+  """Type is no longer included in the union."""
+  TYPE_REMOVED_FROM_UNION
+  """Field was removed from the input object."""
+  FIELD_REMOVED_FROM_INPUT_OBJECT
+  """Value was removed from the enum."""
+  VALUE_REMOVED_FROM_ENUM
+  """Type no longer implements the interface."""
+  TYPE_REMOVED_FROM_INTERFACE
+  """Non-nullable argument was added to the field."""
+  REQUIRED_ARG_ADDED
+  """Non-nullable field was added to the input object. (Deprecated.)"""
+  NON_NULLABLE_FIELD_ADDED_TO_INPUT_OBJECT
+  """Required field was added to the input object."""
+  REQUIRED_FIELD_ADDED_TO_INPUT_OBJECT
+  """Return type for the field was changed."""
+  FIELD_CHANGED_TYPE
+  """Type of the field in the input object was changed."""
+  FIELD_ON_INPUT_OBJECT_CHANGED_TYPE
+  """
+  Type was changed from one kind to another.
+  Ex: scalar to object or enum to union.
+  """
+  TYPE_CHANGED_KIND
+  """Type of the argument was changed."""
+  ARG_CHANGED_TYPE
+  """Argument was changed from nullable to non-nullable."""
+  ARG_CHANGED_TYPE_OPTIONAL_TO_REQUIRED
+  """A new value was added to the enum."""
+  VALUE_ADDED_TO_ENUM
+  """A new value was added to the enum."""
+  TYPE_ADDED_TO_UNION
+  """Type now implements the interface."""
+  TYPE_ADDED_TO_INTERFACE
+  """Default value added or changed for the argument."""
+  ARG_DEFAULT_VALUE_CHANGE
+  """Nullable argument was added to the field."""
+  OPTIONAL_ARG_ADDED
+  """Nullable field was added to the input type. (Deprecated.)"""
+  NULLABLE_FIELD_ADDED_TO_INPUT_OBJECT
+  """Optional field was added to the input type."""
+  OPTIONAL_FIELD_ADDED_TO_INPUT_OBJECT
+  """Field was added to the type."""
+  FIELD_ADDED
+  """Type was added to the schema."""
+  TYPE_ADDED
+  """Enum was deprecated."""
+  ENUM_DEPRECATED
+  """Enum deprecation was removed."""
+  ENUM_DEPRECATION_REMOVED
+  """Reason for enum deprecation changed."""
+  ENUM_DEPRECATED_REASON_CHANGE
+  """Field was deprecated."""
+  FIELD_DEPRECATED
+  """Field deprecation removed."""
+  FIELD_DEPRECATION_REMOVED
+  """Reason for field deprecation changed."""
+  FIELD_DEPRECATED_REASON_CHANGE
+  """Description was added, removed, or updated for type."""
+  TYPE_DESCRIPTION_CHANGE
+  """Description was added, removed, or updated for field."""
+  FIELD_DESCRIPTION_CHANGE
+  """Description was added, removed, or updated for enum value."""
+  ENUM_VALUE_DESCRIPTION_CHANGE
+  """Description was added, removed, or updated for argument."""
+  ARG_DESCRIPTION_CHANGE
+  """Directive was removed."""
+  DIRECTIVE_REMOVED
+  """Argument to the directive was removed."""
+  DIRECTIVE_ARG_REMOVED
+  """Location of the directive was removed."""
+  DIRECTIVE_LOCATION_REMOVED
+  """Repeatable flag was removed for directive."""
+  DIRECTIVE_REPEATABLE_REMOVED
+  """Non-nullable argument added to directive."""
+  REQUIRED_DIRECTIVE_ARG_ADDED
+}
+
+"""
+Represents the tuple of static information
+about a particular kind of schema change.
+"""
+type ChangeDefinition {
+  code: ChangeCode!
+  defaultSeverity: ChangeSeverity!
+  category: ChangeCategory!
+}
+
+"""An addition made to a Studio variant's changelog after a launch."""
+type ChangelogLaunchResult {
+  createdAt: Timestamp!
+  schemaTagID: ID!
+}
+
+"""Info about a change in the context of an operation it affects"""
+type ChangeOnOperation {
+  """The semantic info about this change, i.e. info about the change that doesn't depend on the operation"""
+  semanticChange: SemanticChange!
+  """Human-readable explanation of the impact of this change on the operation"""
+  impact: String
+}
+
+enum ChangeSeverity {
+  FAILURE
+  NOTICE
+}
+
+"""
+Summary of the changes for a schema diff, computed by placing the changes into categories and then
 counting the size of each category. This categorization can be done in different ways, and
 accordingly there are multiple fields here for each type of categorization.
 
@@ -22,54 +321,1130 @@ Note that if an object or interface field is added/removed, there won't be any a
 changes generated for its arguments or @deprecated usages. If an enum type is added/removed, there
 will be addition/removal changes generated for its values, but not for those values' @deprecated
 usages. Description changes won't be generated for a schema element if that element (or an
-ancestor) was added/removed.""" type ChangeSummary{"""Counts for changes to fields of objects, input objects, and interfaces.""" field:FieldChangeSummaryCounts!"""Counts for all changes.""" total:TotalChangeSummaryCounts!"""Counts for changes to non-field aspects of objects, input objects, and interfaces,
-and all aspects of enums, unions, and scalars.""" type:TypeChangeSummaryCounts!}enum ChangeType{FAILURE NOTICE}type ChangelogLaunchResult{createdAt:Timestamp!schemaTagID:ID!}"""Destination for notifications""" interface Channel{id:ID!name:String!subscriptions:[ChannelSubscription!]!}interface ChannelSubscription{channels:[Channel!]!enabled:Boolean!id:ID!variant:String}type CheckConfiguration{"""Time when check configuration was created""" createdAt:Timestamp!"""Clients to ignore during validation""" excludedClients:[ClientFilter!]!"""Operation names to ignore during validation""" excludedOperationNames:[OperationNameFilter]"""Operations to ignore during validation""" excludedOperations:[ExcludedOperation!]!"""Graph that this check configuration belongs to""" graphID:ID!"""ID of the check configuration""" id:ID!"""Default configuration to include operations on the base variant.""" includeBaseVariant:Boolean!"""Variant overrides for validation""" includedVariants:[String!]!"""Minimum number of requests within the window for an operation to be considered.""" operationCountThreshold:Int!"""Number of requests within the window for an operation to be considered, relative to
-total request count. Expected values are between 0 and 0.05 (minimum 5% of
-total request volume)""" operationCountThresholdPercentage:Float!"""Only check operations from the last <timeRangeSeconds> seconds.
-The default is 7 days (604,800 seconds).""" timeRangeSeconds:Long!"""Time when check configuration was last updated""" updatedAt:Timestamp!"""Identity of the last user to update the check configuration""" updatedBy:Identity}"""Filter options available when listing checks.""" input CheckFilterInput{authors:[String!]branches:[String!]status:CheckFilterInputStatusOption subgraphs:[String!]}"""Options for filtering CheckWorkflows by status""" enum CheckFilterInputStatusOption{FAILED PASSED PENDING}"""The result of performing a subgraph check, including all steps.""" type CheckPartialSchemaResult{"""Overall result of the check. This will be null if composition validation was unsuccessful.""" checkSchemaResult:CheckSchemaResult """Result of compostion run as part of the overall subgraph check.""" compositionValidationResult:CompositionValidationResult!"""Whether any modifications were detected in the composed core schema.""" coreSchemaModified:Boolean!"""Check workflow associated with the overall subgraph check.""" workflow:CheckWorkflow}type CheckSchemaResult{"""Schema diff and affected operations generated by the schema check""" diffToPrevious:SchemaDiff!"""ID of the operations check that was created""" operationsCheckID:ID!"""Generated url to view schema diff in Engine""" targetUrl:String """Workflow associated with this check result""" workflow:CheckWorkflow}type CheckWorkflow{"""The variant provided as a base to check against.  Only the differences from the
-base schema will be tested in operations checks.""" baseVariant:GraphVariant completedAt:Timestamp createdAt:Timestamp!"""Contextual parameters supplied by the runtime environment where the check was run.""" gitContext:GitContext id:ID!"""The name of the implementing service that was responsible for triggering the validation.""" implementingServiceName:String """If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served.""" introspectionEndpoint:String """Only true if the check was triggered from Sandbox Checks page.""" isSandboxCheck:Boolean!"""If this check was created by rerunning, the original check that was rerun.""" rerunOf:CheckWorkflow """Checks created by re-running this check, most recent first.""" reruns(limit:Int!=20):[CheckWorkflow!]startedAt:Timestamp """Overall status of the workflow, based on the underlying task statuses.""" status:CheckWorkflowStatus!"""The set of check tasks associated with this workflow, e.g. OperationsCheck, GraphComposition, etc.""" tasks:[CheckWorkflowTask!]!"""Identity of the user who ran this check""" triggeredBy:Identity """Configuration of validation at the time the check was run.""" validationConfig:SchemaDiffValidationConfig}type CheckWorkflowMutation{"""Re-run a check workflow using the current configuration. A new workflow is created and returned.""" rerun:CheckWorkflowRerunResult}type CheckWorkflowRerunResult{"""Check workflow created by re-running.""" result:CheckWorkflow """Check workflow that was rerun.""" source:CheckWorkflow}enum CheckWorkflowStatus{FAILED PASSED PENDING}interface CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!status:CheckWorkflowTaskStatus!"""The workflow that this task belongs to.""" workflow:CheckWorkflow!}enum CheckWorkflowTaskStatus{BLOCKED FAILED PASSED PENDING}"""Client filter configuration for a graph.""" type ClientFilter{"""name of the client set by the user and reported alongside metrics""" name:String """version of the client set by the user and reported alongside metrics""" version:String}"""Options to filter by client reference ID, client name, and client version.
-If passing client version, make sure to either provide a client reference ID or client name.""" input ClientFilterInput{"""name of the client set by the user and reported alongside metrics""" name:String """version of the client set by the user and reported alongside metrics""" version:String}"""Filter options to exclude by client reference ID, client name, and client version.""" input ClientInfoFilter{name:String """Ignored""" referenceID:ID version:String}"""Filter options to exclude clients. Used as an output type for SchemaDiffValidationConfig.""" type ClientInfoFilterOutput{name:String version:String}enum ComparisonOperator{EQUALS GREATER_THAN GREATER_THAN_OR_EQUAL_TO LESS_THAN LESS_THAN_OR_EQUAL_TO NOT_EQUALS UNRECOGNIZED}"""The result of composition run in the cloud, upon an attempted subgraph deletion.""" type CompositionAndRemoveResult{"""The produced composition config. Will be null if there are any errors""" compositionConfig:CompositionConfig """Whether the removed implementing service existed.""" didExist:Boolean!"""  List of errors during composition. Errors mean that Apollo was unable to compose the
-  graph variant's subgraphs into a GraphQL schema. If present, gateways / routers
-are not updated.""" errors:[SchemaCompositionError]!"""ID that points to the results of composition.""" graphCompositionID:String!"""List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Whether the gateway/router was updated via Uplink, or would have been for dry runs.""" updatedGateway:Boolean!}"""The result of composition run in the cloud, upon attempted publish of a subgraph.""" type CompositionAndUpsertResult{"""The produced composition config, or null if there are any errors.""" compositionConfig:CompositionConfig """List of errors during composition. Errors mean that Apollo was unable to compose the
-graph variant's subgraphs into a supergraph schema. If present, gateways / routers
-are not updated.""" errors:[SchemaCompositionError]!"""ID that points to the results of composition.""" graphCompositionID:String!"""Copy text for the launch result of a publish.""" launchCliCopy:String """Link to corresponding launches page on Studio if available.""" launchUrl:String """List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Whether the gateway/router was updated via Uplink, or would have been for dry runs.""" updatedGateway:Boolean!"""Whether a subgraph was created as part of this mutation.""" wasCreated:Boolean!"""Whether an implementingService was updated as part of this mutation""" wasUpdated:Boolean!}type CompositionBuildInput{subgraphs:[Subgraph!]!version:String}type CompositionCheckTask implements CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!"""The result of the composition.""" result:CompositionResult status:CheckWorkflowTaskStatus!workflow:CheckWorkflow!}"""Composition configuration exposed to the gateway.""" type CompositionConfig{"""List of GCS links for implementing services that comprise a composed graph. Is empty if tag/inaccessible is enabled.""" implementingServiceLocations:[ImplementingServiceLocation!]!@deprecated(reason:"Soon we will stop writing to GCS locations")"""Hash of the API schema.""" schemaHash:String!}"""The result of composition run in the cloud.""" type CompositionPublishResult implements CompositionResult{"""The produced composition config. Will be null if there are any errors""" compositionConfig:CompositionConfig """Supergraph SDL generated by composition (this is not the CSDL, that is a deprecated format).""" csdl:GraphQLDocument@deprecated(reason:"Use supergraphSdl instead")"""List of errors during composition. Errors mean that Apollo was unable to compose the
-graph variant's subgraphs into a supergraph schema. If present, gateways / routers
-are not updated.""" errors:[SchemaCompositionError!]!"""ID for a particular composition.""" graphCompositionID:ID!"""List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Supergraph SDL generated by composition.""" supergraphSdl:GraphQLDocument """Whether the gateway/router was updated via Uplink, or would have been for dry runs.""" updatedGateway:Boolean!webhookNotificationBody:String}"""Result of a composition, often as the result of a subgraph check or subgraph publish.
-See implementations for more details.""" interface CompositionResult{"""Supergraph SDL generated by composition (this is not the cSDL, a deprecated format).""" csdl:GraphQLDocument@deprecated(reason:"Use supergraphSdl instead")"""List of errors during composition. Errors mean that Apollo was unable to compose the
-graph variant's subgraphs into a supergraph schema. If present, gateways / routers
-are not updated.""" errors:[SchemaCompositionError!]!"""Globally unique identifier for the composition.""" graphCompositionID:ID!"""List of subgraphs included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Supergraph SDL generated by composition.""" supergraphSdl:GraphQLDocument}type CompositionStatusSubscription implements ChannelSubscription{channels:[Channel!]!createdAt:Timestamp!enabled:Boolean!id:ID!lastUpdatedAt:Timestamp!variant:String}"""The composition config exposed to the gateway""" type CompositionValidationDetails{"""List of implementing service partial schemas that comprised the graph composed during validation""" implementingServices:[FederatedImplementingServicePartialSchema!]!"""Hash of the composed schema""" schemaHash:String}"""Metadata about the result of compositions validation run in the cloud, during a subgraph check.""" type CompositionValidationResult implements CompositionResult{"""Describes whether composition succeeded.""" compositionSuccess:Boolean!"""Akin to a composition config, represents the subgraph schemas and corresponding subgraphs that were used
-in running composition. Will be null if any errors are encountered. Also may contain a schema hash if
-one could be computed, which can be used for schema validation.""" compositionValidationDetails:CompositionValidationDetails """Supergraph SDL generated by composition (this is not the CSDL, that is a deprecated format).""" csdl:GraphQLDocument@deprecated(reason:"Use supergraphSdl instead")"""List of errors during composition. Errors mean that Apollo was unable to compose the
-graph variant's subgraphs into a supergraph schema. If present, gateways / routers
-are not updated.""" errors:[SchemaCompositionError!]!"""ID that points to the results of this composition.""" graphCompositionID:ID!"""The implementing service that was responsible for triggering the validation""" proposedImplementingService:FederatedImplementingServicePartialSchema!"""List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Supergraph schema document generated by composition.""" supergraphSdl:GraphQLDocument """If created as part of a check workflow, the associated workflow task.""" workflowTask:CompositionCheckTask}type ContractPreview{result:ContractPreviewResult!upstreamLaunch:Launch!}type ContractPreviewErrors{errors:[String!]!failedAt:ContractVariantFailedStep!}union ContractPreviewResult=ContractPreviewErrors|ContractPreviewSuccess type ContractPreviewSuccess{apiDocument:String!coreDocument:String!fieldCount:Int!typeCount:Int!}enum ContractVariantFailedStep{ADD_DIRECTIVE_DEFINITIONS_IF_NOT_PRESENT DIRECTIVE_DEFINITION_LOCATION_AUGMENTING EMPTY_ENUM_MASKING EMPTY_INPUT_OBJECT_MASKING EMPTY_OBJECT_AND_INTERFACE_FIELD_MASKING EMPTY_OBJECT_AND_INTERFACE_MASKING EMPTY_UNION_MASKING INPUT_VALIDATION PARSING PARSING_TAG_DIRECTIVES PARTIAL_INTERFACE_MASKING SCHEMA_RETRIEVAL TAG_INHERITING TAG_MATCHING TO_API_SCHEMA TO_FILTER_SCHEMA UNKNOWN VERSION_CHECK}type ContractVariantPreviewErrors{errorMessages:[String!]!failedStep:ContractVariantFailedStep!}union ContractVariantPreviewResult=ContractVariantPreviewErrors|ContractVariantPreviewSuccess type ContractVariantPreviewSuccess{baseApiSchema:String!baseCoreSchema:String!contractApiSchema:String!contractCoreSchema:String!}type ContractVariantUpsertErrors{errorMessages:[String!]!}union ContractVariantUpsertResult=ContractVariantUpsertErrors|ContractVariantUpsertSuccess type ContractVariantUpsertSuccess{contractVariant:GraphVariant!}type CoreSchema{apiDocument:GraphQLDocument!coreDocument:GraphQLDocument!coreHash:String!fieldCount:Int!tags:[String!]!typeCount:Int!}union CreateOperationCollectionResult=OperationCollection|PermissionError|ValidationError type CronExecution{completedAt:Timestamp failure:String id:ID!job:CronJob!resolvedAt:Timestamp resolvedBy:Actor schedule:String!startedAt:Timestamp!}type CronJob{group:String!name:String!recentExecutions(n:Int):[CronExecution!]!}enum DatadogApiRegion{EU EU1 US US1 US1FED US3 US5}type DatadogMetricsConfig{apiKey:String!apiRegion:DatadogApiRegion!enabled:Boolean!legacyMetricNames:Boolean!}union DeleteOperationCollectionResult=DeleteOperationCollectionSuccess|PermissionError type DeleteOperationCollectionSuccess{sandboxOwner:User variants:[GraphVariant!]!}"""The result of attempting to delete a graph variant.""" type DeleteSchemaTagResult{"""WHether a variant was deleted or not.""" deleted:Boolean!}enum DeletionTargetType{ACCOUNT USER}"""Support for a single directive on a graph variant""" type DirectiveSupportStatus{"""whether the directive is supported on the current graph variant""" enabled:Boolean!"""name of the directive""" name:String!}union DuplicateOperationCollectionResult=OperationCollection|PermissionError|ValidationError type DurationHistogram{averageDurationMs:Float buckets:[DurationHistogramBucket!]!durationMs("""Percentile (between 0 and 1)""" percentile:Float!):Float """Counts per durationBucket, where sequences of zeroes are replaced with the negative of their size""" sparseBuckets:[Long!]!totalCount:Long!totalDurationMs:Float!}type DurationHistogramBucket{count:Long!index:Int!rangeBeginMs:Float!rangeEndMs:Float!}input 
EdgeServerInfo{"""A randomly generated UUID, immutable for the lifetime of the edge server runtime.""" bootId:String!"""A unique identifier for the executable GraphQL served by the edge server. length must be <= 64 characters.""" executableSchemaId:String!"""The graph variant, defaults to 'current'""" graphVariant:String!="current" """The version of the edge server reporting agent, e.g. apollo-server-2.8, graphql-java-3.1, etc. length must be <= 256 characters.""" libraryVersion:String """The infra environment in which this edge server is running, e.g. localhost, Kubernetes, AWS Lambda, Google CloudRun, AWS ECS, etc. length must be <= 256 characters.""" platform:String """The runtime in which the edge server is running, e.g. node 12.03, zulu8.46.0.19-ca-jdk8.0.252-macosx_x64, etc. length must be <= 256 characters.""" runtimeVersion:String """If available, an identifier for the edge server instance, such that when restarting this instance it will have the same serverId, with a different bootId. For example, in Kubernetes this might be the pod name. Length must be <= 256 characters.""" serverId:String """An identifier used to distinguish the version (from the user's perspective) of the edge server's code itself. For instance, the git sha of the server's repository or the docker sha of the associated image this server runs with. Length must be <= 256 characters.""" userVersion:String}"""Columns of EdgeServerInfos.""" enum EdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID SERVICE_ID TIMESTAMP USER_VERSION}type EdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID serviceId:ID userVersion:String}"""Filter for data in EdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input EdgeServerInfosFilter{and:[EdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:EdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:EdgeServerInfosFilter or:[EdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose userVersion dimension equals the given value if not null. 
To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in EdgeServerInfos. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input EdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input EdgeServerInfosOrderBySpec{column:EdgeServerInfosColumn!direction:Ordering!}type EdgeServerInfosRecord{"""Dimensions of EdgeServerInfos that can be grouped by.""" groupBy:EdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}enum EmailCategory{EDUCATIONAL}type EmailPreferences{email:String!subscriptions:[EmailCategory!]!unsubscribedFromAll:Boolean!}interface Error{message:String!}"""Columns of ErrorStats.""" enum ErrorStatsColumn{ACCOUNT_ID CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type ErrorStatsDimensions{accountId:ID clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in ErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ErrorStatsFilter{"""Selects rows whose accountId dimension equals the given value if not null. To query for the null value, use {in: {accountId: [null]}} instead.""" accountId:ID and:[ErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ErrorStatsFilterIn not:ErrorStatsFilter or:[ErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. 
To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in ErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ErrorStatsFilterIn{"""Selects rows whose accountId dimension is in the given list. A null value in the list means a row with null for that dimension.""" accountId:[ID]"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type ErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input ErrorStatsOrderBySpec{column:ErrorStatsColumn!direction:Ordering!}type ErrorStatsRecord{"""Dimensions of ErrorStats that can be grouped by.""" groupBy:ErrorStatsDimensions!"""Metrics of ErrorStats that can be aggregated over.""" metrics:ErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}""" Input parameters for run explorer operation event.""" enum EventEnum{CLICK_CHECK_LIST CLICK_GO_TO_GRAPH_SETTINGS RUN_EXPLORER_OPERATION}"""Excluded operation for a graph.""" type ExcludedOperation{"""Operation ID to exclude from schema check.""" ID:ID!}"""Option to filter by operation ID.""" input ExcludedOperationInput{"""Operation ID to exclude from schema check.""" ID:ID!}type FeatureIntros{devGraph:Boolean!federatedGraph:Boolean!freeConsumerSeats:Boolean!}"""Feature Intros Input Type""" input FeatureIntrosInput{devGraph:Boolean federatedGraph:Boolean freeConsumerSeats:Boolean}"""Subgraph. Federated graph variants that are managed by Apollo Studio are composed of subgraphs.
-See https://www.apollographql.com/docs/federation/managed-federation/overview/ for more information.""" type FederatedImplementingService{"""The subgraph schema actively published, used for composition for the graph variant this subgraph belongs to.""" activePartialSchema:PartialSchema!"""Timestamp of when this subgraph was created.""" createdAt:Timestamp!"""The ID of the graph this subgraph belongs to.""" graphID:String!"""Which variant of a graph this subgraph belongs to.""" graphVariant:String!"""Name of the subgraph.""" name:String!"""The particular version/edition of a subgraph, entered by users. Typically a Git SHA or docker image ID.""" revision:String!"""Timestamp for when this subgraph was updated.""" updatedAt:Timestamp!"""URL of the subgraph's GraphQL endpoint.""" url:String}"""A minimal representation of a federated implementing service, using only a name and partial schema SDL""" type FederatedImplementingServicePartialSchema{"""The name of the implementing service""" name:String!"""The partial schema of the implementing service""" sdl:String!}"""Container for a list of subgraphs composing a graph.""" type FederatedImplementingServices{"""The list of underlying subgraphs.""" services:[FederatedImplementingService!]!}"""Counts of changes at the field level, including objects, interfaces, and input fields.""" type FieldChangeSummaryCounts{"""Number of changes that are additions of fields to object, interface, and input types.""" additions:Int!"""Number of changes that are field edits. This includes fields changing type and any field
-deprecation and description changes, but also includes any argument changes and any input object
-field changes.""" edits:Int!"""Number of changes that are removals of fields from object, interface, and input types.""" removals:Int!}"""Columns of FieldExecutions.""" enum FieldExecutionsColumn{ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldExecutionsDimensions{fieldName:String parentType:String schemaTag:String serviceId:ID}"""Filter for data in FieldExecutions. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldExecutionsFilter{and:[FieldExecutionsFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldExecutionsFilterIn not:FieldExecutionsFilter or:[FieldExecutionsFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldExecutions. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldExecutionsFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldExecutionsMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input FieldExecutionsOrderBySpec{column:FieldExecutionsColumn!direction:Ordering!}type FieldExecutionsRecord{"""Dimensions of FieldExecutions that can be grouped by.""" groupBy:FieldExecutionsDimensions!"""Metrics of FieldExecutions that can be aggregated over.""" metrics:FieldExecutionsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of FieldLatencies.""" enum FieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in FieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldLatenciesFilter{and:[FieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldLatenciesFilterIn not:FieldLatenciesFilter or:[FieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. 
To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input FieldLatenciesOrderBySpec{column:FieldLatenciesColumn!direction:Ordering!}type FieldLatenciesRecord{"""Dimensions of FieldLatencies that can be grouped by.""" groupBy:FieldLatenciesDimensions!"""Metrics of FieldLatencies that can be aggregated over.""" metrics:FieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of FieldRequestsByClientVersion.""" enum FieldRequestsByClientVersionColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldRequestsByClientVersionDimensions{clientName:String clientVersion:String fieldName:String parentType:String schemaTag:String serviceId:ID}"""Filter for data in FieldRequestsByClientVersion. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldRequestsByClientVersionFilter{and:[FieldRequestsByClientVersionFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldRequestsByClientVersionFilterIn not:FieldRequestsByClientVersionFilter or:[FieldRequestsByClientVersionFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. 
To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldRequestsByClientVersion. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldRequestsByClientVersionFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldRequestsByClientVersionMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input FieldRequestsByClientVersionOrderBySpec{column:FieldRequestsByClientVersionColumn!direction:Ordering!}type FieldRequestsByClientVersionRecord{"""Dimensions of FieldRequestsByClientVersion that can be grouped by.""" groupBy:FieldRequestsByClientVersionDimensions!"""Metrics of FieldRequestsByClientVersion that can be aggregated over.""" metrics:FieldRequestsByClientVersionMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of FieldUsage.""" enum FieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in FieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldUsageFilter{and:[FieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldUsageFilterIn not:FieldUsageFilter or:[FieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. 
To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input FieldUsageOrderBySpec{column:FieldUsageColumn!direction:Ordering!}type FieldUsageRecord{"""Dimensions of FieldUsage that can be grouped by.""" groupBy:FieldUsageDimensions!"""Metrics of FieldUsage that can be aggregated over.""" metrics:FieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type FilterBuildInput{filterConfig:FilterConfig!schemaHash:String!}type FilterConfig{exclude:[String!]!include:[String!]!}input FilterConfigInput{exclude:[String!]!include:[String!]!}type GitContext{branch:String commit:ID commitUrl:String committer:String message:String remoteHost:GitRemoteHost remoteUrl:String}"""This is stored with a schema when it is uploaded""" input GitContextInput{branch:String commit:ID committer:String message:String remoteUrl:String}enum GitRemoteHost{BITBUCKET GITHUB GITLAB}type GlobalExperimentalFeatures{operationsCollections:Boolean!sandboxesFullRelease:Boolean!sandboxesPreview:Boolean!sandboxesSchemaChecksPage:Boolean!sandboxesSchemaDiffPage:Boolean!subgraphsInSandbox:Boolean!}type GraphApiKey implements ApiKey{createdAt:Timestamp!createdBy:Identity id:ID!keyName:String role:UserPermission!token:String!}"""A union of all combinations that can comprise the implementingServices for a Service""" union GraphImplementors=FederatedImplementingServices|NonFederatedImplementingService scalar GraphQLDocument """A variant of a graph, often corresponding to an environment where a graph runs (e.g. staging).
-See https://www.apollographql.com/docs/studio/org/graphs/ for more details.""" type GraphVariant{"""As new schema tags keep getting published, activeSchemaPublish refers to the latest.""" activeSchemaPublish:SchemaTag """The version of composition currently in use, if applicable""" compositionVersion:String """Filter configuration used to create the contract schema""" contractFilterConfig:FilterConfig """Preview a Contract schema built from this source variant.""" contractPreview(filters:FilterConfigInput!):ContractPreview!defaultHeaders:String@deprecated(reason:"Use sharedHeaders instead")derivedVariantCount:Int!"""Graph the variant belongs to.""" graph:Service!"""Graph ID of the variant. Prefer using graph { id } when feasible.""" graphId:String!"""If the variant has managed subgraphs.""" hasManagedSubgraphs:Boolean """Global identifier for the graph variant, in the form `graph@variant`.""" id:ID!"""Represents whether this variant is a Contract.""" isContract:Boolean!"""Is this variant one of the current user's favorite variants?""" isFavoriteOfCurrentUser:Boolean!"""If the variant has managed subgraphs.""" isFederated:Boolean@deprecated(reason:"Replaced by hasManagedSubgraphs")"""If the variant is protected""" isProtected:Boolean!isPublic:Boolean!"""Represents whether this variant should be listed in the public variants directory. This can only be true if the variant is also public.""" isPubliclyListed:Boolean!"""Represents whether Apollo has verified the authenticity of this public variant. This can only be true if the variant is also public.""" isVerified:Boolean!"""Latest approved launch for the variant, and what is served through Uplink.""" latestApprovedLaunch:Launch """Latest launch for the variant, whether successful or not.""" latestLaunch:Launch """Latest publication for the variant.""" latestPublication:SchemaTag launch(id:ID!):Launch launchHistory(limit:Int!=100):[Launch!]!links:[LinkInfo!]"""Name of the variant, like `variant`.""" name:String!operationCollections:[OperationCollection!]!"""Which permissions the current user has for interacting with this variant""" permissions:GraphVariantPermissions!"""Generate a federated operation plan for a given operation""" plan(document:GraphQLDocument!operationName:String):QueryPlan """Explorer setting for preflight script to run before the actual GraphQL operations is run.""" preflightScript:String readme:Readme """Registry stats for this particular graph variant""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow """The total number of requests for this variant in the last 24 hours""" requestsInLastDay:Long """If the graphql endpoint is set up to accept cookies.""" sendCookies:Boolean """Explorer setting for shared headers for a graph""" sharedHeaders:String sourceVariant:GraphVariant """Subgraph of a given name, null if non-existent.""" subgraph(name:ID!):FederatedImplementingService """List of subgraphs that comprise a variant, null if not federated.
-Set includeDeleted to see deleted subgraphs.""" subgraphs(includeDeleted:Boolean!=false):[FederatedImplementingService!]"""URL where subscription operations can be executed.""" subscriptionUrl:String """A list of supported directives""" supportedDirectives:[DirectiveSupportStatus!]"""URL where non-subscription operations can be executed.""" url:String """The last instant that usage information (e.g. operation stat, client stats) was reported for this variant""" usageLastReportedAt:Timestamp}"""Result of looking up a variant by ref""" union GraphVariantLookup=GraphVariant|InvalidRefFormat """Modifies a variant of a graph, also called a schema tag in parts of our product.""" type GraphVariantMutation{addLinkToVariant(title:String type:LinkInfoType!url:String!):GraphVariant!configureComposition(enableTagAndInaccessible:Boolean version:String):GraphVariant """Delete the variant.""" delete:DeleteSchemaTagResult!enableTagAndInaccessible(enabled:Boolean!):GraphVariant@deprecated(reason:"Use configureComposition instead")"""Graph ID of the variant""" graphId:String!"""Global identifier for the graph variant, in the form `graph@variant`.""" id:ID!"""Name of the variant, like `variant`.""" name:String!relaunch:RelaunchResult!removeLinkFromVariant(linkInfoId:ID!):GraphVariant!setIsFavoriteOfCurrentUser(favorite:Boolean!):GraphVariant!updateDefaultHeaders(defaultHeaders:String):GraphVariant@deprecated(reason:"Use updateSharedHeaders instead")updateIsProtected(isProtected:Boolean!):GraphVariant updatePreflightScript(preflightScript:String):GraphVariant updateSendCookies(sendCookies:Boolean!):GraphVariant updateSharedHeaders(sharedHeaders:String):GraphVariant updateSubscriptionURL(subscriptionUrl:String):GraphVariant updateURL(url:String):GraphVariant updateVariantIsPublic(isPublic:Boolean!):GraphVariant updateVariantIsPubliclyListed(isPubliclyListed:Boolean!):GraphVariant updateVariantIsVerified(isVerified:Boolean!):GraphVariant updateVariantReadme(readme:String!):GraphVariant}"""A map from permission String to boolean that the currently authenticated user is allowed for a particular graph variant.""" type GraphVariantPermissions{canCreateCollectionInVariant:Boolean!"""Whether the currently authenticated user is permitted to manage/update the build configuration (e.g. build pipeline version) for this variant.""" canManageBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to update variant-level settings for the Schema Explorer.""" canManageExplorerSettings:Boolean!"""Whether the currently authenticated user is permitted to publish schemas to this variant.""" canPushSchemas:Boolean!"""Whether the currently authenticated user is permitted to view details regarding the build configuration (e.g. 
build pipeline version) for this variant.""" canQueryBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to download schemas associated to this variant.""" canQuerySchemas:Boolean!canShareCollectionInVariant:Boolean!canUpdateVariantLinkInfo:Boolean!"""Whether the currently authenticated user is permitted to update the README for this variant.""" canUpdateVariantReadme:Boolean!variantId:ID!}enum HTTPMethod{CONNECT DELETE GET HEAD OPTIONS PATCH POST PUT TRACE UNKNOWN UNRECOGNIZED}input HistoricQueryParameters{"""A list of clients to filter out during validation.""" excludedClients:[ClientInfoFilter!]=null """A list of operation names to filter out during validation.""" excludedOperationNames:[OperationNameFilterInput!]=null from:Timestamp="-86400" """A list of operation IDs to filter out during validation.""" ignoredOperations:[ID!]=null """A list of variants to include in the validation. If no variants are provided
-then this defaults to the "current" variant along with the base variant. The
-base variant indicates the schema that generates diff and marks the metrics that
-are checked for broken queries. We union this base variant with the untagged values('',
-same as null inside of `in`, and 'current') in this metrics fetch. This strategy
-supports users who have not tagged their metrics or schema.""" includedVariants:[String!]=null """Minimum number of requests within the window for a query to be considered.""" queryCountThreshold:Int=1 """Number of requests within the window for a query to be considered, relative to
-total request count. Expected values are between 0 and 0.05 (minimum 5% of total
-request volume)""" queryCountThresholdPercentage:Float=0 to:Timestamp="-0"}"""An identity (e.g. Anonymous, a specific User) within Apollo Studio. See implementations.""" interface Identity{"""A view of the identity as an Actor type.""" asActor:Actor!"""An identifier for a given identity, unique within the context of the identity type.""" id:ID!"""A human-readable name for the identity in question.""" name:String!}"""An actor's identity and info about the client they used to perform the action""" type IdentityAndClientInfo{"""Client name provided when the actor performed the action""" clientName:String """Client version provided when the actor performed the action""" clientVersion:String """Identity info about the actor""" identity:Identity}union IdentityMutation=ServiceMutation|UserMutation type IgnoreOperationsInChecksResult{graph:Service!}"""The location of the implementing service config file in storage""" type ImplementingServiceLocation{"""The name of the implementing service""" name:String!"""The path in storage to access the implementing service config file""" path:String!}type InternalAdminUser{role:InternalMdgAdminRole!userID:String!}type InternalIdentity implements Identity{accounts:[Account!]!asActor:Actor!email:String id:ID!name:String!}enum InternalMdgAdminRole{INTERNAL_MDG_READ_ONLY INTERNAL_MDG_SALES INTERNAL_MDG_SUPER_ADMIN INTERNAL_MDG_SUPPORT}type IntrospectionDirective{args:[IntrospectionInputValue!]!description:String locations:[IntrospectionDirectiveLocation!]!name:String!}input IntrospectionDirectiveInput{args:[IntrospectionInputValueInput!]!description:String isRepeatable:Boolean locations:[IntrospectionDirectiveLocation!]!name:String!}"""__DirectiveLocation introspection type""" enum IntrospectionDirectiveLocation{"""Location adjacent to an argument definition.""" ARGUMENT_DEFINITION """Location adjacent to an enum definition.""" ENUM """Location adjacent to an enum value definition.""" ENUM_VALUE """Location adjacent to a field.""" FIELD """Location adjacent to a field definition.""" FIELD_DEFINITION """Location adjacent to a fragment definition.""" FRAGMENT_DEFINITION """Location adjacent to a fragment spread.""" FRAGMENT_SPREAD """Location adjacent to an inline fragment.""" INLINE_FRAGMENT """Location adjacent to an input object field definition.""" INPUT_FIELD_DEFINITION """Location adjacent to an input object type definition.""" INPUT_OBJECT """Location adjacent to an interface definition.""" INTERFACE """Location adjacent to a mutation operation.""" MUTATION """Location adjacent to an object type definition.""" OBJECT """Location adjacent to a query operation.""" QUERY """Location adjacent to a scalar definition.""" SCALAR """Location adjacent to a schema definition.""" SCHEMA """Location adjacent to a subscription operation.""" SUBSCRIPTION """Location adjacent to a union definition.""" UNION """Location adjacent to a variable definition.""" VARIABLE_DEFINITION}"""Values associated with introspection result for an enum value""" type IntrospectionEnumValue{depreactionReason:String@deprecated(reason:"Use deprecationReason instead")deprecationReason:String description:String isDeprecated:Boolean!name:String!}"""__EnumValue introspection type""" input IntrospectionEnumValueInput{deprecationReason:String description:String isDeprecated:Boolean!name:String!}"""Values associated with introspection result for field""" type IntrospectionField{args:[IntrospectionInputValue!]!deprecationReason:String description:String 
isDeprecated:Boolean!name:String!type:IntrospectionType!}"""__Field introspection type""" input IntrospectionFieldInput{args:[IntrospectionInputValueInput!]!deprecationReason:String description:String isDeprecated:Boolean!name:String!type:IntrospectionTypeInput!}"""Values associated with introspection result for an input field""" type IntrospectionInputValue{defaultValue:String description:String name:String!type:IntrospectionType!}"""__Value introspection type""" input IntrospectionInputValueInput{defaultValue:String deprecationReason:String description:String isDeprecated:Boolean name:String!type:IntrospectionTypeInput!}type IntrospectionSchema{directives:[IntrospectionDirective!]!mutationType:IntrospectionType queryType:IntrospectionType!subscriptionType:IntrospectionType types(filter:TypeFilterConfig={includeAbstractTypes:true includeBuiltInTypes:true includeIntrospectionTypes:true}):[IntrospectionType!]!}"""__Schema introspection type""" input IntrospectionSchemaInput{description:String directives:[IntrospectionDirectiveInput!]!mutationType:IntrospectionTypeRefInput queryType:IntrospectionTypeRefInput!subscriptionType:IntrospectionTypeRefInput types:[IntrospectionTypeInput!]}"""Object containing all possible values for an introspectionType""" type IntrospectionType{"""the base kind of the type this references, ignoring lists and nullability""" baseKind:IntrospectionTypeKind description:String enumValues(includeDeprecated:Boolean=false):[IntrospectionEnumValue!]fields:[IntrospectionField!]inputFields:[IntrospectionInputValue!]interfaces:[IntrospectionType!]kind:IntrospectionTypeKind name:String ofType:IntrospectionType possibleTypes:[IntrospectionType!]"""printed representation of type, including nested nullability and list ofTypes""" printed:String!}"""__Type introspection type""" input IntrospectionTypeInput{description:String enumValues:[IntrospectionEnumValueInput!]fields:[IntrospectionFieldInput!]inputFields:[IntrospectionInputValueInput!]interfaces:[IntrospectionTypeInput!]kind:IntrospectionTypeKind!name:String ofType:IntrospectionTypeInput possibleTypes:[IntrospectionTypeInput!]specifiedByUrl:String}enum IntrospectionTypeKind{"""Indicates this type is an enum. 'enumValues' is a valid field.""" ENUM """Indicates this type is an input object. 'inputFields' is a valid field.""" INPUT_OBJECT """Indicates this type is an interface. 'fields' and 'possibleTypes' are valid
-fields""" INTERFACE """Indicates this type is a list. 'ofType' is a valid field.""" LIST """Indicates this type is a non-null. 'ofType' is a valid field.""" NON_NULL """Indicates this type is an object. 'fields' and 'interfaces' are valid fields.""" OBJECT """Indicates this type is a scalar.""" SCALAR """Indicates this type is a union. 'possibleTypes' is a valid field.""" UNION}"""Shallow __Type introspection type""" input IntrospectionTypeRefInput{kind:String name:String!}type InvalidOperation{errors:[OperationValidationError!]signature:ID!}"""Type returned by reference lookup when the reference was invalid""" type InvalidRefFormat implements Error{message:String!}type InvalidTarget implements Error{message:String!}type Invoice{closedAt:Timestamp collectionMethod:String createdAt:Timestamp!invoiceNumber:Int!state:InvoiceState!totalInCents:Int!updatedAt:Timestamp!uuid:ID!}enum InvoiceState{COLLECTED FAILED OPEN PAST_DUE UNKNOWN}enum InvoiceStateV2{COLLECTED FAILED OPEN PAST_DUE UNKNOWN}type InvoiceV2{closedAt:Timestamp collectionMethod:String createdAt:Timestamp!invoiceNumber:Int!state:InvoiceStateV2!totalInCents:Int!updatedAt:Timestamp!uuid:ID!}"""A Launch represents the complete process of making a set of updates to your deployed graph.""" type Launch{"""The time at which this launch was approved.""" approvedAt:Timestamp """The build for the variant being launched. Is non-null once the build is initiated.""" build:Build """Set of items that will be passed to the build.""" buildInput:BuildInput!"""The time at which this launch completed.""" completedAt:Timestamp """The time at which this launch initiated.""" createdAt:Timestamp!"""Contract launches that were triggered by this launch.""" downstreamLaunches:[Launch!]!"""The ID of the graph that this launch was initiated for.""" graphId:String!"""The name of the variant that this launch was initiated for.""" graphVariant:String!"""Unique identifier for this launch.""" id:ID!isAvailable:Boolean """Whether the launch completed.""" isCompleted:Boolean """Whether the launch was published.""" isPublished:Boolean isTarget:Boolean """Returns the most recent launch sequence step.""" latestSequenceStep:LaunchSequenceStep """A specific publication of a graph variant pertaining to this launch.""" publication:SchemaTag """The outcome of the launch.""" results:[LaunchResult!]!schemaTag:SchemaTag """This represents a sequence in the Launch. Returns a list of sequence steps that represents points of time in the launch.""" sequence:[LaunchSequenceStep!]!"""A shortened version of Launch.id. 
Contains the first 8 characters of the ID.""" shortenedID:String!"""The status of the launch.""" status:LaunchStatus!"""Changes that were made to the subgraphs for this launch.""" subgraphChanges:[SubgraphChange!]"""The time at which this launch was superseded by another launch.""" supersededAt:Timestamp """Represents the launch that caused this launch to not continue/publish.""" supersededBy:Launch """Upstream launch represents the launch of the source variant.""" upstreamLaunch:Launch}"""more result types will be supported in the future""" union LaunchResult=ChangelogLaunchResult type LaunchSequenceBuildStep{completedAt:Timestamp startedAt:Timestamp}type LaunchSequenceCheckStep{completedAt:Timestamp startedAt:Timestamp}type LaunchSequenceCompletedStep{completedAt:Timestamp}type LaunchSequenceInitiatedStep{startedAt:Timestamp}type LaunchSequencePublishStep{completedAt:Timestamp startedAt:Timestamp}union LaunchSequenceStep=LaunchSequenceBuildStep|LaunchSequenceCheckStep|LaunchSequenceCompletedStep|LaunchSequenceInitiatedStep|LaunchSequencePublishStep|LaunchSequenceSupersededStep type LaunchSequenceSupersededStep{completedAt:Timestamp}enum LaunchStatus{LAUNCH_COMPLETED LAUNCH_FAILED LAUNCH_INITIATED}type LinkInfo{createdAt:Timestamp!id:ID!title:String type:LinkInfoType!url:String!}enum LinkInfoType{DEVELOPER_PORTAL OTHER REPOSITORY}"""Long type""" scalar Long type MarkChangesForOperationAsSafeResult{"""Nice to have for the frontend since the Apollo cache is already watching for AffectedQuery to update.
-This might return null if no behavior changes were found for the affected operation ID.
-This is a weird situation that should never happen.""" affectedOperation:AffectedQuery message:String!success:Boolean!}type MediaUploadInfo{csrfToken:String!maxContentLength:Int!url:String!}union MoveOperationCollectionEntryResult=InvalidTarget|MoveOperationCollectionEntrySuccess|PermissionError type MoveOperationCollectionEntrySuccess{operation:OperationCollectionEntry!originCollection:OperationCollection!targetCollection:OperationCollection!}type Mutation{account(id:ID!):AccountMutation """Creates an operation collection for the given variantRefs, or make a sandbox collection without variantRefs.""" createOperationCollection(description:String editRoles:[UserPermission!]isSandbox:Boolean!isShared:Boolean!name:String!variantRefs:[ID!]):CreateOperationCollectionResult!"""Finalize a password reset with a token included in the E-mail link,
-returns the corresponding login email when successful""" finalizePasswordReset(newPassword:String!resetToken:String!):String """Mutation a graph.""" graph(id:ID!):ServiceMutation """Join an account with a token""" joinAccount(accountId:ID!joinToken:String!):Account me:IdentityMutation newAccount(companyUrl:String id:ID!):Account newService(accountId:ID!description:String hiddenFromUninvitedNonAdminAccountMembers:Boolean!=false id:ID!isDev:Boolean!=false name:String onboardingArchitecture:OnboardingArchitecture title:String):Service operationCollection(id:ID!):OperationCollectionMutation """Report a running GraphQL server's schema.""" reportSchema("""Only sent if previously requested i.e. received ReportSchemaResult with withCoreSchema = true. This is a GraphQL schema document as a string. Note that for a GraphQL server with a core schema, this should be the core schema, not the API schema.""" coreSchema:String """Information about server and its schema.""" report:SchemaReport!):ReportSchemaResult """Ask for a user's password to be reset by E-mail""" resetPassword(email:String!):Void resolveAllInternalCronExecutions(group:String name:String):Void resolveInternalCronExecution(id:ID!):CronExecution service(id:ID!):ServiceMutation """Set the subscriptions for a given email""" setSubscriptions(email:String!subscriptions:[EmailCategory!]!token:String!):EmailPreferences """Set the studio settings for the current user""" setUserSettings(newSettings:UserSettingsInput):UserSettings signUp(email:String!fullName:String!password:String!referrer:String trackingGoogleClientId:String trackingMarketoClientId:String userSegment:UserSegment utmCampaign:String utmMedium:String utmSource:String):User """This is called by the form shown to users after they delete their user or organization account.""" submitPostDeletionFeedback(feedback:String!targetIdentifier:ID!targetType:DeletionTargetType!):Void """Mutation for basic engagement tracking in studio""" track(event:EventEnum!graphID:String!graphVariant:String!="current"):Void """Rover session tracking. Reserved to https://rover.apollo.dev/telemetry (https://github.com/apollographql/orbiter).""" trackRoverSession(anonymousId:ID!arguments:[RoverArgumentInput!]!ci:String command:String!cwdHash:SHA256!os:String!remoteUrlHash:SHA256 sessionId:ID!version:String!):Void """Unsubscribe a given email from all emails""" unsubscribeFromAll(email:String!token:String!):EmailPreferences user(id:ID!):UserMutation}type NamedIntrospectionArg{description:String name:String}type NamedIntrospectionArgNoDescription{name:String}"""The shared fields for a named introspection type. Currently this is returned for the
+ancestor) was added/removed.
+"""
+type ChangeSummary {
+  """
+  Counts for changes to non-field aspects of objects, input objects, and interfaces,
+  and all aspects of enums, unions, and scalars.
+  """
+  type: TypeChangeSummaryCounts!
+  """Counts for changes to fields of objects, input objects, and interfaces."""
+  field: FieldChangeSummaryCounts!
+  """Counts for all changes."""
+  total: TotalChangeSummaryCounts!
+}
+
+enum ChangeType {
+  FAILURE
+  NOTICE
+}
+
+"""Filter options available when listing checks."""
+input CheckFilterInput {
+  authors: [String!]
+  branches: [String!]
+  subgraphs: [String!]
+  status: CheckFilterInputStatusOption
+  variants: [String!]
+}
+
+"""Options for filtering CheckWorkflows by status"""
+enum CheckFilterInputStatusOption {
+  FAILED
+  PENDING
+  PASSED
+}
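+
+# Example (illustrative): a filter value for CheckFilterInput. The field that
+# accepts it (e.g. a `checkWorkflows(filter: ...)` field on the graph type) is
+# outside this excerpt and is assumed here.
+#
+#   { authors: ["alice"], branches: ["main"], status: FAILED }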
+
+"""The result of performing a subgraph check, including all steps."""
+type CheckPartialSchemaResult {
+  """Result of compostion run as part of the overall subgraph check."""
+  compositionValidationResult: CompositionCheckResult!
+  """Overall result of the check. This will be null if composition validation was unsuccessful."""
+  checkSchemaResult: CheckSchemaResult
+  """Whether any modifications were detected in the composed core schema."""
+  coreSchemaModified: Boolean!
+}
+
+"""The possible results of a request to initiate schema checks (either a success object or one of multiple `Error` objects)."""
+union CheckRequestResult = CheckRequestSuccess | InvalidInputError | PermissionError | PlanError
+
+"""Represents a successfully initiated execution of schema checks. This does not indicate the _result_ of the checks, only that they were initiated."""
+type CheckRequestSuccess {
+  """The URL of the Apollo Studio page for this check."""
+  targetURL: String!
+  """The unique ID for this execution of schema checks."""
+  workflowID: ID!
+}
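+
+# Example (illustrative): selecting on the CheckRequestResult union returned by a
+# check-initiating mutation (the mutation itself is outside this excerpt; the
+# error members are assumed to implement Error):
+#
+#   ... on CheckRequestSuccess { targetURL workflowID }
+#   ... on PermissionError { message }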
+
+"""Input type to provide when running schema checks asynchronously for a non-federated graph."""
+input CheckSchemaAsyncInput {
+  """Configuration options for the check execution."""
+  config: HistoricQueryParametersInput!
+  """The GitHub context to associate with the check."""
+  gitContext: GitContextInput!
+  graphRef: ID @deprecated(reason: "This field is not required to be sent anymore")
+  """The URL of the GraphQL endpoint that Apollo Sandbox introspected to obtain the proposed schema. Required if `isSandbox` is `true`."""
+  introspectionEndpoint: String
+  """If `true`, the check was initiated by Apollo Sandbox."""
+  isSandbox: Boolean!
+  proposedSchemaDocument: String
+}
+
+"""The result of running schema checks on a graph variant."""
+type CheckSchemaResult {
+  """The schema diff and affected operations generated by the schema check."""
+  diffToPrevious: SchemaDiff!
+  """The URL to view the schema diff in Studio."""
+  targetUrl: String
+}
+
+type CheckWorkflow {
+  """
+  The variant provided as a base to check against. Only the differences from the
+  base schema will be tested in operations checks.
+  """
+  baseVariant: GraphVariant
+  """The timestamp when the check workflow completed."""
+  completedAt: Timestamp
+  id: ID!
+  """The name of the implementing service that was responsible for triggering the validation."""
+  implementingServiceName: String
+  """The timestamp when the check workflow started."""
+  startedAt: Timestamp
+  """Overall status of the workflow, based on the underlying task statuses."""
+  status: CheckWorkflowStatus!
+  """The set of check tasks associated with this workflow, e.g. composition, operations, etc."""
+  tasks: [CheckWorkflowTask!]!
+  """Contextual parameters supplied by the runtime environment where the check was run."""
+  gitContext: GitContext
+  createdAt: Timestamp!
+}
+
+enum CheckWorkflowStatus {
+  FAILED
+  PASSED
+  PENDING
+}
+
+interface CheckWorkflowTask {
+  completedAt: Timestamp
+  createdAt: Timestamp!
+  id: ID!
+  """
+  The status of this task. All tasks start with the PENDING status while initializing. If any
+   prerequisite task fails, then the task status becomes BLOCKED. Otherwise, if all prerequisite
+   tasks pass, then this task runs (still having the PENDING status). Once the task completes, the
+   task status will become either PASSED or FAILED.
+  """
+  status: CheckWorkflowTaskStatus!
+  """A studio UI url to view the details of this check workflow task"""
+  targetURL: String
+  """The workflow that this task belongs to."""
+  workflow: CheckWorkflow!
+}
+
+enum CheckWorkflowTaskStatus {
+  BLOCKED
+  FAILED
+  PASSED
+  PENDING
+}
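+
+# Example (illustrative): selecting a workflow's tasks polymorphically.
+# CompositionCheckTask and DownstreamCheckTask, defined later in this schema,
+# both implement CheckWorkflowTask.
+#
+#   tasks {
+#     __typename
+#     id
+#     status
+#     targetURL
+#     ... on CompositionCheckTask { coreSchemaModified }
+#   }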
+
+"""Filter options to exclude by client reference ID, client name, and client version."""
+input ClientInfoFilter {
+  name: String!
+  """Ignored"""
+  referenceID: ID
+  version: String
+}
+
+"""The result of supergraph composition that Studio performed in response to an attempted deletion of a subgraph."""
+type SubgraphRemovalResult {
+  """A list of errors that occurred during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a supergraph schema. If any errors are present, gateways / routers are not updated."""
+  errors: [SchemaCompositionError]!
+  """Whether this composition result resulted in a new supergraph schema passed to Uplink (`true`), or the build failed for any reason (`false`). For dry runs, this value is `true` if Uplink _would have_ been updated with the result."""
+  updatedGateway: Boolean!
+}
+
+"""The result of supergraph composition that Studio performed in response to an attempted publish of a subgraph."""
+type SubgraphPublicationResult {
+  """The generated composition config, or null if any errors occurred."""
+  compositionConfig: CompositionConfig
+  """A list of errors that occurred during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a supergraph schema. If any errors are present, gateways / routers are not updated."""
+  errors: [SchemaCompositionError]!
+  """Whether this composition result resulted in a new supergraph schema passed to Uplink (`true`), or the build failed for any reason (`false`). For dry runs, this value is `true` if Uplink _would have_ been updated with the result."""
+  updatedGateway: Boolean!
+  """Whether a new subgraph was created as part of this publish."""
+  wasCreated: Boolean!
+  """The URL of the Studio page for this update's associated launch, if available."""
+  launchUrl: String
+  """Human-readable text describing the launch result of the subgraph publish."""
+  launchCliCopy: String
+}
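+
+# Example (illustrative): a selection on SubgraphPublicationResult. The publish
+# mutation that returns it is outside this excerpt, and SchemaCompositionError is
+# assumed to expose a `message` field.
+#
+#   {
+#     errors { message }
+#     updatedGateway
+#     wasCreated
+#     launchUrl
+#     launchCliCopy
+#   }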
+
+type CompositionBuildInput {
+  subgraphs: [Subgraph!]!
+  version: String
+}
+
+type CompositionCheckTask implements CheckWorkflowTask {
+  completedAt: Timestamp
+  """
+  Whether the build's output supergraph core schema differs from that of the active publish for
+  the workflow's variant at the time this field executed (NOT at the time the check workflow
+  started).
+  """
+  coreSchemaModified: Boolean!
+  createdAt: Timestamp!
+  id: ID!
+  status: CheckWorkflowTaskStatus!
+  targetURL: String
+  workflow: CheckWorkflow!
+  """
+  An old version of buildResult that returns a very old GraphQL type that generally should be
+  avoided. This field will soon be deprecated.
+  """
+  result: CompositionResult
+}
+
+"""Composition configuration exposed to the gateway."""
+type CompositionConfig {
+  """The resulting API schema's SHA256 hash, represented as a hexadecimal string."""
+  schemaHash: String!
+}
+
+"""The result of supergraph composition that Studio performed."""
+type CompositionPublishResult implements CompositionResult {
+  """The unique ID for this instance of composition."""
+  graphCompositionID: ID!
+  """A list of errors that occurred during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a supergraph schema. If any errors are present, gateways / routers are not updated."""
+  errors: [SchemaCompositionError!]!
+  """The supergraph SDL generated by composition."""
+  supergraphSdl: GraphQLDocument
+}
+
+"""The result of supergraph composition performed by Apollo Studio, often as the result of a subgraph check or subgraph publish. See individual implementations for more details."""
+interface CompositionResult {
+  """The unique ID for this instance of composition."""
+  graphCompositionID: ID!
+  """A list of errors that occurred during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a supergraph schema. If any errors are present, gateways / routers are not updated."""
+  errors: [SchemaCompositionError!]!
+  """Supergraph SDL generated by composition."""
+  supergraphSdl: GraphQLDocument
+}
+
+"""The result of composition validation run by Apollo Studio during a subgraph check."""
+type CompositionCheckResult implements CompositionResult {
+  """The unique ID for this instance of composition."""
+  graphCompositionID: ID!
+  """A list of errors that occurred during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a supergraph schema. If any errors are present, gateways / routers are not updated."""
+  errors: [SchemaCompositionError!]!
+  """The supergraph schema document generated by composition."""
+  supergraphSdl: GraphQLDocument
+}
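+
+# Example (illustrative): because CompositionPublishResult and
+# CompositionCheckResult both implement CompositionResult, a shared fragment can
+# select their common fields (SchemaCompositionError's `message` field is
+# assumed):
+#
+#   fragment CompositionOutcome on CompositionResult {
+#     graphCompositionID
+#     errors { message }
+#     supergraphSdl
+#   }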
+
+type ContractVariantUpsertErrors {
+  """A list of all errors that occurred when attempting to create or update a contract variant."""
+  errorMessages: [String!]!
+}
+
+union ContractVariantUpsertResult = ContractVariantUpsertErrors | ContractVariantUpsertSuccess
+
+type ContractVariantUpsertSuccess {
+  """The updated contract variant"""
+  contractVariant: GraphVariant!
+  """Human-readable text describing the launch result of the contract update."""
+  launchCliCopy: String
+  """The URL of the Studio page for this update's associated launch, if available."""
+  launchUrl: String
+}
+
+"""Contains the supergraph and API schemas generated by composition."""
+type CoreSchema {
+  """The composed API schema document."""
+  apiDocument: GraphQLDocument!
+  """The composed supergraph schema document."""
+  coreDocument: GraphQLDocument!
+  """The supergraph schema document's SHA256 hash, represented as a hexadecimal string."""
+  coreHash: String!
+}
+
+union CreateOperationCollectionResult = OperationCollection | PermissionError | ValidationError
+
+"""
+Implement the DateTime<Utc> scalar
+
+The input/output is a string in RFC3339 format.
+"""
+scalar DateTime @specifiedBy(url: "https://datatracker.ietf.org/doc/html/rfc3339")
+
+union DeleteOperationCollectionResult = PermissionError
+
+"""The result of attempting to delete a graph variant."""
+type GraphVariantDeletionResult {
+  """Whether the variant was deleted or not."""
+  deleted: Boolean!
+}
+
+"""The result of a schema checks workflow that was run on a downstream variant as part of checks for the corresponding source variant. Most commonly, these downstream checks are [contract checks](https://www.apollographql.com/docs/studio/contracts#contract-checks)."""
+type DownstreamCheckResult {
+  """Whether the downstream check workflow blocks the upstream check workflow from completing."""
+  blocking: Boolean!
+  """The ID of the graph that the downstream variant belongs to."""
+  downstreamGraphID: String!
+  """The name of the downstream variant."""
+  downstreamVariantName: String!
+  """
+  The downstream checks workflow that this result corresponds to. This value is null
+  if the workflow hasn't been initialized yet, or if the downstream variant was deleted.
+  """
+  downstreamWorkflow: CheckWorkflow
+  """
+  Whether the downstream check workflow is causing the upstream check workflow to fail. This occurs
+  when the downstream check workflow is both blocking and failing. This may be null while the
+  downstream check workflow is pending.
+  """
+  failsUpstreamWorkflow: Boolean
+  """The downstream checks task that this result corresponds to."""
+  workflowTask: DownstreamCheckTask!
+}
+
+type DownstreamCheckTask implements CheckWorkflowTask {
+  completedAt: Timestamp
+  createdAt: Timestamp!
+  id: ID!
+  """
+  A list of results for all downstream checks triggered as part of the source variant's checks workflow.
+  This value is null if the task hasn't been initialized yet, or if the build task fails (the build task is a
+  prerequisite to this task). This value is _not_ null _while_ the task is running. The returned list is empty
+  if the source variant has no downstream variants.
+  """
+  results: [DownstreamCheckResult!]
+  status: CheckWorkflowTaskStatus!
+  targetURL: String
+  workflow: CheckWorkflow!
+}
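+
+# A minimal sketch (not part of the schema; the fragment name is hypothetical) of selecting
+# the downstream-check fields defined above:
+#
+# fragment DownstreamCheckSummary on DownstreamCheckTask {
+#   status
+#   results {
+#     downstreamGraphID
+#     downstreamVariantName
+#     blocking
+#     failsUpstreamWorkflow
+#   }
+# }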
+
+interface Error {
+  message: String!
+}
+
+"""A single subgraph in a supergraph. Every supergraph managed by Apollo Studio includes at least one subgraph. See https://www.apollographql.com/docs/federation/managed-federation/overview/ for more information."""
+type GraphVariantSubgraph {
+  """The subgraph's name."""
+  name: String!
+  """The URL of the subgraph's GraphQL endpoint."""
+  url: String
+  """The current user-provided version/edition of the subgraph. Typically a Git SHA or docker image ID."""
+  revision: String!
+  """The ID of the graph this subgraph belongs to."""
+  graphID: String!
+  """The name of the graph variant this subgraph belongs to."""
+  graphVariant: String!
+  """The subgraph's current active schema, used in supergraph composition for the the associated variant."""
+  activePartialSchema: SubgraphSchema!
+  """The timestamp when the subgraph was created."""
+  createdAt: Timestamp!
+  """The timestamp when the subgraph was most recently updated."""
+  updatedAt: Timestamp!
+}
+
+"""Container for a list of subgraphs composing a supergraph."""
+type GraphVariantSubgraphs {
+  """The list of underlying subgraphs."""
+  services: [GraphVariantSubgraph!]!
+}
+
+"""Counts of changes at the field level, including objects, interfaces, and input fields."""
+type FieldChangeSummaryCounts {
+  """Number of changes that are additions of fields to object, interface, and input types."""
+  additions: Int!
+  """Number of changes that are removals of fields from object, interface, and input types."""
+  removals: Int!
+  """
+  Number of changes that are field edits. This includes fields changing type and any field
+  deprecation and description changes, but also includes any argument changes and any input object
+  field changes.
+  """
+  edits: Int!
+}
+
+"""Inputs provided to the build for a contract variant, which filters types and fields from a source variant's schema."""
+type FilterBuildInput {
+  """Schema filtering rules for the build, such as tags to include or exclude from the source variant schema."""
+  filterConfig: FilterConfig!
+  """The source variant schema document's SHA256 hash, represented as a hexadecimal string."""
+  schemaHash: String!
+}
+
+type FilterCheckTask implements CheckWorkflowTask {
+  completedAt: Timestamp
+  createdAt: Timestamp!
+  id: ID!
+  status: CheckWorkflowTaskStatus!
+  targetURL: String
+  workflow: CheckWorkflow!
+}
+
+"""The filter configuration used to build a contract schema. The configuration consists of lists of tags for schema elements to include or exclude in the resulting schema."""
+type FilterConfig {
+  """Tags of schema elements to exclude from the contract schema."""
+  exclude: [String!]!
+  """Tags of schema elements to include in the contract schema."""
+  include: [String!]!
+}
+
+input FilterConfigInput {
+  """A list of tags for schema elements to exclude from the resulting contract schema."""
+  exclude: [String!]!
+  """
+  Whether to hide unreachable objects, interfaces, unions, inputs, enums and scalars from
+  the resulting contract schema. Defaults to `false`.
+  """
+  hideUnreachableTypes: Boolean! = false
+  """A list of tags for schema elements to include in the resulting contract schema."""
+  include: [String!]!
+}
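+
+# A hedged example (tag names are hypothetical) of a FilterConfigInput value, written as the
+# JSON that would be supplied for a variable of this input type:
+#
+#   { "include": ["public"], "exclude": ["internal"], "hideUnreachableTypes": true }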
+
+type GitContext {
+  commit: ID
+}
+
+"""Input type to provide when specifying the Git context for a run of schema checks."""
+input GitContextInput {
+  """The Git repository branch used in the check."""
+  branch: String
+  """The ID of the Git commit used in the check."""
+  commit: ID
+  """The username of the user who created the Git commit used in the check."""
+  committer: String
+  """The commit message of the Git commit used in the check."""
+  message: String
+  """The Git repository's remote URL."""
+  remoteUrl: String
+}
+
+"""
+Represents a graph API key, which has permissions scoped to a
+user role for a single Apollo graph.
+"""
+type GraphApiKey implements ApiKey {
+  """The timestamp when the API key was created."""
+  createdAt: Timestamp!
+  """Details of the user or graph that created the API key."""
+  createdBy: Identity
+  """The API key's ID."""
+  id: ID!
+  """The API key's name, for distinguishing it from other keys."""
+  keyName: String
+  """The permission level assigned to the API key upon creation."""
+  role: UserPermission!
+  """The value of the API key. **This is a secret credential!**"""
+  token: String!
+}
+
+"""A union of all containers that can comprise the components of a Studio graph"""
+union GraphImplementors = GraphVariantSubgraphs
+
+"""A GraphQL document, such as the definition of an operation or schema."""
+scalar GraphQLDocument
+
+"""A graph variant"""
+type GraphVariant {
+  """The variant's global identifier in the form `graphID@variant`."""
+  id: ID!
+  router: Router
+  """The filter configuration used to build a contract schema. The configuration consists of lists of tags for schema elements to include or exclude in the resulting schema."""
+  contractFilterConfig: FilterConfig
+  """
+  A human-readable description of the filter configuration of this contract variant, or null if this isn't a contract
+  variant.
+  """
+  contractFilterConfigDescription: String
+  """The graph that this variant belongs to."""
+  graph: Graph!
+  """Latest approved launch for the variant, and what is served through Uplink."""
+  latestApprovedLaunch: Launch
+  """Latest launch for the variant, whether successful or not."""
+  latestLaunch: Launch
+  """The variant's name (e.g., `staging`)."""
+  name: String!
+  """Which permissions the current user has for interacting with this variant"""
+  permissions: GraphVariantPermissions!
+  readme: Readme!
+  """The variant this variant is derived from. This property currently only exists on contract variants."""
+  sourceVariant: GraphVariant
+  """A list of the saved [operation collections](https://www.apollographql.com/docs/studio/explorer/operation-collections/) associated with this variant."""
+  operationCollections: [OperationCollection!]!
+  """The URL of the variant's GraphQL endpoint for query and mutation operations. For subscription operations, use `subscriptionUrl`."""
+  url: String
+  """The URL of the variant's GraphQL endpoint for subscription operations."""
+  subscriptionUrl: String
+  """The details of the variant's most recent publication."""
+  latestPublication: SchemaPublication
+  """A list of the subgraphs included in this variant. This value is null for non-federated variants. Set `includeDeleted` to `true` to include deleted subgraphs."""
+  subgraphs(includeDeleted: Boolean! = false): [GraphVariantSubgraph!]
+  """Returns the details of the subgraph with the provided `name`, or null if this variant doesn't include a subgraph with that name."""
+  subgraph(name: ID!): GraphVariantSubgraph
+}
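+
+# A minimal usage sketch (not part of the schema): fetching a variant and its subgraphs.
+# It assumes the top-level `variant(ref:)` field returning `GraphVariantLookup`, as defined
+# elsewhere in this schema; the graph ref value is hypothetical.
+#
+# query VariantSubgraphs {
+#   variant(ref: "mygraph@current") {
+#     ... on GraphVariant {
+#       name
+#       url
+#       subgraphs(includeDeleted: false) { name url revision }
+#     }
+#     ... on InvalidRefFormat { message }
+#   }
+# }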
+
+"""Result of looking up a variant by ref"""
+union GraphVariantLookup = GraphVariant | InvalidRefFormat
+
+"""Modifies a variant of a graph, also called a schema tag in parts of our product."""
+type GraphVariantMutation {
+  """
+  _Asynchronously_ kicks off operation checks for a proposed non-federated
+  schema change against its associated graph.
+
+  Returns a `CheckRequestSuccess` object with a workflow ID that you can use
+  to check status, or an error object if the checks workflow failed to start.
+  """
+  submitCheckSchemaAsync(input: CheckSchemaAsyncInput!): CheckRequestResult!
+  """
+  _Asynchronously_ kicks off composition and operation checks for a proposed subgraph schema change against its associated supergraph.
+
+  Returns a `CheckRequestSuccess` object with a workflow ID that you can use
+  to check status, or an error object if the checks workflow failed to start.
+  """
+  submitSubgraphCheckAsync(input: SubgraphCheckAsyncInput!): CheckRequestResult!
+  """Updates the [README](https://www.apollographql.com/docs/studio/org/graphs/#the-readme-page) of this variant."""
+  updateVariantReadme(
+    """The full new text of the README, as a Markdown-formatted string."""
+    readme: String!
+  ): GraphVariant
+  """Delete the variant."""
+  delete: GraphVariantDeletionResult!
+}
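+
+# A hedged sketch of kicking off an asynchronous subgraph check via the field above. Only
+# `__typename` is selected on the result because the members of `CheckRequestResult` are
+# defined elsewhere in this schema; `$input` (a SubgraphCheckAsyncInput) would be declared
+# by the enclosing operation, and the fragment name is hypothetical.
+#
+# fragment KickOffSubgraphCheck on GraphVariantMutation {
+#   submitSubgraphCheckAsync(input: $input) { __typename }
+# }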
+
+"""Individual permissions for the current user when interacting with a particular Studio graph variant."""
+type GraphVariantPermissions {
+  """Whether the currently authenticated user is permitted to manage/update this variant's build configuration (e.g., build pipeline version)."""
+  canManageBuildConfig: Boolean!
+  """Whether the currently authenticated user is permitted to manage/update cloud routers"""
+  canManageCloudRouter: Boolean!
+  """Whether the currently authenticated user is permitted to update variant-level settings for the Apollo Studio Explorer."""
+  canManageExplorerSettings: Boolean!
+  """Whether the currently authenticated user is permitted to publish schemas to this variant."""
+  canPushSchemas: Boolean!
+  """Whether the currently authenticated user is permitted to view this variant's build configuration details (e.g., build pipeline version)."""
+  canQueryBuildConfig: Boolean!
+  """Whether the currently authenticated user is permitted to view details regarding cloud routers"""
+  canQueryCloudRouter: Boolean!
+  """Whether the currently authenticated user is permitted to view cloud router logs"""
+  canQueryCloudRouterLogs: Boolean!
+  """Whether the currently authenticated user is permitted to download schemas associated to this variant."""
+  canQuerySchemas: Boolean!
+  """Whether the currently authenticated user is permitted to update the README for this variant."""
+  canUpdateVariantReadme: Boolean!
+  canCreateCollectionInVariant: Boolean!
+  canShareCollectionInVariant: Boolean!
+}
+
+input HistoricQueryParameters {
+  from: String = "-86400"
+  to: String = "0"
+  """Minimum number of requests within the window for a query to be considered."""
+  queryCountThreshold: Int = 1
+  """
+  Number of requests within the window for a query to be considered, relative to
+  total request count. Expected values are between 0 and 0.05 (minimum 5% of total
+  request volume)
+  """
+  queryCountThresholdPercentage: Float = 0
+  """A list of operation IDs to filter out during validation."""
+  ignoredOperations: [ID!] = null
+  """A list of clients to filter out during validation."""
+  excludedClients: [ClientInfoFilter!] = null
+  """A list of operation names to filter out during validation."""
+  excludedOperationNames: [OperationNameFilterInput!] = null
+  """
+  A list of variants to include in the validation. If no variants are provided,
+  this defaults to the "current" variant along with the base variant. The base
+  variant indicates the schema that generates the diff and marks the metrics that
+  are checked for broken queries. We union this base variant with the untagged
+  values ('', which is equivalent to null inside `in`, and 'current') in this metrics
+  fetch. This strategy supports users who have not tagged their metrics or schema.
+  """
+  includedVariants: [String!] = null
+}
+
+"""Input type to provide when specifying configuration details for schema checks."""
+input HistoricQueryParametersInput {
+  """Clients to be excluded from check."""
+  excludedClients: [ClientInfoFilter!]
+  """Operations to be ignored in this schema check, specified by operation name."""
+  excludedOperationNames: [OperationNameFilterInput!]
+  """Start time for operations to be checked against. Specified as either a) an ISO formatted date/time string or b) a negative number of seconds relative to the time the check request was submitted."""
+  from: String
+  """Operations to be ignored in this schema check, specified by ID."""
+  ignoredOperations: [ID!]
+  """Graph variants to be included in check."""
+  includedVariants: [String!]
+  """Maximum number of queries to be checked against the change."""
+  queryCountThreshold: Int
+  """Only fail check if this percentage of operations would be negatively impacted."""
+  queryCountThresholdPercentage: Float
+  """End time for operations to be checked against. Specified as either a) an ISO formatted date/time string or b) a negative number of seconds relative to the time the check request was submitted."""
+  to: String
+}
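+
+# A hedged example of a HistoricQueryParametersInput value as a JSON variable; the relative
+# "from"/"to" values follow the docstrings above (negative seconds before the check request),
+# and the remaining values are hypothetical:
+#
+#   {
+#     "from": "-86400",
+#     "to": "0",
+#     "queryCountThreshold": 1,
+#     "queryCountThresholdPercentage": 0.01,
+#     "includedVariants": ["current"],
+#     "excludedOperationNames": [{ "name": "IntrospectionQuery" }]
+#   }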
+
+"""An identity (such as a `User` or `Graph`) in Apollo Studio. See implementing types for details."""
+interface Identity {
+  """Returns a representation of the identity as an `Actor` type."""
+  asActor: Actor!
+  """The identity's identifier, which is unique among objects of its type."""
+  id: ID!
+  """The identity's human-readable name."""
+  name: String!
+}
+
+type InternalIdentity implements Identity {
+  accounts: [Organization!]!
+  asActor: Actor!
+  email: String
+  id: ID!
+  name: String!
+}
+
+input IntrospectionDirectiveInput {
+  name: String!
+  description: String
+  locations: [IntrospectionDirectiveLocation!]!
+  args: [IntrospectionInputValueInput!]!
+  isRepeatable: Boolean
+}
+
+"""__DirectiveLocation introspection type"""
+enum IntrospectionDirectiveLocation {
+  """Location adjacent to a query operation."""
+  QUERY
+  """Location adjacent to a mutation operation."""
+  MUTATION
+  """Location adjacent to a subscription operation."""
+  SUBSCRIPTION
+  """Location adjacent to a field."""
+  FIELD
+  """Location adjacent to a fragment definition."""
+  FRAGMENT_DEFINITION
+  """Location adjacent to a fragment spread."""
+  FRAGMENT_SPREAD
+  """Location adjacent to an inline fragment."""
+  INLINE_FRAGMENT
+  """Location adjacent to a variable definition."""
+  VARIABLE_DEFINITION
+  """Location adjacent to a schema definition."""
+  SCHEMA
+  """Location adjacent to a scalar definition."""
+  SCALAR
+  """Location adjacent to an object type definition."""
+  OBJECT
+  """Location adjacent to a field definition."""
+  FIELD_DEFINITION
+  """Location adjacent to an argument definition."""
+  ARGUMENT_DEFINITION
+  """Location adjacent to an interface definition."""
+  INTERFACE
+  """Location adjacent to a union definition."""
+  UNION
+  """Location adjacent to an enum definition."""
+  ENUM
+  """Location adjacent to an enum value definition."""
+  ENUM_VALUE
+  """Location adjacent to an input object type definition."""
+  INPUT_OBJECT
+  """Location adjacent to an input object field definition."""
+  INPUT_FIELD_DEFINITION
+}
+
+"""__EnumValue introspection type"""
+input IntrospectionEnumValueInput {
+  name: String!
+  description: String
+  isDeprecated: Boolean!
+  deprecationReason: String
+}
+
+"""__Field introspection type"""
+input IntrospectionFieldInput {
+  name: String!
+  description: String
+  args: [IntrospectionInputValueInput!]!
+  type: IntrospectionTypeInput!
+  isDeprecated: Boolean!
+  deprecationReason: String
+}
+
+"""__Value introspection type"""
+input IntrospectionInputValueInput {
+  name: String!
+  description: String
+  type: IntrospectionTypeInput!
+  defaultValue: String
+  isDeprecated: Boolean
+  deprecationReason: String
+}
+
+"""__Schema introspection type"""
+input IntrospectionSchemaInput {
+  types: [IntrospectionTypeInput!]
+  queryType: IntrospectionTypeRefInput!
+  mutationType: IntrospectionTypeRefInput
+  subscriptionType: IntrospectionTypeRefInput
+  directives: [IntrospectionDirectiveInput!]!
+  description: String
+}
+
+"""__Type introspection type"""
+input IntrospectionTypeInput {
+  kind: IntrospectionTypeKind!
+  name: String
+  description: String
+  specifiedByUrl: String
+  fields: [IntrospectionFieldInput!]
+  interfaces: [IntrospectionTypeInput!]
+  possibleTypes: [IntrospectionTypeInput!]
+  enumValues: [IntrospectionEnumValueInput!]
+  inputFields: [IntrospectionInputValueInput!]
+  ofType: IntrospectionTypeInput
+}
+
+enum IntrospectionTypeKind {
+  """Indicates this type is a scalar."""
+  SCALAR
+  """Indicates this type is an object. 'fields' and 'interfaces' are valid fields."""
+  OBJECT
+  """
+  Indicates this type is an interface. 'fields' and 'possibleTypes' are valid
+  fields
+  """
+  INTERFACE
+  """Indicates this type is a union. 'possibleTypes' is a valid field."""
+  UNION
+  """Indicates this type is an enum. 'enumValues' is a valid field."""
+  ENUM
+  """Indicates this type is an input object. 'inputFields' is a valid field."""
+  INPUT_OBJECT
+  """Indicates this type is a list. 'ofType' is a valid field."""
+  LIST
+  """Indicates this type is a non-null. 'ofType' is a valid field."""
+  NON_NULL
+}
+
+"""Shallow __Type introspection type"""
+input IntrospectionTypeRefInput {
+  name: String!
+  kind: String
+}
+
+"""An error caused by providing invalid input for a task, such as schema checks."""
+type InvalidInputError {
+  """The error message."""
+  message: String!
+}
+
+"""This object is returned when a request to fetch a Studio graph variant provides an invalid graph ref."""
+type InvalidRefFormat implements Error {
+  message: String!
+}
+
+"""Represents the complete process of making a set of updates to a deployed graph variant."""
+type Launch {
+  """The unique identifier for this launch."""
+  id: ID!
+  """The ID of the launch's associated graph."""
+  graphId: String!
+  """The name of the launch's associated variant."""
+  graphVariant: String!
+  order: OrderOrError!
+  """The timestamp when the launch was approved."""
+  approvedAt: Timestamp
+  """The associated build for this launch (a build includes schema composition and contract filtering). This value is null until the build is initiated."""
+  build: Build
+  """The inputs provided to this launch's associated build, including subgraph schemas and contract filters."""
+  buildInput: BuildInput!
+  """The timestamp when the launch completed. This value is null until the launch completes."""
+  completedAt: Timestamp
+  """The timestamp when the launch was initiated."""
+  createdAt: Timestamp!
+  """Contract launches that were triggered by this launch."""
+  downstreamLaunches: [Launch!]!
+  """Whether the launch completed."""
+  isCompleted: Boolean
+  """Whether the result of the launch has been published to the associated graph and variant. This is always false for a failed launch."""
+  isPublished: Boolean
+  """The most recent launch sequence step that has started but not necessarily completed."""
+  latestSequenceStep: LaunchSequenceStep
+  """A specific publication of a graph variant pertaining to this launch."""
+  publication: SchemaPublication
+  """A list of results from the completed launch. The items included in this list vary depending on whether the launch succeeded, failed, or was superseded."""
+  results: [LaunchResult!]!
+  """Cloud router configuration associated with this build event. It will be non-null for any cloud-router variant, and null for any not cloudy variant/graph."""
+  routerConfig: String
+  """A list of all serial steps in the launch sequence. This list can change as the launch progresses. For example, a `LaunchCompletedStep` is appended after a launch completes."""
+  sequence: [LaunchSequenceStep!]!
+  """A shortened version of `Launch.id` that includes only the first 8 characters."""
+  shortenedID: String!
+  """The launch's status. If a launch is superseded, its status remains `LAUNCH_INITIATED`. To check for a superseded launch, use `supersededAt`."""
+  status: LaunchStatus!
+  """A list of subgraph changes that are included in this launch."""
+  subgraphChanges: [SubgraphChange!]
+  """The timestamp when this launch was superseded by another launch. If an active launch is superseded, it terminates."""
+  supersededAt: Timestamp
+  """The launch that superseded this launch, if any. If an active launch is superseded, it terminates."""
+  supersededBy: Launch
+  """The source variant launch that caused this launch to be initiated. This value is present only for contract variant launches. Otherwise, it's null."""
+  upstreamLaunch: Launch
+}
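+
+# A minimal sketch (fragment name hypothetical) of reading launch status from a variant,
+# using only fields defined in this schema:
+#
+# fragment LatestLaunchInfo on GraphVariant {
+#   latestApprovedLaunch { id status createdAt completedAt }
+#   latestLaunch { id shortenedID status isPublished }
+# }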
+
+"""Types of results that can be associated with a `Launch`"""
+union LaunchResult = ChangelogLaunchResult
+
+"""The timing details for the build step of a launch."""
+type LaunchSequenceBuildStep {
+  """The timestamp when the step completed."""
+  completedAt: Timestamp
+  """The timestamp when the step started."""
+  startedAt: Timestamp
+}
+
+"""The timing details for the checks step of a launch."""
+type LaunchSequenceCheckStep {
+  """The timestamp when the step completed."""
+  completedAt: Timestamp
+  """The timestamp when the step started."""
+  startedAt: Timestamp
+}
+
+"""The timing details for the completion step of a launch."""
+type LaunchSequenceCompletedStep {
+  """The timestamp when the step (and therefore the launch) completed."""
+  completedAt: Timestamp
+}
+
+"""The timing details for the initiation step of a launch."""
+type LaunchSequenceInitiatedStep {
+  """The timestamp when the step (and therefore the launch) started."""
+  startedAt: Timestamp
+}
+
+"""The timing details for the publish step of a launch."""
+type LaunchSequencePublishStep {
+  """The timestamp when the step completed."""
+  completedAt: Timestamp
+  """The timestamp when the step started."""
+  startedAt: Timestamp
+}
+
+"""Represents the various steps that occur in sequence during a single launch."""
+union LaunchSequenceStep = LaunchSequenceBuildStep | LaunchSequenceCheckStep | LaunchSequenceCompletedStep | LaunchSequenceInitiatedStep | LaunchSequencePublishStep | LaunchSequenceSupersededStep
+
+"""The timing details for the superseded step of a launch. This step occurs only if the launch is superseded by another launch."""
+type LaunchSequenceSupersededStep {
+  """The timestamp when the step completed, thereby ending the execution of this launch in favor of the superseding launch."""
+  completedAt: Timestamp
+}
+
+enum LaunchStatus {
+  LAUNCH_COMPLETED
+  LAUNCH_FAILED
+  LAUNCH_INITIATED
+}
+
+enum LogLevel {
+  WARN
+  INFO
+  ERROR
+  DEBUG
+}
+
+type LogMessage {
+  """Timestamp in UTC"""
+  timestamp: DateTime!
+  """Log message contents"""
+  message: String!
+  """Log level"""
+  level: LogLevel!
+}
+
+type Mutation {
+  """Provides access to mutation fields for modifying a Studio graph with the provided ID."""
+  graph(id: ID!): GraphMutation
+  """
+  Provides access to mutation fields for modifying an Apollo user with the
+  provided ID.
+  """
+  user(id: ID!): UserMutation
+  """Creates an [operation collection](https://www.apollographql.com/docs/studio/explorer/operation-collections/) for a given variant, or creates a [sandbox collection](https://www.apollographql.com/docs/studio/explorer/operation-collections/#sandbox-collections) without an associated variant."""
+  createOperationCollection(
+    """The collection's description."""
+    description: String
+    """Whether the collection is a [sandbox collection](https://www.apollographql.com/docs/studio/explorer/operation-collections/#sandbox-collections)."""
+    isSandbox: Boolean!
+    """Whether the collection is shared across its associated organization."""
+    isShared: Boolean!
+    """The minimum role a user needs to edit this collection. Valid values: null, CONSUMER, OBSERVER, DOCUMENTER, CONTRIBUTOR, GRAPH_ADMIN. This value is ignored if `isShared` is `false`. The default value is  `GRAPH_ADMIN`."""
+    minEditRole: UserPermission
+    """The collection's name."""
+    name: String!
+    """The [graph ref](https://www.apollographql.com/docs/rover/conventions/#graph-refs) of the graph variants to associate the collection with."""
+    variantRefs: [ID!]
+  ): CreateOperationCollectionResult!
+  operationCollection(id: ID!): OperationCollectionMutation
+}
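+
+# A hedged sketch of creating an operation collection with the mutation field above. The
+# name, description, and variant ref values are hypothetical; only grounded fields are
+# selected on the `OperationCollection` result, and `__typename` covers the error members
+# of `CreateOperationCollectionResult`.
+#
+# mutation CreateSmokeTestCollection {
+#   createOperationCollection(
+#     name: "Smoke tests"
+#     description: "Operations run before each deploy"
+#     isSandbox: false
+#     isShared: true
+#     variantRefs: ["mygraph@staging"]
+#   ) {
+#     __typename
+#     ... on OperationCollection { id name createdAt }
+#   }
+# }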
+
+"""
+ISO 8601 combined date and time without timezone.
+
+# Examples
+
+* `2015-07-01T08:59:60.123`
+"""
+scalar NaiveDateTime
+
+type NamedIntrospectionArg {
+  name: String
+  description: String
+}
+
+"""
+The shared fields for a named introspection type. Currently this is returned for the
 top level value affected by a change. In the future, we may update this
 type to be an interface, which is extended by the more specific types:
 scalar, object, input object, union, interface, and enum
 
 For an in-depth look at where these types come from, see:
-https://github.com/DefinitelyTyped/DefinitelyTyped/blob/659eb50d3/types/graphql/utilities/introspectionQuery.d.ts#L31-L37""" type NamedIntrospectionType{description:String kind:IntrospectionTypeKind name:String}type NamedIntrospectionTypeNoDescription{name:String}"""Introspection values that can be children of other types for changes, such
+https://github.com/DefinitelyTyped/DefinitelyTyped/blob/659eb50d3/types/graphql/utilities/introspectionQuery.d.ts#L31-L37
+"""
+type NamedIntrospectionType {
+  kind: IntrospectionTypeKind
+  name: String
+  description: String
+}
+
+"""
+Introspection values that can be children of other types for changes, such
 as input fields, objects in interfaces, enum values. In the future, this
 value could become an interface to allow fields specific to the types
-returned.""" type NamedIntrospectionValue{description:String name:String printedType:String}type NamedIntrospectionValueNoDescription{name:String printedType:String}"""A non-federated service for a monolithic graph.""" type NonFederatedImplementingService{"""Timestamp of when this implementing service was created.""" createdAt:Timestamp!"""Identifies which graph this non-implementing service belongs to.
-Formerly known as "service_id".""" graphID:String!"""Specifies which variant of a graph this implementing service belongs to".
-Formerly known as "tag".""" graphVariant:String!}type NotFoundError implements Error{message:String!}"""Arbitrary JSON object""" scalar Object type OdysseyAttempt{completedAt:Timestamp id:ID!responses:[OdysseyResponse!]!startedAt:Timestamp!testId:String!}type OdysseyCertification{certificationId:String!earnedAt:Timestamp!id:ID!owner:OdysseyCertificationOwner}type OdysseyCertificationOwner{fullName:String!id:ID!}type OdysseyCourse{completedAt:Timestamp enrolledAt:Timestamp id:ID!}input OdysseyCourseInput{completedAt:Timestamp courseId:String!}type OdysseyResponse{correct:Boolean!id:ID!questionId:String!values:[OdysseyValue!]!}input OdysseyResponseInput{attemptId:ID!correct:Boolean!questionId:String!values:[String!]!}type OdysseyTask{completedAt:Timestamp id:ID!value:String}input OdysseyTaskInput{completedAt:Timestamp taskId:String!value:String}type OdysseyValue{id:ID!value:String!}enum OnboardingArchitecture{MONOLITH SUPERGRAPH}type Operation{id:ID!name:String signature:String truncated:Boolean!}type OperationAcceptedChange{acceptedAt:Timestamp!acceptedBy:Identity!change:StoredApprovedChange!checkID:ID!graphID:ID!id:ID!operationID:String!}"""Columns of OperationCheckStats.""" enum OperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_REQUESTS_COUNT}type OperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String serviceId:ID}"""Filter for data in OperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input OperationCheckStatsFilter{and:[OperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:OperationCheckStatsFilterIn not:OperationCheckStatsFilter or:[OperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in OperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input OperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type OperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input OperationCheckStatsOrderBySpec{column:OperationCheckStatsColumn!direction:Ordering!}type OperationCheckStatsRecord{"""Dimensions of OperationCheckStats that can be grouped by.""" groupBy:OperationCheckStatsDimensions!"""Metrics of OperationCheckStats that can be aggregated over.""" metrics:OperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type OperationCollection{createdAt:Timestamp!createdBy:Identity description:String """If a user has any of these roles, they will be able to edit this
-collection. This will be null if and only if \`isShared\` is false""" editRoles:[UserPermission!]@deprecated(reason:"deprecated in favour of minEditRole")id:ID!isFavorite:Boolean!isSandbox:Boolean!isShared:Boolean!lastUpdatedAt:Timestamp!lastUpdatedBy:Identity minEditRole:UserPermission name:String!operation(id:ID!):OperationCollectionEntryResult operations:[OperationCollectionEntry!]!""" Permissions the current user has for this collection""" permissions:OperationCollectionPermissions!variants:[GraphVariant!]!}type OperationCollectionEntry{collection:OperationCollection!createdAt:Timestamp!createdBy:Identity currentOperationRevision:OperationCollectionEntryState!id:ID!lastUpdatedAt:Timestamp!lastUpdatedBy:Identity name:String!orderingIndex:String!}type OperationCollectionEntryMutation{moveToCollection(collectionId:ID!lowerOrderingBound:String upperOrderingBound:String):MoveOperationCollectionEntryResult!reorderEntry(lowerOrderingBound:String upperOrderingBound:String):UpdateOperationCollectionResult updateName(name:String!):UpdateOperationCollectionEntryResult updateValues(operationInput:OperationCollectionEntryStateInput!):UpdateOperationCollectionEntryResult}union OperationCollectionEntryMutationResult=NotFoundError|OperationCollectionEntryMutation|PermissionError union OperationCollectionEntryResult=NotFoundError|OperationCollectionEntry type OperationCollectionEntryState{body:String!createdAt:Timestamp!createdBy:Identity headers:[OperationHeader!]variables:String}input OperationCollectionEntryStateInput{body:String!headers:[OperationHeaderInput!]""" I'm assuming this is non null""" variables:String}type OperationCollectionMutation{addOperation(name:String!operationInput:OperationCollectionEntryStateInput!):AddOperationCollectionEntryResult addToVariant(variantRef:ID!):AddOperationCollectionToVariantResult!@deprecated(reason:"Will throw NotImplemented")delete:DeleteOperationCollectionResult deleteOperation(id:ID!):RemoveOperationCollectionEntryResult duplicateCollection(description:String isSandbox:Boolean!isShared:Boolean!name:String!variantRef:ID):DuplicateOperationCollectionResult!operation(id:ID!):OperationCollectionEntryMutationResult removeFromVariant(variantRef:ID!):RemoveOperationCollectionFromVariantResult!@deprecated(reason:"Will throw NotImplemented")setMinEditRole(editRole:UserPermission):UpdateOperationCollectionResult updateDescription(description:String):UpdateOperationCollectionResult updateEditRoles(editRoles:[UserPermission!]!):UpdateOperationCollectionResult@deprecated(reason:"Deprecated in favour of setMinEditRole")updateIsFavorite(isFavorite:Boolean!):UpdateOperationCollectionResult updateIsShared(isShared:Boolean!):UpdateOperationCollectionResult updateName(name:String!):UpdateOperationCollectionResult}type OperationCollectionPermissions{canEditOperations:Boolean!canManage:Boolean!canReadOperations:Boolean!}union OperationCollectionResult=NotFoundError|OperationCollection|PermissionError type OperationDocument{"""Operation document body""" body:String!"""Operation name""" name:String}input OperationDocumentInput{"""Operation document body""" body:String!"""Operation name""" name:String}type OperationHeader{name:String!value:String!}input OperationHeaderInput{name:String!value:String!}"""Operation name filter configuration for a graph.""" type OperationNameFilter{"""name of the operation by the user and reported alongside metrics""" name:String!}"""Options to filter by operation name.""" input OperationNameFilterInput{"""name of the operation set by the user and 
reported alongside metrics""" name:String!}type OperationValidationError{message:String!}type OperationsCheckResult{"""Operations affected by all changes in diff""" affectedQueries:[AffectedQuery!]"""Summary/counts for all changes in diff""" changeSummary:ChangeSummary!"""List of schema changes with associated affected clients and operations""" changes:[Change!]!"""Indication of the success of the change, either failure, warning, or notice.""" checkSeverity:ChangeSeverity!"""The variant that was used as a base to check against""" checkedVariant:GraphVariant!createdAt:Timestamp!id:ID!"""Number of affected query operations that are neither marked as SAFE or IGNORED""" numberOfAffectedOperations:Int!"""Number of operations that were validated during schema diff""" numberOfCheckedOperations:Int!workflowTask:OperationsCheckTask!}type OperationsCheckTask implements CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!"""The result of the check.""" result:OperationsCheckResult status:CheckWorkflowTaskStatus!workflow:CheckWorkflow!}enum Ordering{ASCENDING DESCENDING}"""A reusable invite link for an organization.""" type OrganizationInviteLink{createdAt:Timestamp!"""A joinToken that can be passed to Mutation.joinAccount to join the organization.""" joinToken:String!"""The role that the user will receive if they join the organization with this link.""" role:UserPermission!}type OrganizationSSO{defaultRole:UserPermission!idpid:ID!provider:OrganizationSSOProvider!}enum OrganizationSSOProvider{PINGONE}"""PagerDuty notification channel""" type PagerDutyChannel implements Channel{id:ID!name:String!routingKey:String!subscriptions:[ChannelSubscription!]!}"""PagerDuty notification channel parameters""" input PagerDutyChannelInput{name:String routingKey:String!}"""Schema for a subgraph with associated metadata""" type PartialSchema{"""Timestamp for when the partial schema was created""" createdAt:Timestamp!"""If this sdl is currently actively composed in the gateway, this is true""" isLive:Boolean!"""The GraphQL document for a subgraph schema.""" sdl:String!"""The path of deep storage to find the raw enriched partial schema file""" sdlPath:String!}"""Input for registering a partial schema to an implementing service.
+returned.
+"""
+type NamedIntrospectionValue {
+  name: String
+  description: String
+  printedType: String
+}
+
+"""An error that occurs when a requested object is not found."""
+type NotFoundError implements Error {
+  """The error message."""
+  message: String!
+}
+
+"""A list of saved GraphQL operations."""
+type OperationCollection {
+  """The timestamp when the collection was created."""
+  createdAt: Timestamp!
+  """The user or other entity that created the collection."""
+  createdBy: Identity
+  """The collection's description. A `null` description was never set, and empty string description was set to be empty string by a user, or other entity."""
+  description: String
+  id: ID!
+  """Whether the current user has marked the collection as a favorite."""
+  isFavorite: Boolean!
+  """Whether the collection is a [sandbox collection](https://www.apollographql.com/docs/studio/explorer/operation-collections/#sandbox-collections)."""
+  isSandbox: Boolean!
+  """Whether the collection is shared across its associated organization."""
+  isShared: Boolean!
+  """The timestamp when the collection was most recently updated."""
+  lastUpdatedAt: Timestamp!
+  """The user or other entity that most recently updated the collection."""
+  lastUpdatedBy: Identity
+  """The minimum role a user needs to edit this collection. Valid values: null, CONSUMER, OBSERVER, DOCUMENTER, CONTRIBUTOR, GRAPH_ADMIN. This value is always `null` if `isShared` is `false`. If `null` when `isShared` is `true`, the minimum role is `GRAPH_ADMIN`."""
+  minEditRole: UserPermission
+  """The collection's name."""
+  name: String!
+  """Returns the operation in the collection with the specified ID, if any."""
+  operation(id: ID!): OperationCollectionEntryResult
+  """A list of the GraphQL operations that belong to the collection."""
+  operations: [OperationCollectionEntry!]!
+  """The permissions that the current user has for the collection."""
+  permissions: OperationCollectionPermissions!
+}
+
+"""A saved operation entry within an Operation Collection."""
+type OperationCollectionEntry {
+  """The timestamp when the entry was created."""
+  createdAt: Timestamp!
+  """The user or other entity that created the entry."""
+  createdBy: Identity
+  """Details of the entry's associated operation, such as its `body` and `variables`."""
+  currentOperationRevision: OperationCollectionEntryState!
+  id: ID!
+  """The timestamp when the entry was most recently updated."""
+  lastUpdatedAt: Timestamp!
+  """The user or other entity that most recently updated the entry."""
+  lastUpdatedBy: Identity
+  """The entry's name."""
+  name: String!
+  """The entry's lexicographical ordering index within its containing collection."""
+  orderingIndex: String!
+}
+
+"""Provides fields for modifying an operation in a collection."""
+type OperationCollectionEntryMutation {
+  """Updates the name of an operation."""
+  updateName(name: String!): UpdateOperationCollectionEntryResult
+  """Updates the body, headers, and/or variables of an operation."""
+  updateValues(operationInput: OperationCollectionEntryStateInput!): UpdateOperationCollectionEntryResult
+}
+
+union OperationCollectionEntryMutationResult = NotFoundError | OperationCollectionEntryMutation | PermissionError
+
+"""Possible return values when querying for an entry in an operation collection (either the entry object or an `Error` object)."""
+union OperationCollectionEntryResult = NotFoundError | OperationCollectionEntry
+
+"""The most recent body, variable and header values of a saved operation entry."""
+type OperationCollectionEntryState {
+  """The raw body of the entry's GraphQL operation."""
+  body: String!
+  """Headers for the entry's GraphQL operation."""
+  headers: [OperationHeader!]
+  """Variables for the entry's GraphQL operation, as a JSON string."""
+  variables: String
+}
+
+"""Fields for creating or modifying an operation collection entry."""
+input OperationCollectionEntryStateInput {
+  """The operation's query body."""
+  body: String!
+  """The operation's headers."""
+  headers: [OperationHeaderInput!]
+  """The operation's variables."""
+  variables: String
+}
+
+"""Provides fields for modifying an [operation collection](https://www.apollographql.com/docs/studio/explorer/operation-collections/)."""
+type OperationCollectionMutation {
+  """Adds an operation to this collection."""
+  addOperation(name: String!, operationInput: OperationCollectionEntryStateInput!): AddOperationCollectionEntryResult
+  """Adds operations to this collection."""
+  addOperations(operations: [AddOperationInput!]!): AddOperationCollectionEntriesResult
+  """Deletes this operation collection. This also deletes all of the collection's associated operations."""
+  delete: DeleteOperationCollectionResult
+  """Deletes an operation from this collection."""
+  deleteOperation(id: ID!): RemoveOperationCollectionEntryResult
+  operation(id: ID!): OperationCollectionEntryMutationResult
+  """Updates the minimum role a user needs to be able to modify this collection."""
+  setMinEditRole(editRole: UserPermission): UpdateOperationCollectionResult
+  """Updates this collection's description."""
+  updateDescription(description: String): UpdateOperationCollectionResult
+  """Updates whether the current user has marked this collection as a favorite."""
+  updateIsFavorite(isFavorite: Boolean!): UpdateOperationCollectionResult
+  """Updates whether this collection is shared across its associated organization."""
+  updateIsShared(isShared: Boolean!): UpdateOperationCollectionResult
+  """Updates this operation collection's name."""
+  updateName(name: String!): UpdateOperationCollectionResult
+}
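+
+# A hedged sketch of updating an existing collection through `Mutation.operationCollection`.
+# Only `__typename` is selected on the results because `UpdateOperationCollectionResult` is
+# defined elsewhere in this schema; the collection ID and name values are hypothetical.
+#
+# mutation RenameCollection {
+#   operationCollection(id: "collection-id") {
+#     updateName(name: "Renamed collection") { __typename }
+#     updateIsFavorite(isFavorite: true) { __typename }
+#   }
+# }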
+
+"""Whether the current user can perform various actions on the associated collection."""
+type OperationCollectionPermissions {
+  """Whether the current user can edit operations in the associated collection."""
+  canEditOperations: Boolean!
+  """Whether the current user can delete or update the associated collection's metadata, such as its name and description."""
+  canManage: Boolean!
+  """Whether the current user can read operations in the associated collection."""
+  canReadOperations: Boolean!
+}
+
+union OperationCollectionResult = NotFoundError | OperationCollection | PermissionError | ValidationError
+
+"""Saved headers on a saved operation."""
+type OperationHeader {
+  """The header's name."""
+  name: String!
+  """The header's value."""
+  value: String!
+}
+
+input OperationHeaderInput {
+  """The header's name."""
+  name: String!
+  """The header's value."""
+  value: String!
+}
+
+"""Options to filter by operation name."""
+input OperationNameFilterInput {
+  """name of the operation set by the user and reported alongside metrics"""
+  name: String!
+  version: String
+}
+
+type OperationsCheckResult {
+  id: ID!
+  """Indication of the success of the change, either failure, warning, or notice."""
+  checkSeverity: ChangeSeverity!
+  """Number of operations that were validated during schema diff"""
+  numberOfCheckedOperations: Int!
+  """List of schema changes with associated affected clients and operations"""
+  changes: [Change!]!
+  """Summary/counts for all changes in diff"""
+  changeSummary: ChangeSummary!
+  """Operations affected by all changes in diff"""
+  affectedQueries: [AffectedQuery!]
+  """Number of affected query operations that are neither marked as SAFE or IGNORED"""
+  numberOfAffectedOperations: Int!
+  createdAt: Timestamp!
+}
+
+type OperationsCheckTask implements CheckWorkflowTask {
+  completedAt: Timestamp
+  createdAt: Timestamp!
+  id: ID!
+  status: CheckWorkflowTaskStatus!
+  targetURL: String
+  workflow: CheckWorkflow!
+  """
+  The result of the operations check. This will be null when the task is initializing or running,
+  or when the build task fails (which is a prerequisite task to this one).
+  """
+  result: OperationsCheckResult
+}
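+
+# A minimal sketch (fragment name hypothetical) of reading an operations-check outcome from
+# the task and result types above:
+#
+# fragment OperationsCheckSummary on OperationsCheckTask {
+#   status
+#   result {
+#     checkSeverity
+#     numberOfCheckedOperations
+#     numberOfAffectedOperations
+#     createdAt
+#   }
+# }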
+
+type Order {
+  id: ID!
+  orderType: OrderType!
+  status: OrderStatus!
+  reason: String
+  logs(first: Int, offset: Int): [LogMessage!]!
+  router: Router!
+}
+
+union OrderOrError = Order
+
+enum OrderStatus {
+  PENDING
+  COMPLETED
+  ROLLING_BACK
+  ERRORED
+  SUPERSEDED
+}
+
+enum OrderType {
+  CREATE_ROUTER
+  DESTROY_ROUTER
+  UPDATE_ROUTER
+}
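+
+# A minimal sketch (fragment name and page size hypothetical) of inspecting a cloud-router
+# order and its logs, using the Order and LogMessage types above:
+#
+# fragment RouterOrderLogs on Order {
+#   id
+#   orderType
+#   status
+#   reason
+#   logs(first: 50) { timestamp level message }
+# }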
+
+"""The schema for a single published subgraph in Studio."""
+type SubgraphSchema {
+  """The subgraph schema document as SDL."""
+  sdl: String!
+}
+
+"""
+Input for registering a partial schema to an implementing service.
 One of the fields must be specified (validated server-side).
 
 If a new partialSchemaSDL is passed in, this operation will store it before
@@ -77,96 +1452,529 @@ creating the association.
 
 If both the sdl and hash are specified, an error will be thrown if the provided
 hash doesn't match our hash of the sdl contents. If the sdl field is specified,
-the hash does not need to be and will be computed server-side.""" input PartialSchemaInput{"""Hash of the partial schema to associate; error is thrown if only the hash is
-specified and the hash has not been seen before""" hash:String """Contents of the partial schema in SDL syntax, but may reference types
-that aren't defined in this document""" sdl:String}type PermissionError implements Error{message:String!}type PromoteSchemaError{code:PromoteSchemaErrorCode!message:String!}enum PromoteSchemaErrorCode{CANNOT_PROMOTE_SCHEMA_FOR_FEDERATED_GRAPH}type PromoteSchemaResponse{code:PromoteSchemaResponseCode!tag:SchemaTag!}enum PromoteSchemaResponseCode{NO_CHANGES_DETECTED PROMOTION_SUCCESS}union PromoteSchemaResponseOrError=PromoteSchemaError|PromoteSchemaResponse type Protobuf{json:String!object:Object!raw:Blob!text:String!}type Query{"""Account by ID""" account(id:ID!):Account """Retrieve account by billing provider identifier""" accountByBillingCode(id:ID!):Account """Retrieve account by internal id""" accountByInternalID(id:ID!):Account """Whether an account ID is available for mutation{newAccount(id:)}""" accountIDAvailable(id:ID!):Boolean!"""All accounts""" allAccounts(search:String tier:BillingPlanTier):[Account!]"""All available plans""" allPlans:[BillingPlan!]!allPublicVariants:[GraphVariant!]"""All services""" allServices(search:String):[Service!]"""All timezones with their offsets from UTC""" allTimezoneOffsets:[TimezoneOffset!]!"""All users""" allUsers(search:String):[User!]"""Look up a plan by ID""" billingPlan(id:ID):BillingPlanV2 """All available plans""" billingPlans:[BillingPlanV2!]!"""If this is true, the user is an Apollo administrator who can ignore restrictions based purely on billing plan.""" canBypassPlanRestrictions:Boolean!diffSchemas(baseSchema:String!nextSchema:String!):[Change!]!"""Get the unsubscribe settings for a given email.""" emailPreferences(email:String!token:String!):EmailPreferences experimentalFeatures:GlobalExperimentalFeatures!"""Address of the Studio frontend.""" frontendUrlRoot:String!"""Access a graph by ID.""" graph(id:ID!):Service internalActiveCronJobs:[CronJob!]!internalAdminUsers:[InternalAdminUser!]internalUnresolvedCronExecutionFailures:[CronExecution!]!"""User or graph querying the API, null if not authenticated.""" me:Identity odysseyCertification(id:ID!):OdysseyCertification operationCollection(id:ID!):OperationCollectionResult!operationCollectionEntries(collectionEntryIds:[ID!]!):[OperationCollectionEntry!]!"""Access an organization by ID.""" organization(id:ID!):Account """Look up a plan by ID""" plan(id:ID):BillingPlan """A list of public variants that have been selected to be shown on our Graph Directory.""" publiclyListedVariants:[GraphVariant!]"""Service by ID""" service(id:ID!):Service """Query statistics across all services. For admins only; normal users must go through AccountsStatsWindow or ServiceStatsWindow.""" stats(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):StatsWindow!"""Get the studio settings for the current user""" studioSettings:UserSettings """The plan started by AccountMutation.startTeamSubscription""" teamBillingPlan(billingPeriod:BillingPeriod!):BillingPlanV2!"""The plan started by AccountMutation.startTeamSubscription""" teamPlan(billingPeriod:BillingPeriod!):BillingPlan!"""Schema transformation for the Apollo platform API. Renames types. 
Internal to Apollo.""" transformSchemaForPlatformApi(baseSchema:GraphQLDocument!):GraphQLDocument """The plan started by AccountMutation.startTrial""" trialBillingPlan:BillingPlanV2!"""The plan started by AccountMutation.startTrial""" trialPlan:BillingPlan!"""User by ID""" user(id:ID!):User """Access a variant by reference of the form `graphID@variantName`, or `graphID` for the default `current` variant.
-Returns null when the graph or variant do not exist, or when the graph cannot be accessed.
-Note that we can return more types implementing Error in the future.""" variant(ref:ID!):GraphVariantLookup}"""query documents to validate against""" input QueryDocumentInput{document:String}type QueryPlan{json:String!object:Object!text:String!}"""Columns of QueryStats.""" enum QueryStatsColumn{ACCOUNT_ID CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type QueryStatsDimensions{accountId:ID clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in QueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input QueryStatsFilter{"""Selects rows whose accountId dimension equals the given value if not null. To query for the null value, use {in: {accountId: [null]}} instead.""" accountId:ID and:[QueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:QueryStatsFilterIn not:QueryStatsFilter or:[QueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in QueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input QueryStatsFilterIn{"""Selects rows whose accountId dimension is in the given list. A null value in the list means a row with null for that dimension.""" accountId:[ID]"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type QueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input QueryStatsOrderBySpec{column:QueryStatsColumn!direction:Ordering!}type QueryStatsRecord{"""Dimensions of QueryStats that can be grouped by.""" groupBy:QueryStatsDimensions!"""Metrics of QueryStats that can be aggregated over.""" metrics:QueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Query Trigger""" type QueryTrigger implements ChannelSubscription{channels:[Channel!]!comparisonOperator:ComparisonOperator!enabled:Boolean!excludedOperationNames:[String!]!id:ID!metric:QueryTriggerMetric!operationNames:[String!]!percentile:Float scope:QueryTriggerScope!serviceId:String!state:QueryTriggerState!threshold:Float!variant:String window:QueryTriggerWindow!}"""Query trigger""" input QueryTriggerInput{channelIds:[String!]comparisonOperator:ComparisonOperator!enabled:Boolean excludedOperationNames:[String!]metric:QueryTriggerMetric!operationNames:[String!]percentile:Float scope:QueryTriggerScope threshold:Float!variant:String window:QueryTriggerWindow!}enum QueryTriggerMetric{"""Number of requests within the window that resulted in an error. Ignores `percentile`.""" ERROR_COUNT """Number of error requests divided by total number of requests. Ignores `percentile`.""" ERROR_PERCENTAGE """Number of requests within the window. Ignores `percentile`.""" REQUEST_COUNT """Request latency in ms. 
Requires `percentile`.""" REQUEST_SERVICE_TIME}enum QueryTriggerScope{ALL ANY UNRECOGNIZED}"""Query trigger state""" type QueryTriggerState{evaluatedAt:Timestamp!lastTriggeredAt:Timestamp operations:[QueryTriggerStateOperation!]!triggered:Boolean!}type QueryTriggerStateOperation{count:Long!operation:String!triggered:Boolean!value:Float!}enum QueryTriggerWindow{FIFTEEN_MINUTES FIVE_MINUTES ONE_MINUTE UNRECOGNIZED}"""The documentation for a graph variant, as display in Studio.""" type Readme{"""Content of the document.""" content:String!id:ID!"""Last time the document was updated.""" lastUpdatedAt:Timestamp!"""Identity of who updated the document last.""" lastUpdatedBy:Identity}type RegisterOperationsMutationResponse{invalidOperations:[InvalidOperation!]newOperations:[RegisteredOperation!]registrationSuccess:Boolean!}input RegisteredClientIdentityInput{identifier:String!name:String!version:String}type RegisteredOperation{signature:ID!}input RegisteredOperationInput{document:String metadata:RegisteredOperationMetadataInput signature:ID!}input RegisteredOperationMetadataInput{"""This will be used to link existing records in Engine to a new ID.""" engineSignature:String}type RegistryApiKey{keyName:String token:String!}type RegistryStatsWindow{schemaCheckStats:[AccountChecksStatsRecord!]!schemaPublishStats:[AccountPublishesStatsRecord!]!}type RegistrySubscription implements ChannelSubscription{channel:Channel channels:[Channel!]!@deprecated(reason:"Use channels list instead")createdAt:Timestamp!enabled:Boolean!id:ID!lastUpdatedAt:Timestamp!options:SubscriptionOptions!variant:String}type RelaunchComplete{latestLaunch:Launch!updated:Boolean!}type RelaunchError{message:String!}union RelaunchResult=RelaunchComplete|RelaunchError union RemoveOperationCollectionEntryResult=OperationCollection|PermissionError union RemoveOperationCollectionFromVariantResult=GraphVariant|NotFoundError|PermissionError|ValidationError union ReorderOperationCollectionResult=OperationCollection|PermissionError type ReportSchemaError implements ReportSchemaResult{code:ReportSchemaErrorCode!inSeconds:Int!message:String!withCoreSchema:Boolean!}enum ReportSchemaErrorCode{BOOT_ID_IS_NOT_VALID_UUID BOOT_ID_IS_REQUIRED CORE_SCHEMA_HASH_IS_NOT_SCHEMA_SHA256 CORE_SCHEMA_HASH_IS_REQUIRED CORE_SCHEMA_HASH_IS_TOO_LONG EXECUTABLE_SCHEMA_ID_IS_NOT_SCHEMA_SHA256 EXECUTABLE_SCHEMA_ID_IS_REQUIRED EXECUTABLE_SCHEMA_ID_IS_TOO_LONG GRAPH_REF_INVALID_FORMAT GRAPH_REF_IS_REQUIRED GRAPH_VARIANT_DOES_NOT_MATCH_REGEX GRAPH_VARIANT_IS_REQUIRED LIBRARY_VERSION_IS_TOO_LONG PLATFORM_IS_TOO_LONG RUNTIME_VERSION_IS_TOO_LONG SCHEMA_IS_NOT_PARSABLE SCHEMA_IS_NOT_VALID SERVER_ID_IS_TOO_LONG USER_VERSION_IS_TOO_LONG}type ReportSchemaResponse implements ReportSchemaResult{inSeconds:Int!withCoreSchema:Boolean!}interface ReportSchemaResult{inSeconds:Int!withCoreSchema:Boolean!}type ReportServerInfoError implements ReportServerInfoResult{code:ReportSchemaErrorCode!inSeconds:Int!message:String!withExecutableSchema:Boolean!}type ReportServerInfoResponse implements ReportServerInfoResult{inSeconds:Int!withExecutableSchema:Boolean!}interface ReportServerInfoResult{inSeconds:Int!withExecutableSchema:Boolean!}enum Resolution{R1D R1H R1M R5M R6H R15M}enum ResponseHints{NONE SAMPLE_RESPONSES SUBGRAPHS TIMINGS TRACE_TIMINGS}type RoleOverride{graph:Service!lastUpdatedAt:Timestamp!role:UserPermission!user:User!}input RoverArgumentInput{key:String!value:Object}"""SHA-256 hash, represented in lowercase hexadecimal""" scalar SHA256 type ScheduledSummary implements 
ChannelSubscription{channel:Channel@deprecated(reason:"Use channels list instead")channels:[Channel!]!enabled:Boolean!id:ID!timezone:String!variant:String!}"""A GraphQL schema document, which may optionally map back to context with which the schema was ingested.""" type Schema{createTemporaryURL(expiresInSeconds:Int!=86400):TemporaryURL """The timestamp of initial ingestion of a schema to a graph.""" createdAt:Timestamp!"""The raw GraphQL document for the schema in question""" document:GraphQLDocument!"""The number of fields; this includes user defined fields only, excluding built-in types and fields""" fieldCount:Int!gitContext:GitContext """The hex representation of the SHA256 of the GraphQL document.""" hash:ID!introspection:IntrospectionSchema!"""The number of types; this includes user defined types only, excluding built-in types""" typeCount:Int!}"""Represents an error from running schema composition on a list of subgraph definitions.""" type SchemaCompositionError{"""A machine-readable error code.""" code:String """Affected locations.""" locations:[SourceLocation]!"""A human-readable locations.""" message:String!}"""The difference between two schemas, as usually computed during a schema check.""" type SchemaDiff{"""Clients affected by all changes in the diff.""" affectedClients:[AffectedClient!]@deprecated(reason:"Unsupported.")"""Operations affected by all changes in the diff.""" affectedQueries:[AffectedQuery!]"""Numeric summary of all changes in the diff.""" changeSummary:ChangeSummary!"""List of schema changes with associated affected clients and operations.""" changes:[Change!]!"""Number of affected query operations that are neither marked as safe or ignored.""" numberOfAffectedOperations:Int!"""Number of operations that were validated during the check.""" numberOfCheckedOperations:Int """Indication of the success of the change; either failure, warning, or notice.""" severity:ChangeSeverity!"""The tag against which this diff was created""" tag:String type:ChangeType!@deprecated(reason:"use severity instead")"""Configuration of validation""" validationConfig:SchemaDiffValidationConfig}type SchemaDiffValidationConfig{"""Clients to ignore during validation.""" excludedClients:[ClientInfoFilterOutput!]"""Operation names to ignore during validation.""" excludedOperationNames:[OperationNameFilter]"""delta in seconds from current time that determines the start of the window
-for reported metrics included in a schema diff. A day window from the present
-day would have a `from` value of -86400. In rare cases, this could be an ISO
-timestamp if the user passed one in on diff creation""" from:Timestamp """Operation IDs to ignore during validation.""" ignoredOperations:[ID!]"""Variants to include during validation.""" includedVariants:[String!]"""Minimum number of requests within the window for a query to be considered.""" queryCountThreshold:Int """Number of requests within the window for a query to be considered, relative to
-total request count. Expected values are between 0 and 0.05 (minimum 5% of
-total request volume)""" queryCountThresholdPercentage:Float """delta in seconds from current time that determines the end of the
-window for reported metrics included in a schema diff. A day window
-from the present day would have a `to` value of -0. In rare
-cases, this could be an ISO timestamp if the user passed one in on diff
-creation""" to:Timestamp}type SchemaPublishSubscription implements ChannelSubscription{channels:[Channel!]!createdAt:Timestamp!enabled:Boolean!id:ID!lastUpdatedAt:Timestamp!variant:String}input SchemaReport{"""A randomly generated UUID, immutable for the lifetime of the edge server runtime.""" bootId:String!"""The hex SHA256 hash of the schema being reported. Note that for a GraphQL server with a core schema, this should be the core schema, not the API schema.""" coreSchemaHash:String!"""The graph ref (eg, 'id@variant')""" graphRef:String!"""The version of the edge server reporting agent, e.g. apollo-server-2.8, graphql-java-3.1, etc. length must be <= 256 characters.""" libraryVersion:String """The infra environment in which this edge server is running, e.g. localhost, Kubernetes, AWS Lambda, Google CloudRun, AWS ECS, etc. length must be <= 256 characters.""" platform:String """The runtime in which the edge server is running, e.g. node 12.03, zulu8.46.0.19-ca-jdk8.0.252-macosx_x64, etc. length must be <= 256 characters.""" runtimeVersion:String """If available, an identifier for the edge server instance, such that when restarting this instance it will have the same serverId, with a different bootId. For example, in Kubernetes this might be the pod name. Length must be <= 256 characters.""" serverId:String """An identifier used to distinguish the version (from the user's perspective) of the edge server's code itself. For instance, the git sha of the server's repository or the docker sha of the associated image this server runs with. Length must be <= 256 characters.""" userVersion:String}"""A specific publication of a graph variant.""" type SchemaTag{"""The result of composition, including either a supergraph schema or errors,
-executed during this publication. Only available with managed federation.""" compositionResult:CompositionResult createdAt:Timestamp!"""Differences with the schema from the previous successful publication.""" diffToPrevious:SchemaDiff gitContext:GitContext """List of previously uploaded SchemaTags under the same tag name, starting with
-the selected published schema record. Sorted in reverse chronological order
-by creation date (newest publish first).
-
-Note: This does not include the history of checked schemas""" history(includeUnchanged:Boolean!=true limit:Int!=3 offset:Int=0):[SchemaTag!]!"""Number of tagged schemas created under the same tag name.
-Also represents the maximum size of the history's limit argument.""" historyLength:Int!"""Number of schemas tagged prior to this one under the same tag name, its position
-in the tag history.""" historyOrder:Int!"""The identifier for this specific publication.""" id:ID!"""Time of publication.""" publishedAt:Timestamp!"""The Identity that published this schema and their client info, or null if this isn't
-a publish. Sub-fields may be null if they weren't recorded.""" publishedBy:IdentityAndClientInfo """Indicates the schemaTag of the schema's original upload, null if this is the
-first upload of the schema.""" reversionFrom:SchemaTag """The published schema.""" schema:Schema!slackNotificationBody(graphDisplayName:String!):String tag:String!@deprecated(reason:"Please use variant { name } instead")"""The graph variant this belongs to.""" variant:GraphVariant!webhookNotificationBody:String!}"""How many seats of the given types does an organization have (regardless of plan type)?""" type Seats{"""How many members that are free in this organization.""" free:Int!"""How many members that are not free in this organization.""" fullPrice:Int!}type SemanticChange{"""Target arg of change made.""" argNode:NamedIntrospectionArg """Node related to the top level node that was changed, such as a field in an object,
-a value in an enum or the object of an interface""" childNode:NamedIntrospectionValue """Semantic metadata about the type of change""" definition:ChangeDefinition!"""Top level node affected by the change""" parentNode:NamedIntrospectionType}"""A graph in Apollo Studio represents a graph in your organization.
+the hash does not need to be and will be computed server-side.
+"""
+input PartialSchemaInput {
+  """
+  Contents of the partial schema in SDL syntax, but may reference types
+  that aren't defined in this document
+  """
+  sdl: String
+  """
+  Hash of the partial schema to associate; error is thrown if only the hash is
+  specified and the hash has not been seen before
+  """
+  hash: String
+}
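
  As an illustration only (not part of the schema or the diff), a value of this
  input type can be written inline as a GraphQL argument literal; the subgraph
  SDL below is hypothetical, and omitting `hash` leaves it to be computed
  server-side as described above:

    { sdl: "extend type Query { topProducts(first: Int = 5): [String!]! }" }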
+
+"""An error that occurs when the current user doesn't have sufficient permissions to perform an action."""
+type PermissionError implements Error {
+  """The error message."""
+  message: String!
+}
+
+"""An error related to an organization's Apollo Studio plan."""
+type PlanError {
+  """The error message."""
+  message: String!
+}
+
+type Query {
+  """Returns the root URL of the Apollo Studio frontend."""
+  frontendUrlRoot: String!
+  """Returns details of the graph with the provided ID."""
+  graph(id: ID!): Graph
+  """Returns details of the authenticated `User` or `Graph` executing this query. If this is an unauthenticated query (i.e., no API key is provided), this field returns null."""
+  me: Identity
+  """Returns details of the Studio organization with the provided ID."""
+  organization(id: ID!): Organization
+  """Returns details of the Apollo user with the provided ID."""
+  user(id: ID!): User
+  """Returns details of a Studio graph variant with the provided graph ref. A graph ref has the format `graphID@variantName` (or just `graphID` for the default variant `current`). Returns null if the graph or variant doesn't exist, or if the graph isn't accessible by the current actor."""
+  variant(ref: ID!): GraphVariantLookup
+  """Returns the [operation collection](https://www.apollographql.com/docs/studio/explorer/operation-collections/) for the provided ID."""
+  operationCollection(id: ID!): OperationCollectionResult!
+}
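
  A minimal sketch of an operation against these root fields, assuming an
  authenticated Studio API client; "my-graph@current" is a hypothetical graph
  ref, and `__typename` is used because the concrete members of `Identity` and
  `GraphVariantLookup` are defined elsewhere in the schema:

    query StudioRootSketch {
      frontendUrlRoot
      me { __typename }
      variant(ref: "my-graph@current") { __typename }
    }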
+
+"""The README documentation for a graph variant, which is displayed in Studio."""
+type Readme {
+  """The contents of the README in plaintext."""
+  content: String!
+  """The README's unique ID. `a15177c0-b003-4837-952a-dbfe76062eb1` for the default README"""
+  id: ID!
+  """The actor that most recently updated the README (usually a `User`). `null` for the default README, or if the `User` was deleted."""
+  lastUpdatedBy: Identity
+  """The timestamp when the README was most recently updated. `null` for the default README"""
+  lastUpdatedTime: Timestamp
+}
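
  A small fragment sketching how the README fields compose; `lastUpdatedBy`
  selects only `__typename`, since the members of `Identity` are defined
  elsewhere in the schema:

    fragment ReadmeFields on Readme {
      id
      content
      lastUpdatedTime
      lastUpdatedBy { __typename }
    }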
+
+union RemoveOperationCollectionEntryResult = OperationCollection | PermissionError
+
+type Router {
+  """
+  Last time when the Cloud Router was updated
+
+  If the Cloud Router was never updated, this value will be null
+  """
+  updatedAt: NaiveDateTime
+  """Current status of the Cloud Router"""
+  status: RouterStatus!
+  """Current version of the Cloud Router"""
+  routerVersion: RouterVersion!
+  """
+  URL where the Cloud Router can be found
+
+  This will be null if the Cloud Router is in a deleted status
+  """
+  routerUrl: String
+  """Retrieves a specific Order related to this Cloud Router"""
+  order(orderId: ID!): Order
+  """Retrieves all Orders related to this Cloud Router"""
+  orders(first: Int, offset: Int): [Order!]!
+  """Return the list of secrets for this Cloud Router with their hash values"""
+  secrets: [Secret!]!
+}
+
+enum RouterStatus {
+  CREATING
+  UPDATING
+  DELETING
+  ROLLING_BACK
+  RUNNING
+  DELETED
+}
+
+type RouterVersion {
+  version: String!
+  core: String!
+  build: String!
+  status: Status!
+  configVersion: String!
+  configSchema: String!
+}
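
  A sketch of a selection over a Cloud Router, written as a fragment because
  the field that returns `Router` lies outside this hunk; the field names
  follow the definitions above, and `Order` is selected with `__typename` only:

    fragment RouterOverview on Router {
      status                         # e.g. RUNNING or DELETED
      routerUrl                      # null while the router is deleted
      updatedAt
      routerVersion { version core build configVersion }
      secrets { name hash createdAt }
      orders(first: 10) { __typename }
    }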
+
+"""A GraphQL schema document and associated metadata."""
+type Schema {
+  """The GraphQL schema document's SHA256 hash, represented as a hexadecimal string."""
+  hash: ID!
+  """The GraphQL schema document."""
+  document: GraphQLDocument!
+}
+
+"""An error that occurred while running schema composition on a set of subgraph schemas."""
+type SchemaCompositionError {
+  """A human-readable message describing the error."""
+  message: String!
+  """Source locations related to the error."""
+  locations: [SourceLocation]!
+  """A machine-readable error code."""
+  code: String
+}
+
+"""The result of computing the difference between two schemas, usually as part of schema checks."""
+type SchemaDiff {
+  """Indicates the overall safety of the changes included in the diff, based on operation history (e.g., `FAILURE` or `NOTICE`)."""
+  severity: ChangeSeverity!
+  """A list of all schema changes in the diff, including their severity."""
+  changes: [Change!]!
+  """Numeric summaries for each type of change in the diff."""
+  changeSummary: ChangeSummary!
+  """Operations affected by all changes in the diff."""
+  affectedQueries: [AffectedQuery!]
+  """The number of GraphQL operations that were validated during the check."""
+  numberOfCheckedOperations: Int
+  """The number of GraphQL operations affected by the diff's changes that are neither marked as safe nor ignored."""
+  numberOfAffectedOperations: Int!
+}
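
  A fragment sketching how a diff might be inspected after a check; nested
  selections use `__typename` only, since `ChangeSummary`, `Change`, and
  `AffectedQuery` are defined outside this hunk:

    fragment DiffOverview on SchemaDiff {
      severity
      numberOfCheckedOperations
      numberOfAffectedOperations
      changeSummary { __typename }
      changes { __typename }
      affectedQueries { __typename }
    }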
+
+"""Contains details for an individual publication of an individual graph variant."""
+type SchemaPublication {
+  """
+  The variant that was published to.
+  """
+  variant: GraphVariant!
+  """The schema that was published to the variant."""
+  schema: Schema!
+  """The result of federated composition executed for this publication. This result includes either a supergraph schema or error details, depending on whether composition succeeded. This value is null when the publication is for a non-federated graph."""
+  compositionResult: CompositionResult
+  """The timestamp when the variant was published to."""
+  publishedAt: Timestamp!
+  """A schema diff comparing against the schema from the most recent previous successful publication."""
+  diffToPrevious: SchemaDiff
+}
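
  A fragment sketching a publication record; `compositionResult` is selected
  with `__typename` only because `CompositionResult` is defined elsewhere, and
  per the description above it is null for non-federated graphs:

    fragment PublicationOverview on SchemaPublication {
      publishedAt
      variant { __typename }
      schema { hash }
      compositionResult { __typename }
      diffToPrevious { severity }
    }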
+
+type Secret {
+  createdAt: DateTime!
+  name: String!
+  hash: String!
+}
+
+type SemanticChange {
+  """Semantic metadata about the type of change"""
+  definition: ChangeDefinition!
+  """Top level node affected by the change"""
+  parentNode: NamedIntrospectionType
+  """
+  Node related to the top level node that was changed, such as a field in an object,
+  a value in an enum or the object of an interface
+  """
+  childNode: NamedIntrospectionValue
+  """Target arg of change made."""
+  argNode: NamedIntrospectionArg
+}
+
+"""
+A graph in Apollo Studio represents a graph in your organization.
 Each graph has one or more variants, which correspond to the different environments where that graph runs (such as staging and production).
-Each variant has its own GraphQL schema, which means schemas can differ between environments.""" type Service implements Identity{"""Organization that this graph belongs to.""" account:Account accountId:ID apiKeys:[GraphApiKey!]"""A view of the identity as an Actor type.""" asActor:Actor!"""Get an URL to which an avatar image can be uploaded. Client uploads by sending a PUT request
-with the image data to MediaUploadInfo.url. Client SHOULD set the "Content-Type" header to the
-browser-inferred MIME type, and SHOULD set the "x-apollo-content-filename" header to the
-filename, if such information is available. Client MUST set the "x-apollo-csrf-token" header to
-MediaUploadInfo.csrfToken.""" avatarUpload:AvatarUploadResult """Get an image URL for the service's avatar. Note that CORS is not enabled for these URLs. The size
-argument is used for bandwidth reduction, and should be the size of the image as displayed in the
-application. Apollo's media server will downscale larger images to at least the requested size,
-but this will not happen for third-party media servers.""" avatarUrl(size:Int!=40):String """Get available notification endpoints""" channels(channelIds:[ID!]):[Channel!]"""Get check configuration for this graph.""" checkConfiguration:CheckConfiguration """Get a check workflow for this graph by its ID""" checkWorkflow(id:ID!):CheckWorkflow """Get check workflows for this graph ordered by creation time, most recent first.""" checkWorkflows(filter:CheckFilterInput limit:Int!=100):[CheckWorkflow!]!"""List of options available for filtering checks for this graph by author.
-If a filter is passed, constrains results to match the filter.""" checksAuthorOptions(filter:CheckFilterInput):[String!]!"""List of options available for filtering checks for this graph by branch.
-If a filter is passed, constrains results to match the filter.""" checksBranchOptions(filter:CheckFilterInput):[String!]!"""List of options available for filtering checks for this graph by subgraph name.
-If a filter is passed, constrains results to match the filter.""" checksSubgraphOptions(filter:CheckFilterInput):[String!]!"""Given a graphCompositionID, return the results of composition. This can represent either a validation or a publish.""" compositionResultById(id:ID!):CompositionResult createdAt:Timestamp!createdBy:Identity datadogMetricsConfig:DatadogMetricsConfig defaultBuildPipelineTrack:String deletedAt:Timestamp description:String devGraphOwner:User """Get a GraphQL document by hash""" document(hash:SHA256):GraphQLDocument """When this is true, this graph will be hidden from non-admin members of the org who haven't been explicitly assigned a
-role on this graph.""" hiddenFromUninvitedNonAdminAccountMembers:Boolean!"""Globally unique identifier for this graph.""" id:ID!"""List of subgraphs that comprise a graph. A non-federated graph should have a single implementing service.
-Set includeDeleted to see deleted subgraphs.""" implementingServices(graphVariant:String!includeDeleted:Boolean):GraphImplementors lastReportedAt(graphVariant:String):Timestamp """Current identity, null if not authenticated.""" me:Identity """The composition result that was most recently published to a graph variant.""" mostRecentCompositionPublish(graphVariant:String!):CompositionPublishResult """Permissions of the current user in this graph.""" myRole:UserPermission """Name of this graph. Note that this field is deprecated.""" name:String!@deprecated(reason:"Use Service.title")onboardingArchitecture:OnboardingArchitecture operation(id:ID!):Operation """Gets the operations and their approved changes for this graph, checkID, and operationID.""" operationsAcceptedChanges(checkID:ID!operationID:String!):[OperationAcceptedChange!]!"""Get an operations check result for a specific check ID""" operationsCheck(checkID:ID!):OperationsCheckResult """Get query triggers for a given variant. If variant is null all the triggers for this service will be gotten.""" queryTriggers(graphVariant:String operationNames:[String!]):[QueryTrigger!]readme:Readme """Registry specific stats for this graph.""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow """Whether registry subscriptions (with any options) are enabled. If variant is not passed, returns true if configuration is present for any variant""" registrySubscriptionsEnabled(graphVariant:String):Boolean!@deprecated(reason:"This field will be removed")reportingEnabled:Boolean!"""The list of members that can access this graph, accounting for graph role overrides""" roleOverrides:[RoleOverride!]"""Which permissions the current user has for interacting with this graph""" roles:ServiceRoles scheduledSummaries:[ScheduledSummary!]!"""Get a schema by hash or current tag""" schema(hash:ID tag:String):Schema """The current publish associated to a given variant (with 'tag' as the variant name).""" schemaTag(tag:String!):SchemaTag schemaTagById(id:ID!):SchemaTag """Get schema tags, with optional filtering to a set of tags. Always sorted by creation
-date in reverse chronological order.""" schemaTags(tags:[String!]):[SchemaTag!]stats(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):ServiceStatsWindow!@deprecated(reason:"use Service.statsWindow instead")statsWindow(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):ServiceStatsWindow """Generate a test schema publish notification body""" testSchemaPublishBody(variant:String!):String!"""Name of this graph.""" title:String!trace(id:ID!):Trace traceStorageEnabled:Boolean!"""A particular variant often representing a live traffic environment (such as "dev", "staging", or "prod").
-Each variant can represent a specific URL or destination to query at, analytics, and its own schema history.
-Pass in a name to get a specific variant. Use `Graph.variants` to get a list of variants.""" variant(name:String!):GraphVariant """The list of variants that exist for this graph""" variants:[GraphVariant!]!}"""Columns of ServiceBillingUsageStats.""" enum ServiceBillingUsageStatsColumn{OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG TIMESTAMP}type ServiceBillingUsageStatsDimensions{operationCountProvidedExplicitly:String schemaTag:String}"""Filter for data in ServiceBillingUsageStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceBillingUsageStatsFilter{and:[ServiceBillingUsageStatsFilter!]in:ServiceBillingUsageStatsFilterIn not:ServiceBillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[ServiceBillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceBillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceBillingUsageStatsFilterIn{"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceBillingUsageStatsMetrics{operationCount:Long!}input ServiceBillingUsageStatsOrderBySpec{column:ServiceBillingUsageStatsColumn!direction:Ordering!}type ServiceBillingUsageStatsRecord{"""Dimensions of ServiceBillingUsageStats that can be grouped by.""" groupBy:ServiceBillingUsageStatsDimensions!"""Metrics of ServiceBillingUsageStats that can be aggregated over.""" metrics:ServiceBillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceEdgeServerInfos.""" enum ServiceEdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID TIMESTAMP USER_VERSION}type ServiceEdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID userVersion:String}"""Filter for data in ServiceEdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceEdgeServerInfosFilter{and:[ServiceEdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:ServiceEdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:ServiceEdgeServerInfosFilter or:[ServiceEdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. 
To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose userVersion dimension equals the given value if not null. To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in ServiceEdgeServerInfos. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceEdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input ServiceEdgeServerInfosOrderBySpec{column:ServiceEdgeServerInfosColumn!direction:Ordering!}type ServiceEdgeServerInfosRecord{"""Dimensions of ServiceEdgeServerInfos that can be grouped by.""" groupBy:ServiceEdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceErrorStats.""" enum ServiceErrorStatsColumn{CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceErrorStatsDimensions{clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String}"""Filter for data in ServiceErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceErrorStatsFilter{and:[ServiceErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ServiceErrorStatsFilterIn not:ServiceErrorStatsFilter or:[ServiceErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. 
To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceErrorStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input ServiceErrorStatsOrderBySpec{column:ServiceErrorStatsColumn!direction:Ordering!}type ServiceErrorStatsRecord{"""Dimensions of ServiceErrorStats that can be grouped by.""" groupBy:ServiceErrorStatsDimensions!"""Metrics of ServiceErrorStats that can be aggregated over.""" metrics:ServiceErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldExecutions.""" enum ServiceFieldExecutionsColumn{ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG TIMESTAMP}type ServiceFieldExecutionsDimensions{fieldName:String parentType:String schemaTag:String}"""Filter for data in ServiceFieldExecutions. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldExecutionsFilter{and:[ServiceFieldExecutionsFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldExecutionsFilterIn not:ServiceFieldExecutionsFilter or:[ServiceFieldExecutionsFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. 
To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldExecutions. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldExecutionsFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldExecutionsMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input ServiceFieldExecutionsOrderBySpec{column:ServiceFieldExecutionsColumn!direction:Ordering!}type ServiceFieldExecutionsRecord{"""Dimensions of ServiceFieldExecutions that can be grouped by.""" groupBy:ServiceFieldExecutionsDimensions!"""Metrics of ServiceFieldExecutions that can be aggregated over.""" metrics:ServiceFieldExecutionsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldLatencies.""" enum ServiceFieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceFieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String}"""Filter for data in ServiceFieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldLatenciesFilter{and:[ServiceFieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldLatenciesFilterIn not:ServiceFieldLatenciesFilter or:[ServiceFieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input ServiceFieldLatenciesOrderBySpec{column:ServiceFieldLatenciesColumn!direction:Ordering!}type ServiceFieldLatenciesRecord{"""Dimensions of ServiceFieldLatencies that can be grouped by.""" groupBy:ServiceFieldLatenciesDimensions!"""Metrics of ServiceFieldLatencies that can be aggregated over.""" metrics:ServiceFieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldRequestsByClientVersion.""" enum ServiceFieldRequestsByClientVersionColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG TIMESTAMP}type ServiceFieldRequestsByClientVersionDimensions{clientName:String clientVersion:String fieldName:String parentType:String schemaTag:String}"""Filter for data in ServiceFieldRequestsByClientVersion. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldRequestsByClientVersionFilter{and:[ServiceFieldRequestsByClientVersionFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldRequestsByClientVersionFilterIn not:ServiceFieldRequestsByClientVersionFilter or:[ServiceFieldRequestsByClientVersionFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldRequestsByClientVersion. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldRequestsByClientVersionFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldRequestsByClientVersionMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input ServiceFieldRequestsByClientVersionOrderBySpec{column:ServiceFieldRequestsByClientVersionColumn!direction:Ordering!}type ServiceFieldRequestsByClientVersionRecord{"""Dimensions of ServiceFieldRequestsByClientVersion that can be grouped by.""" groupBy:ServiceFieldRequestsByClientVersionDimensions!"""Metrics of ServiceFieldRequestsByClientVersion that can be aggregated over.""" metrics:ServiceFieldRequestsByClientVersionMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldUsage.""" enum ServiceFieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceFieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String}"""Filter for data in ServiceFieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldUsageFilter{and:[ServiceFieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldUsageFilterIn not:ServiceFieldUsageFilter or:[ServiceFieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input ServiceFieldUsageOrderBySpec{column:ServiceFieldUsageColumn!direction:Ordering!}type ServiceFieldUsageRecord{"""Dimensions of ServiceFieldUsage that can be grouped by.""" groupBy:ServiceFieldUsageDimensions!"""Metrics of ServiceFieldUsage that can be aggregated over.""" metrics:ServiceFieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Mutations to a graph.""" type ServiceMutation{"""Check a proposed subgraph schema change.
-If the proposal composes successfully, perform a usage check for the resulting schema.""" checkPartialSchema("""Deprecated and ignored.""" frontend:String gitContext:GitContextInput """Specifies which variant of a graph this mutation operates on.""" graphVariant:String!historicParameters:HistoricQueryParameters """Name of the implementing service to validate the partial schema against""" implementingServiceName:String!"""If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served.""" introspectionEndpoint:String isSandboxCheck:Boolean!=false """The partial schema to validate against an implementing service""" partialSchema:PartialSchemaInput!"""Whether to use the maximum retention for historical validation. This only takes
-effect if historicParameters is null.""" useMaximumRetention:Boolean):CheckPartialSchemaResult!"""Checks a proposed schema against the schema that has been published to
-a particular variant, using metrics corresponding to `historicParameters`.
-Callers can set `historicParameters` directly or rely on defaults set in the
-graph's check configuration (7 days by default).
-If they do not set `historicParameters` but set `useMaximumRetention`,
-validation will use the maximum retention the graph has access to.""" checkSchema(baseSchemaTag:String="current" """Deprecated and ignored.""" frontend:String gitContext:GitContextInput historicParameters:HistoricQueryParameters """If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served.""" introspectionEndpoint:String isSandboxCheck:Boolean!=false """Only one of proposedSchema, proposedSchemaDocument, and proposedSchemaHash
-may be specified""" proposedSchema:IntrospectionSchemaInput proposedSchemaDocument:String proposedSchemaHash:String useMaximumRetention:Boolean):CheckSchemaResult!"""Make changes to a check workflow.""" checkWorkflow(id:ID!):CheckWorkflowMutation createCompositionStatusSubscription("""ID of Slack channel for registry notification.""" channelID:ID!"""Variant to notify on.""" variant:String!):SchemaPublishSubscription!createSchemaPublishSubscription("""ID of Slack channel for registry notification.""" channelID:ID!"""Variant to notify on.""" variant:String!):SchemaPublishSubscription!"""Soft delete a graph. Data associated with the graph is not permanently deleted; Apollo support can undo.""" delete:Void """Delete the service's avatar. Requires Service.roles.canUpdateAvatar to be true.""" deleteAvatar:AvatarDeleteError """Delete an existing channel""" deleteChannel(id:ID!):Boolean!"""Delete an existing query trigger""" deleteQueryTrigger(id:ID!):Boolean!"""Deletes this service's current subscriptions specific to the ID, returns true if it existed""" deleteRegistrySubscription(id:ID!):Boolean!"""Deletes this service's current registry subscription(s) specific to its graph variant,
-returns a list of subscription IDs that were deleted.""" deleteRegistrySubscriptions(variant:String!):[ID!]!deleteScheduledSummary(id:ID!):Boolean!"""Delete a variant by name.""" deleteSchemaTag(tag:String!):DeleteSchemaTagResult!"""Given a UTC timestamp, delete all traces associated with this Service, on that corresponding day. If a timestamp to is provided, deletes all days inclusive.""" deleteTraces(from:Timestamp!to:Timestamp):Void disableDatadogForwardingLegacyMetricNames:Service """Hard delete a graph and all data associated with it. Its ID cannot be reused.""" hardDelete:Void id:ID!@deprecated(reason:"Use service.id")"""Ignore an operation in future checks;
-changes affecting it will be tracked,
-but won't affect the outcome of the check.
-Returns true if the operation is newly ignored,
-false if it already was.""" ignoreOperationsInChecks(ids:[ID!]!):IgnoreOperationsInChecksResult """Mark the changeset that affects an operation in a given check instance as safe.
-Note that only operations marked as behavior changes are allowed to be marked as safe.""" markChangesForOperationAsSafe("""ID of the schema check.""" checkID:ID!"""ID of the operation to accept changes for.""" operationID:ID!):MarkChangesForOperationAsSafeResult!newKey(keyName:String role:UserPermission!=GRAPH_ADMIN):GraphApiKey!"""Adds an override to the given users permission for this graph""" overrideUserPermission(permission:UserPermission userID:ID!):Service """Promote the schema with the given SHA-256 hash to active for the given variant/tag.""" promoteSchema(graphVariant:String!historicParameters:HistoricQueryParameters overrideComposedSchema:Boolean!=false sha256:SHA256!):PromoteSchemaResponseOrError!"""Publish to a subgraph. If composition is successful, this will update running routers.""" publishSubgraph(activePartialSchema:PartialSchemaInput!gitContext:GitContextInput graphVariant:String!name:String!revision:String!url:String):CompositionAndUpsertResult registerOperationsWithResponse(clientIdentity:RegisteredClientIdentityInput gitContext:GitContextInput """Specifies which variant of a graph these operations belong to.
-Formerly known as "tag"
-Defaults to "current"
-""" graphVariant:String!="current" manifestVersion:Int operations:[RegisteredOperationInput!]!):RegisterOperationsMutationResponse """Removes a subgraph. If composition is successful, this will update running routers.""" removeImplementingServiceAndTriggerComposition("""Do not remove the service, but recompose without it and report any errors.""" dryRun:Boolean!=false graphVariant:String!name:String!):CompositionAndRemoveResult!removeKey("""API key ID""" id:ID):Void renameKey(id:ID!newKeyName:String):GraphApiKey reportServerInfo("""Only sent if previously requested i.e. received ReportServerInfoResult with withExecutableSchema = true. An executable schema is a schema document that describes the full GraphQL schema that an external client could execute queries against. This must be a valid GraphQL schema document, as per the GraphQL specification: https://spec.graphql.org/""" executableSchema:String """Information about the edge server, see descriptions for individual fields.""" info:EdgeServerInfo!):ReportServerInfoResult@deprecated(reason:"use Mutation.reportSchema instead")service:Service!setDefaultBuildPipelineTrack(version:String!):String """Store a given schema document. This schema will be attached to the graph but
-not be associated with any variant. On success, returns the schema hash.""" storeSchemaDocument(schemaDocument:String!):StoreSchemaResponseOrError!"""Test Slack notification channel""" testSlackChannel(id:ID!notification:SlackNotificationInput!):Void testSubscriptionForChannel(channelID:ID!subscriptionID:ID!):String!transfer(to:String!):Service triggerRepublish(graphVariant:String!):Void undelete:Service """Revert the effects of ignoreOperation.
-Returns true if the operation is no longer ignored,
-false if it wasn't.""" unignoreOperationsInChecks(ids:[ID!]!):UnignoreOperationsInChecksResult """Unmark changes for an operation as safe.""" unmarkChangesForOperationAsSafe("""ID of the schema check.""" checkID:ID!"""ID of the operation to unmark changes for.""" operationID:ID!):MarkChangesForOperationAsSafeResult!"""Update schema check configuration for a graph.""" updateCheckConfiguration("""Clients to ignore during validation.""" excludedClients:[ClientFilterInput!]"""Operation names to ignore during validation.""" excludedOperationNames:[OperationNameFilterInput!]"""Operations to ignore during validation.""" excludedOperations:[ExcludedOperationInput!]"""Default configuration to include operations on the base variant.""" includeBaseVariant:Boolean """Variant overrides for validation.""" includedVariants:[String!]"""Minimum number of requests within the window for a query to be considered.""" operationCountThreshold:Int """Number of requests within the window for a query to be considered, relative to
-total request count. Expected values are between 0 and 0.05 (minimum 5% of
-total request volume)""" operationCountThresholdPercentage:Float """Only check operations from the last <timeRangeSeconds> seconds. The default is 7 days (604,800 seconds).""" timeRangeSeconds:Long):CheckConfiguration!updateDatadogMetricsConfig(apiKey:String apiRegion:DatadogApiRegion enabled:Boolean):DatadogMetricsConfig updateDescription(description:String!):Service """Update hiddenFromUninvitedNonAdminAccountMembers""" updateHiddenFromUninvitedNonAdminAccountMembers(hiddenFromUninvitedNonAdminAccountMembers:Boolean!):Service updateReadme(readme:String!):Service updateTitle(title:String!):Service """Publish a schema to this variant, either via a document or an introspection query result.""" uploadSchema(errorOnBadRequest:Boolean!=true gitContext:GitContextInput historicParameters:HistoricQueryParameters overrideComposedSchema:Boolean!=false schema:IntrospectionSchemaInput schemaDocument:String tag:String!):UploadSchemaMutationResponse upsertChannel(id:ID pagerDutyChannel:PagerDutyChannelInput slackChannel:SlackChannelInput webhookChannel:WebhookChannelInput):Channel """Creates a contract schema from a source variant and a set of filter configurations""" upsertContractVariant(contractVariantName:String!filterConfig:FilterConfigInput!initiateLaunch:Boolean!=true sourceVariant:String!):ContractVariantUpsertResult!"""Publish to a subgraph. If composition is successful, this will update running routers.""" upsertImplementingServiceAndTriggerComposition(activePartialSchema:PartialSchemaInput!gitContext:GitContextInput graphVariant:String!name:String!revision:String!url:String):CompositionAndUpsertResult """Create/update PagerDuty notification channel""" upsertPagerDutyChannel(channel:PagerDutyChannelInput!id:ID):PagerDutyChannel upsertQueryTrigger(id:ID trigger:QueryTriggerInput!):QueryTrigger """Create or update a subscription for a service.""" upsertRegistrySubscription("""ID of Slack channel for registry notification.""" channelID:ID """ID of registry subscription""" id:ID """Set of options/customization for notification.""" options:SubscriptionOptionsInput """Variant to notify on.""" variant:String):RegistrySubscription!upsertScheduledSummary(channelID:ID enabled:Boolean id:ID """Deprecated, use the 'variant' argument instead""" tag:String timezone:String variant:String):ScheduledSummary """Create/update Slack notification channel""" upsertSlackChannel(channel:SlackChannelInput!id:ID):SlackChannel upsertWebhookChannel(id:ID name:String secretToken:String url:String!):WebhookChannel validateOperations(gitContext:GitContextInput operations:[OperationDocumentInput!]!tag:String="current"):ValidateOperationsResult!"""This mutation will not result in any changes to the implementing service
-Run composition with the Implementing Service's partial schema replaced with the one provided
-in the mutation's input. Store the composed schema, return the hash of the composed schema,
-and any warnings and errors pertaining to composition.
-This mutation will not run validation against operations.""" validatePartialSchemaOfImplementingServiceAgainstGraph(graphVariant:String!implementingServiceName:String!partialSchema:PartialSchemaInput!):CompositionValidationResult!"""Make changes to a graph variant.""" variant(name:String!):GraphVariantMutation}"""Columns of ServiceOperationCheckStats.""" enum ServiceOperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG TIMESTAMP UNCACHED_REQUESTS_COUNT}type ServiceOperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String}"""Filter for data in ServiceOperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceOperationCheckStatsFilter{and:[ServiceOperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ServiceOperationCheckStatsFilterIn not:ServiceOperationCheckStatsFilter or:[ServiceOperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceOperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceOperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceOperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input ServiceOperationCheckStatsOrderBySpec{column:ServiceOperationCheckStatsColumn!direction:Ordering!}type ServiceOperationCheckStatsRecord{"""Dimensions of ServiceOperationCheckStats that can be grouped by.""" groupBy:ServiceOperationCheckStatsDimensions!"""Metrics of ServiceOperationCheckStats that can be aggregated over.""" metrics:ServiceOperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceQueryStats.""" enum ServiceQueryStatsColumn{CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type ServiceQueryStatsDimensions{clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String}"""Filter for data in ServiceQueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceQueryStatsFilter{and:[ServiceQueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:ServiceQueryStatsFilterIn not:ServiceQueryStatsFilter or:[ServiceQueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceQueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceQueryStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceQueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input ServiceQueryStatsOrderBySpec{column:ServiceQueryStatsColumn!direction:Ordering!}type ServiceQueryStatsRecord{"""Dimensions of ServiceQueryStats that can be grouped by.""" groupBy:ServiceQueryStatsDimensions!"""Metrics of ServiceQueryStats that can be aggregated over.""" metrics:ServiceQueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""A map from role (permission) String to boolean that the current user is allowed for a particular graph.""" type ServiceRoles{"""Whether the currently authenticated user is permitted to perform schema checks (i.e. run `rover (sub)graph check`).""" canCheckSchemas:Boolean!"""Whether the currently authenticated user is permitted to create new graph variants.""" canCreateVariants:Boolean!"""Whether the currently authenticated user is permitted to delete the graph in question""" canDelete:Boolean!"""Whether the currently authenticated user is permitted to manage user access to the graph in question.""" canManageAccess:Boolean!"""Whether the currently authenticated user is permitted to manage the build configuration (e.g. build pipeline version).""" canManageBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to manage 3rd party integrations (e.g. Datadog forwarding).""" canManageIntegrations:Boolean!"""Whether the currently authenticated user is permitted to manage graph-level API keys.""" canManageKeys:Boolean!"""Whether the currently authenticated user is permitted to perform basic administration (e.g. set to public) over variants.""" canManageVariants:Boolean!"""Whether the currently authenticated user is permitted to view details about the build configuration (e.g. build pipeline version).""" canQueryBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to view details of the check configuration for this graph.""" canQueryCheckConfiguration:Boolean!canQueryDeletedImplementingServices:Boolean!"""Whether the currently authenticated user is permitted to view which subgraphs the graph is composed of.""" canQueryImplementingServices:Boolean!canQueryIntegrations:Boolean!canQueryPrivateInfo:Boolean!canQueryPublicInfo:Boolean!canQueryReadmeAuthor:Boolean!canQueryRoleOverrides:Boolean!"""Whether the currently authenticated user is permitted to download schemas owned by this graph.""" canQuerySchemas:Boolean!canQueryStats:Boolean!canQueryTokens:Boolean!canQueryTraces:Boolean!"""Whether the currently authenticated user is permitted to register operations (i.e. 
`apollo client:push`) for this graph.""" canRegisterOperations:Boolean!canStoreSchemasWithoutVariant:Boolean!canUndelete:Boolean!canUpdateAvatar:Boolean!canUpdateDescription:Boolean!canUpdateTitle:Boolean!canVisualizeStats:Boolean!@deprecated(reason:"Replaced with canQueryTraces and canQueryStats")"""Whether the currently authenticated user is permitted to make updates to the check configuration for this graph.""" canWriteCheckConfiguration:Boolean!canWriteTraces:Boolean!@deprecated(reason:"Never worked, not replaced")}"""A time window with a specified granularity over a given service.""" type ServiceStatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:ServiceBillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceBillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceBillingUsageStatsOrderBySpec!]):[ServiceBillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:ServiceEdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceEdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceEdgeServerInfosOrderBySpec!]):[ServiceEdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:ServiceErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceErrorStatsOrderBySpec!]):[ServiceErrorStatsRecord!]!fieldExecutions("""Filter to select what rows to return.""" filter:ServiceFieldExecutionsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldExecutions by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldExecutionsOrderBySpec!]):[ServiceFieldExecutionsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:ServiceFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldLatenciesOrderBySpec!]):[ServiceFieldLatenciesRecord!]!fieldRequestsByClientVersion("""Filter to select what rows to return.""" filter:ServiceFieldRequestsByClientVersionFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldRequestsByClientVersion by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldRequestsByClientVersionOrderBySpec!]):[ServiceFieldRequestsByClientVersionRecord!]!fieldStats("""Filter to select what rows to return.""" filter:ServiceFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldLatenciesOrderBySpec!]):[ServiceFieldLatenciesRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:ServiceFieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldUsageOrderBySpec!]):[ServiceFieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:ServiceOperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceOperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceOperationCheckStatsOrderBySpec!]):[ServiceOperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:ServiceQueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceQueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceQueryStatsOrderBySpec!]):[ServiceQueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:ServiceTracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceTracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceTracePathErrorsRefsOrderBySpec!]):[ServiceTracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:ServiceTraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceTraceRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceTraceRefsOrderBySpec!]):[ServiceTraceRefsRecord!]!}"""Columns of ServiceTracePathErrorsRefs.""" enum ServiceTracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type ServiceTracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in ServiceTracePathErrorsRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceTracePathErrorsRefsFilter{and:[ServiceTracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:ServiceTracePathErrorsRefsFilterIn not:ServiceTracePathErrorsRefsFilter or:[ServiceTracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in ServiceTracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceTracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type ServiceTracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input ServiceTracePathErrorsRefsOrderBySpec{column:ServiceTracePathErrorsRefsColumn!direction:Ordering!}type ServiceTracePathErrorsRefsRecord{"""Dimensions of ServiceTracePathErrorsRefs that can be grouped by.""" groupBy:ServiceTracePathErrorsRefsDimensions!"""Metrics of ServiceTracePathErrorsRefs that can be aggregated over.""" metrics:ServiceTracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceTraceRefs.""" enum ServiceTraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type ServiceTraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String traceId:ID}"""Filter for data in ServiceTraceRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceTraceRefsFilter{and:[ServiceTraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:ServiceTraceRefsFilterIn not:ServiceTraceRefsFilter or:[ServiceTraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. 
To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in ServiceTraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceTraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type ServiceTraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input ServiceTraceRefsOrderBySpec{column:ServiceTraceRefsColumn!direction:Ordering!}type ServiceTraceRefsRecord{"""Dimensions of ServiceTraceRefs that can be grouped by.""" groupBy:ServiceTraceRefsDimensions!"""Metrics of ServiceTraceRefs that can be aggregated over.""" metrics:ServiceTraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Slack notification channel""" type SlackChannel implements Channel{id:ID!name:String!subscriptions:[ChannelSubscription!]!url:String!}"""Slack notification channel parameters""" input SlackChannelInput{name:String url:String!}input SlackNotificationField{key:String!value:String!}"""Slack notification message""" input SlackNotificationInput{color:String fallback:String!fields:[SlackNotificationField!]iconUrl:String text:String timestamp:Timestamp title:String titleLink:String username:String}type SourceLocation{column:Int!line:Int!}"""A time window with a specified granularity.""" type StatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:BillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order BillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[BillingUsageStatsOrderBySpec!]):[BillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:EdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order EdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[EdgeServerInfosOrderBySpec!]):[EdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:ErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ErrorStatsOrderBySpec!]):[ErrorStatsRecord!]!fieldExecutions("""Filter to select what rows to return.""" filter:FieldExecutionsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldExecutions by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldExecutionsOrderBySpec!]):[FieldExecutionsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:FieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldLatenciesOrderBySpec!]):[FieldLatenciesRecord!]!fieldRequestsByClientVersion("""Filter to select what rows to return.""" filter:FieldRequestsByClientVersionFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldRequestsByClientVersion by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldRequestsByClientVersionOrderBySpec!]):[FieldRequestsByClientVersionRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:FieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldUsageOrderBySpec!]):[FieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:OperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order OperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[OperationCheckStatsOrderBySpec!]):[OperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:QueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order QueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[QueryStatsOrderBySpec!]):[QueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:TracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order TracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[TracePathErrorsRefsOrderBySpec!]):[TracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:TraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order TraceRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[TraceRefsOrderBySpec!]):[TraceRefsRecord!]!}type StoreSchemaError{code:StoreSchemaErrorCode!message:String!}enum StoreSchemaErrorCode{SCHEMA_IS_NOT_PARSABLE SCHEMA_IS_NOT_VALID}type StoreSchemaResponse{sha256:SHA256!}union StoreSchemaResponseOrError=StoreSchemaError|StoreSchemaResponse type StoredApprovedChange{argNode:NamedIntrospectionArgNoDescription childNode:NamedIntrospectionValueNoDescription code:ChangeCode!parentNode:NamedIntrospectionTypeNoDescription}scalar StringOrInt type StringToString{key:String!value:String!}input StringToStringInput{key:String!value:String!}type Subgraph{hash:String!name:String!routingURL:String!}type SubgraphChange{name:ID!type:SubgraphChangeType!}enum SubgraphChangeType{ADDITION DELETION MODIFICATION}type SubgraphConfig{id:ID!name:String!schemaHash:String!sdl:String!url:String!}type SubscriptionOptions{"""Enables notifications for schema updates""" schemaUpdates:Boolean!}input SubscriptionOptionsInput{"""Enables notifications for schema updates""" schemaUpdates:Boolean!}enum SubscriptionState{ACTIVE CANCELED EXPIRED FUTURE PAST_DUE PAUSED PENDING UNKNOWN}enum SubscriptionStateV2{ACTIVE CANCELED EXPIRED FUTURE PAST_DUE PAUSED PENDING UNKNOWN}type TemporaryURL{url:String!}enum ThemeName{DARK LIGHT}enum TicketPriority{P0 P1 P2 P3}enum TicketStatus{CLOSED HOLD NEW OPEN PENDING SOLVED}"""ISO 8601, extended format with nanoseconds, Zulu (or '[+-]seconds' for times relative to now)""" scalar Timestamp type TimezoneOffset{minutesOffsetFromUTC:Int!zoneID:String!}"""Counts of changes.""" type TotalChangeSummaryCounts{"""Number of changes that are additions. This includes adding types, adding fields to object, input
-object, and interface types, adding values to enums, adding members to interfaces and unions, and
-adding arguments.""" additions:Int!"""Number of changes that are new usages of the @deprecated directive.""" deprecations:Int!"""Number of changes that are edits. This includes types changing kind, fields and arguments
-changing type, arguments changing default value, and any description changes. This also includes
-edits to @deprecated reason strings.""" edits:Int!"""Number of changes that are removals. This includes removing types, removing fields from object,
-input object, and interface types, removing values from enums, removing members from interfaces
-and unions, and removing arguments. This also includes removing @deprecated usages.""" removals:Int!}type Trace{cacheMaxAgeMs:Float cacheScope:CacheScope clientName:String clientVersion:String durationMs:Float!endTime:Timestamp!http:TraceHTTP id:ID!operationName:String protobuf:Protobuf!root:TraceNode!signature:String!startTime:Timestamp!unexecutedOperationBody:String unexecutedOperationName:String variablesJSON:[StringToString!]!}type TraceError{json:String!locations:[TraceSourceLocation!]!message:String!timestamp:Timestamp}type TraceHTTP{host:String method:HTTPMethod!path:String protocol:String requestHeaders:[StringToString!]!responseHeaders:[StringToString!]!secure:Boolean!statusCode:Int!}type TraceNode{cacheMaxAgeMs:Float cacheScope:CacheScope children:[TraceNode!]!childrenIds:[ID!]!descendants:[TraceNode!]!descendantsIds:[ID!]!endTime:Timestamp!errors:[TraceError!]!id:ID!key:StringOrInt originalFieldName:String parent:ID!parentId:ID path:[String!]!startTime:Timestamp!type:String}"""Columns of TracePathErrorsRefs.""" enum TracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type TracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String """If metrics were collected from a federated service, this field will be prefixed with `service:<SERVICE_NAME>.`""" path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in TracePathErrorsRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input TracePathErrorsRefsFilter{and:[TracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:TracePathErrorsRefsFilterIn not:TracePathErrorsRefsFilter or:[TracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. 
To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in TracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input TracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type TracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input TracePathErrorsRefsOrderBySpec{column:TracePathErrorsRefsColumn!direction:Ordering!}type TracePathErrorsRefsRecord{"""Dimensions of TracePathErrorsRefs that can be grouped by.""" groupBy:TracePathErrorsRefsDimensions!"""Metrics of TracePathErrorsRefs that can be aggregated over.""" metrics:TracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of TraceRefs.""" enum TraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type TraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID traceId:ID}"""Filter for data in TraceRefs. 
Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input TraceRefsFilter{and:[TraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:TraceRefsFilterIn not:TraceRefsFilter or:[TraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in TraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input TraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" traceId:[ID]}type TraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input TraceRefsOrderBySpec{column:TraceRefsColumn!direction:Ordering!}type TraceRefsRecord{"""Dimensions of TraceRefs that can be grouped by.""" groupBy:TraceRefsDimensions!"""Metrics of TraceRefs that can be aggregated over.""" metrics:TraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type TraceSourceLocation{column:Int!line:Int!}"""Counts of changes at the type level, including interfaces, unions, enums, scalars, input objects, etc.""" type TypeChangeSummaryCounts{"""Number of changes that are additions of types.""" additions:Int!"""Number of changes that are edits. This includes types changing kind and any type description
-changes, but also includes adding/removing values from enums, adding/removing members from
-interfaces and unions, and any enum value deprecation and description changes.""" edits:Int!"""Number of changes that are removals of types.""" removals:Int!}"""the TypeFilterConfig is used to isolate
-types, and subsequent fields, through
-various configuration settings.
-
-It defaults to filter towards user defined
-types only""" input TypeFilterConfig{"""include abstract types (interfaces and unions)""" includeAbstractTypes:Boolean=true """include built in scalars (i.e. Boolean, Int, etc)""" includeBuiltInTypes:Boolean=false """include reserved introspection types (i.e. __Type)""" includeIntrospectionTypes:Boolean=false}type URI{"""A GCS URI""" gcs:String!}type UnignoreOperationsInChecksResult{graph:Service!}union UpdateOperationCollectionEntryResult=OperationCollectionEntry|PermissionError|ValidationError union UpdateOperationCollectionResult=OperationCollection|PermissionError|ValidationError """The result of a schema publish to a graph variant.""" type UploadSchemaMutationResponse{"""A response code for processing via machines (e.g. UPLOAD_SUCCESS or NO_CHANGES)""" code:String!"""Human readable result of a schema publish.""" message:String!"""If successful, the corresponding publication.""" publication:SchemaTag """Whether the schema publish successfully completed or encountered errors.""" success:Boolean!"""If successful, the corresponding publication.""" tag:SchemaTag}"""A registered user.""" type User implements Identity{acceptedPrivacyPolicyAt:Timestamp accounts:[Account!]!@deprecated(reason:"Replaced with User.memberships.account")apiKeys(includeCookies:Boolean=false):[UserApiKey!]!"""Translation of this user identity to an 'Actor' type.""" asActor:Actor!"""Get an URL to which an avatar image can be uploaded. Client uploads by sending a PUT request
-with the image data to MediaUploadInfo.url. Client SHOULD set the "Content-Type" header to the
-browser-inferred MIME type, and SHOULD set the "x-apollo-content-filename" header to the
-filename, if such information is available. Client MUST set the "x-apollo-csrf-token" header to
-MediaUploadInfo.csrfToken.""" avatarUpload:AvatarUploadResult """Get an image URL for the user's avatar. Note that CORS is not enabled for these URLs. The size
-argument is used for bandwidth reduction, and should be the size of the image as displayed in the
-application. Apollo's media server will downscale larger images to at least the requested size,
-but this will not happen for third-party media servers.""" avatarUrl(size:Int!=40):String betaFeaturesOn:Boolean!canUpdateAvatar:Boolean!canUpdateEmail:Boolean!canUpdateFullName:Boolean!createdAt:Timestamp!email:String emailModifiedAt:Timestamp emailVerified:Boolean!experimentalFeatures:UserExperimentalFeatures!featureIntros:FeatureIntros fullName:String!"""The user's GitHub username, if they log in via GitHub. May be null even for GitHub users in some edge cases.""" githubUsername:String """The unique identifier for a user.""" id:ID!"""This role is reserved exclusively for internal MDG employees, and it controls what access they may have to other
-organizations. Only admins are allowed to see this field.""" internalAdminRole:InternalMdgAdminRole """Last time any API token from this user was used against AGM services""" lastAuthenticatedAt:Timestamp logoutAfterIdleMs:Int """Which organizations a user belongs to.""" memberships:[UserMembership!]!"""The name (first and last) of a user.""" name:String!odysseyAttempt(id:ID!):OdysseyAttempt odysseyAttempts:[OdysseyAttempt!]!odysseyCertifications:[OdysseyCertification!]odysseyCourses:[OdysseyCourse!]odysseyHasEarlyAccess:Boolean!odysseyHasRequestedEarlyAccess:Boolean!odysseyTasks:[OdysseyTask!]sandboxOperationCollections:[OperationCollection!]!synchronized:Boolean!"""List of Zendesk tickets this user has submitted""" tickets:[ZendeskTicket!]type:UserType!}type UserApiKey implements ApiKey{id:ID!keyName:String token:String!}type UserExperimentalFeatures{exampleFeature:Boolean!}"""An organization a given user belongs to.""" type UserMembership{"""The organization a user is a member of.""" account:Account!"""When the user joined the organization.""" createdAt:Timestamp!"""What level of access a use has to an organization.""" permission:UserPermission!"""The user that is a member of an organization.""" user:User!}type UserMutation{acceptPrivacyPolicy:Void """Change the user's password""" changePassword(newPassword:String!previousPassword:String!):Void createOdysseyAttempt(testId:String!):OdysseyAttempt createOdysseyCertification(certificationId:String!):OdysseyCertification createOdysseyCourses(courses:[OdysseyCourseInput!]!):[OdysseyCourse!]createOdysseyTasks(tasks:[OdysseyTaskInput!]!):[OdysseyTask!]"""Delete the user's avatar. Requires User.canUpdateAvatar to be true.""" deleteAvatar:AvatarDeleteError """Hard deletes the associated user. Throws an error otherwise with reason included.""" hardDelete:Void """Create a new API key for this user. Must take in a name for this key.""" newKey(keyName:String!):UserApiKey!"""Create a new API key for this user if there are no current API keys.
-If an API key already exists, this will return one at random and not create a new one.""" provisionKey(keyName:String!="add-a-name"):ApiKeyProvision """Refresh information about the user from its upstream service (eg list of organizations from GitHub)""" refresh:User """Removes the given key from this user. Can be used to remove either a web cookie or a user API key.""" removeKey(id:ID!):Void """Renames the given key to the new key name.""" renameKey(id:ID!newKeyName:String):UserApiKey resendVerificationEmail:Void setOdysseyCourse(course:OdysseyCourseInput!):OdysseyCourse setOdysseyResponse(response:OdysseyResponseInput!):OdysseyResponse setOdysseyTask(task:OdysseyTaskInput!):OdysseyTask """Submit a zendesk ticket for this user""" submitZendeskTicket(collaborators:[String!]email:String!ticket:ZendeskTicketInput!):ZendeskTicket """Update information about a user; all arguments are optional""" update(email:String fullName:String referrer:String trackingGoogleClientId:String trackingMarketoClientId:String userSegment:UserSegment utmCampaign:String utmMedium:String utmSource:String):User """Updates this users' preference concerning opting into beta features.""" updateBetaFeaturesOn(betaFeaturesOn:Boolean!):User """Update the status of a feature for this. For example, if you want to hide an introductory popup.""" updateFeatureIntros(newFeatureIntros:FeatureIntrosInput):User updateOdysseyAttempt(completedAt:Timestamp id:ID!):OdysseyAttempt """Update user to have the given internal mdg admin role.
-It is necessary to be an MDG_INTERNAL_SUPER_ADMIN to perform update.
-Additionally, upserting a null value explicitly revokes this user's
-admin status.""" updateRole(newRole:InternalMdgAdminRole):User user:User!verifyEmail(token:String!):User}enum UserPermission{BILLING_MANAGER CONSUMER CONTRIBUTOR DOCUMENTER GRAPH_ADMIN LEGACY_GRAPH_KEY OBSERVER ORG_ADMIN}enum UserSegment{JOIN_MY_TEAM LOCAL_DEVELOPMENT NOT_SPECIFIED PRODUCTION_GRAPHS SANDBOX SANDBOX_OPERATION_COLLECTIONS TRY_TEAM}type UserSettings{appNavCollapsed:Boolean!autoManageVariables:Boolean!id:String!mockingResponses:Boolean!preflightScriptEnabled:Boolean!responseHints:ResponseHints!tableMode:Boolean!themeName:ThemeName!}"""Explorer user settings input""" input UserSettingsInput{appNavCollapsed:Boolean autoManageVariables:Boolean mockingResponses:Boolean preflightScriptEnabled:Boolean responseHints:ResponseHints tableMode:Boolean themeName:ThemeName}enum UserType{APOLLO GITHUB SSO}type ValidateOperationsResult{validationResults:[ValidationResult!]!}type ValidationError implements Error{message:String!}enum ValidationErrorCode{DEPRECATED_FIELD INVALID_OPERATION NON_PARSEABLE_DOCUMENT}enum ValidationErrorType{FAILURE INVALID WARNING}"""Represents a single validation error, with information relating to the error
-and its respective operation""" type ValidationResult{"""The validation result's error code""" code:ValidationErrorCode!"""Description of the validation error""" description:String!"""The operation related to this validation result""" operation:OperationDocument!"""The type of validation error thrown - warning, failure, or invalid.""" type:ValidationErrorType!}"""Always null""" scalar Void """Webhook notification channel""" type WebhookChannel implements Channel{id:ID!name:String!secretToken:String subscriptions:[ChannelSubscription!]!url:String!}"""PagerDuty notification channel parameters""" input WebhookChannelInput{name:String secretToken:String url:String!}type ZendeskTicket{createdAt:Timestamp!description:String!graph:Service id:Int!organization:Account priority:TicketPriority!status:TicketStatus subject:String!user:User}"""Zendesk ticket input""" input ZendeskTicketInput{description:String!graphId:String organizationId:String priority:TicketPriority!subject:String!}
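The filter docstrings in the removed definitions above describe two ways to match a dimension: a direct field for an equality check, and an `in` object for null-aware list matching. A hedged sketch of how those pieces combine into a stats query follows; the `service(id:)` root field and the `statsWindow(from:)` field are assumptions not defined in this hunk, while the `queryStats` arguments and selections come from the removed `ServiceStatsWindow` and `ServiceQueryStats*` definitions.

query ErrorProneOperations($graphId: ID!) {
  service(id: $graphId) {                         # assumed root field; not part of this hunk
    statsWindow(from: "-86400") {                 # assumed field; "-86400" uses the relative-seconds form the Timestamp scalar describes
      queryStats(
        filter: {
          clientName: "web"                       # equality check on a dimension
          in: { clientVersion: [null, "1.0.0"] }  # null-aware list match, per the docstrings above
        }
        limit: 100
      ) {
        groupBy { queryName clientVersion }
        metrics { totalRequestCount requestsWithErrorsCount }
      }
    }
  }
}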
+Each variant has its own GraphQL schema, which means schemas can differ between environments.
+"""
+type Graph implements Identity {
+  """The organization that this graph belongs to."""
+  account: Organization
+  """A list of the graph API keys that are active for this graph."""
+  apiKeys: [GraphApiKey!]
+  """Provides a view of the graph as an `Actor` type."""
+  asActor: Actor!
+  """Get a check workflow for this graph by its ID"""
+  checkWorkflow(id: ID!): CheckWorkflow
+  """The graph's globally unique identifier."""
+  id: ID!
+  """Permissions of the current user in this graph."""
+  myRole: UserPermission
+  name: String!
+  """Describes the permissions that the active user has for this graph."""
+  roles: GraphRoles
+  """The graph's name."""
+  title: String!
+  """
+  Provides details of the graph variant with the provided `name`, if a variant
+  with that name exists for this graph. Otherwise, returns null.
+
+  For a list of _all_ variants associated with a graph, use `Graph.variants` instead.
+  """
+  variant(name: String!): GraphVariant
+  """A list of the variants for this graph."""
+  variants: [GraphVariant!]!
+  """Get a GraphQL document by hash"""
+  document(hash: SHA256): GraphQLDocument
+  """Get check workflows for this graph ordered by creation time, most recent first."""
+  checkWorkflows(limit: Int! = 100, filter: CheckFilterInput): [CheckWorkflow!]!
+}
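Taken together with the `GraphRoles` type defined further below, the `Graph` type above supports overview queries like the following sketch. The `graph(id:)` root field and the `GraphVariant.name` selection are assumptions; neither is defined in this hunk.

query GraphOverview($graphId: ID!) {
  graph(id: $graphId) {        # assumed root field; the Query type is not shown in this hunk
    id
    title
    myRole
    roles {
      canCheckSchemas
      canManageKeys
    }
    variants {
      name                     # assumed GraphVariant field; GraphVariant is defined elsewhere in the schema
    }
  }
}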
+
+"""Provides access to mutation fields for managing Studio graphs and subgraphs."""
+type GraphMutation {
+  """Generates a new graph API key for this graph with the specified permission level."""
+  newKey(keyName: String, role: UserPermission! = GRAPH_ADMIN): GraphApiKey!
+  """Deletes the existing graph API key with the provided ID, if any."""
+  removeKey(
+    """API key ID"""
+    id: ID!
+  ): Void
+  """Sets a new name for the graph API key with the provided ID, if any. This does not invalidate the key or change its value."""
+  renameKey(id: ID!, newKeyName: String): GraphApiKey
+  """Creates a contract schema from a source variant and a set of filter configurations"""
+  upsertContractVariant(
+    """The name of the contract variant, e.g. `public-api`. Once set, this value cannot be changed."""
+    contractVariantName: String!
+    """The filter configuration used to build a contract schema. The configuration consists of lists of tags for schema elements to include or exclude in the resulting schema."""
+    filterConfig: FilterConfigInput!
+    """Whether a launch and schema publish should be initiated after updating configuration. Defaults to `true`."""
+    initiateLaunch: Boolean! = true
+    """The graphRef of the variant the contract will be derived from, e.g. `my-graph@production`. Once set, this value cannot be changed."""
+    sourceVariant: String
+  ): ContractVariantUpsertResult!
+  """Make changes to a graph variant."""
+  variant(name: String!): GraphVariantMutation
+  """Publish a schema to this variant, either via a document or an introspection query result."""
+  uploadSchema(schema: IntrospectionSchemaInput, schemaDocument: String, tag: String!, historicParameters: HistoricQueryParameters, overrideComposedSchema: Boolean! = false, errorOnBadRequest: Boolean! = true, gitContext: GitContextInput): SchemaPublicationResult
+  """
+  Checks a proposed schema against the schema that has been published to
+  a particular variant, using metrics corresponding to `historicParameters`.
+  Callers can set `historicParameters` directly or rely on defaults set in the
+  graph's check configuration (7 days by default).
+  If they do not set `historicParameters` but set `useMaximumRetention`,
+  validation will use the maximum retention the graph has access to.
+  """
+  checkSchema(
+    """
+    Only one of proposedSchema, proposedSchemaDocument, and proposedSchemaHash
+    may be specified
+    """
+    proposedSchema: IntrospectionSchemaInput
+    proposedSchemaDocument: String
+    proposedSchemaHash: String
+    baseSchemaTag: String = "current"
+    gitContext: GitContextInput
+    historicParameters: HistoricQueryParameters
+    useMaximumRetention: Boolean
+    isSandboxCheck: Boolean! = false
+    """If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served."""
+    introspectionEndpoint: String
+    """Deprecated and ignored."""
+    frontend: String
+  ): CheckSchemaResult!
+  """Publish to a subgraph. If composition is successful, this will update running routers."""
+  publishSubgraph(graphVariant: String!, name: String!, url: String, revision: String!, activePartialSchema: PartialSchemaInput!, gitContext: GitContextInput): SubgraphPublicationResult
+  """Removes a subgraph. If composition is successful, this will update running routers."""
+  removeImplementingServiceAndTriggerComposition(
+    graphVariant: String!
+    name: String!
+    """Do not remove the service, but recompose without it and report any errors."""
+    dryRun: Boolean! = false
+  ): SubgraphRemovalResult!
+  """
+  Checks a proposed subgraph schema change against a published subgraph.
+  If the proposal composes successfully, a usage check is performed for the resulting supergraph schema.
+  """
+  checkPartialSchema(
+    """The name of the graph variant to run the check against."""
+    graphVariant: String!
+    """Name of the implementing service to validate the partial schema against"""
+    implementingServiceName: String!
+    """The partial schema to validate against an implementing service"""
+    partialSchema: PartialSchemaInput!
+    gitContext: GitContextInput
+    historicParameters: HistoricQueryParameters
+    """Deprecated and ignored."""
+    frontend: String
+    """
+    Whether to use the maximum retention for historical validation. This only takes
+    effect if historicParameters is null.
+    """
+    useMaximumRetention: Boolean
+    isSandboxCheck: Boolean! = false
+    """If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served."""
+    introspectionEndpoint: String
+  ): CheckPartialSchemaResult!
+}
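As a usage illustration, creating and then renaming a graph API key through `GraphMutation` could look like the sketch below. Only the `newKey` and `renameKey` signatures come from this hunk; the `graph(id:)` root mutation field and the `GraphApiKey` selections are assumptions (the `ApiKey` interface in the removed hunk suggests `id`, `keyName`, and `token`).

mutation RotateCiKey($graphId: ID!) {
  graph(id: $graphId) {                              # assumed root mutation field; not shown in this hunk
    newKey(keyName: "ci-deploy", role: GRAPH_ADMIN) {
      id                                             # assumed GraphApiKey fields, mirroring the
      keyName                                        # ApiKey interface from the removed hunk
      token
    }
    renameKey(id: "existing-key-id", newKeyName: "ci-deploy-old") {
      id
      keyName
    }
  }
}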
+
+"""Individual permissions for the current user when interacting with a particular Studio graph."""
+type GraphRoles {
+  """Whether the currently authenticated user is permitted to perform schema checks (i.e., run `rover (sub)graph check`)."""
+  canCheckSchemas: Boolean!
+  """Whether the currently authenticated user is permitted to create new graph variants."""
+  canCreateVariants: Boolean!
+  """Whether the currently authenticated user is permitted to delete the graph in question"""
+  canDelete: Boolean!
+  """Whether the currently authenticated user is permitted to manage user access to the graph in question."""
+  canManageAccess: Boolean!
+  """Whether the currently authenticated user is permitted to manage the build configuration (e.g., build pipeline version)."""
+  canManageBuildConfig: Boolean!
+  """Whether the currently authenticated user is permitted to manage third-party integrations (e.g., Datadog forwarding)."""
+  canManageIntegrations: Boolean!
+  """Whether the currently authenticated user is permitted to manage graph-level API keys."""
+  canManageKeys: Boolean!
+  """Whether the currently authenticated user is permitted to perform basic administration of variants (e.g., make a variant public)."""
+  canManageVariants: Boolean!
+  """Whether the currently authenticated user is permitted to view details about the build configuration (e.g. build pipeline version)."""
+  canQueryBuildConfig: Boolean!
+  """Whether the currently authenticated user is permitted to view details of the check configuration for this graph."""
+  canQueryCheckConfiguration: Boolean!
+  """Whether the currently authenticated user is permitted to view which subgraphs the graph is composed of."""
+  canQueryImplementingServices: Boolean!
+  """Whether the currently authenticated user is permitted to download schemas owned by this graph."""
+  canQuerySchemas: Boolean!
+  """Whether the currently authenticated user is permitted to register operations (i.e. `apollo client:push`) for this graph."""
+  canRegisterOperations: Boolean!
+  """Whether the currently authenticated user is permitted to make updates to the check configuration for this graph."""
+  canWriteCheckConfiguration: Boolean!
+}
+
+"""A SHA-256 hash, represented as a lowercase hexadecimal string."""
+scalar SHA256
+
+"""A location in a source code file."""
+type SourceLocation {
+  """Column number."""
+  column: Int!
+  """Line number."""
+  line: Int!
+}
+
+enum Status {
+  STABLE
+  NEXT
+  DEPRECATED
+}
+
+"""A subgraph in a federated Studio supergraph."""
+type Subgraph {
+  """The subgraph schema document's SHA256 hash, represented as a hexadecimal string."""
+  hash: String!
+  """The subgraph's registered name."""
+  name: String!
+  """The number of fields in this subgraph"""
+  numberOfFields: Int
+  """The number of types in this subgraph"""
+  numberOfTypes: Int
+  """The subgraph's routing URL, provided to gateways that use managed federation."""
+  routingURL: String!
+  """Timestamp of when the subgraph was published."""
+  updatedAt: Timestamp
+}
+
+"""A change made to a subgraph as part of a launch."""
+type SubgraphChange {
+  """The subgraph's name."""
+  name: ID!
+  """The type of change that was made."""
+  type: SubgraphChangeType!
+}
+
+enum SubgraphChangeType {
+  ADDITION
+  DELETION
+  MODIFICATION
+}
+
+"""Input type to provide when running schema checks asynchronously for a federated supergraph."""
+input SubgraphCheckAsyncInput {
+  """Configuration options for the check execution."""
+  config: HistoricQueryParametersInput!
+  """The GitHub context to associate with the check."""
+  gitContext: GitContextInput!
+  """The graph ref of the Studio graph and variant to run checks against (such as `my-graph@current`)."""
+  graphRef: ID
+  """The URL of the GraphQL endpoint that Apollo Sandbox introspected to obtain the proposed schema. Required if `isSandbox` is `true`."""
+  introspectionEndpoint: String
+  """If `true`, the check was initiated by Apollo Sandbox."""
+  isSandbox: Boolean!
+  """The proposed subgraph schema to perform checks with."""
+  proposedSchema: GraphQLDocument!
+  """The name of the subgraph to check schema changes for."""
+  subgraphName: String!
+}
+
+"""ISO 8601, extended format with nanoseconds, Zulu (or "[+-]seconds" as a string or number relative to now)"""
+scalar Timestamp
+
+"""Counts of changes."""
+type TotalChangeSummaryCounts {
+  """
+  Number of changes that are additions. This includes adding types, adding fields to object, input
+  object, and interface types, adding values to enums, adding members to interfaces and unions, and
+  adding arguments.
+  """
+  additions: Int!
+  """
+  Number of changes that are removals. This includes removing types, removing fields from object,
+  input object, and interface types, removing values from enums, removing members from interfaces
+  and unions, and removing arguments. This also includes removing @deprecated usages.
+  """
+  removals: Int!
+  """
+  Number of changes that are edits. This includes types changing kind, fields and arguments
+  changing type, arguments changing default value, and any description changes. This also includes
+  edits to @deprecated reason strings.
+  """
+  edits: Int!
+  """Number of changes that are new usages of the @deprecated directive."""
+  deprecations: Int!
+}
+
+"""Counts of changes at the type level, including interfaces, unions, enums, scalars, input objects, etc."""
+type TypeChangeSummaryCounts {
+  """Number of changes that are additions of types."""
+  additions: Int!
+  """Number of changes that are removals of types."""
+  removals: Int!
+  """
+  Number of changes that are edits. This includes types changing kind and any type description
+  changes, but also includes adding/removing values from enums, adding/removing members from
+  interfaces and unions, and any enum value deprecation and description changes.
+  """
+  edits: Int!
+}
+
+union UpdateOperationCollectionEntryResult = OperationCollectionEntry | PermissionError | ValidationError
+
+union UpdateOperationCollectionResult = OperationCollection | PermissionError | ValidationError
+
+"""Describes the result of publishing a schema to a graph variant."""
+type SchemaPublicationResult {
+  """A machine-readable response code that indicates the type of result (e.g., `UPLOAD_SUCCESS` or `NO_CHANGES`)"""
+  code: String!
+  """Whether the schema publish operation succeeded (`true`) or encountered errors (`false`)."""
+  success: Boolean!
+  """A Human-readable message describing the type of result."""
+  message: String!
+  """If the publish operation succeeded, this contains its details. Otherwise, this is null."""
+  publication: SchemaPublication
+}
+
+"""A registered Apollo Studio user."""
+type User implements Identity {
+  """Returns a list of all active user API keys for the user."""
+  apiKeys(includeCookies: Boolean = false): [UserApiKey!]!
+  """Returns a representation of this user as an `Actor` type. Useful when determining which actor (usually a `User` or `Graph`) performed a particular action in Studio."""
+  asActor: Actor!
+  """The user's unique ID."""
+  id: ID!
+  """A list of the user's memberships in Apollo Studio organizations."""
+  memberships: [UserMembership!]!
+  """The user's first and last name."""
+  name: String!
+}
+
+"""
+Represents a user API key, which has permissions identical to
+its associated Apollo user.
+"""
+type UserApiKey implements ApiKey {
+  """The API key's ID."""
+  id: ID!
+  """The API key's name, for distinguishing it from other keys."""
+  keyName: String
+  """The value of the API key. **This is a secret credential!**"""
+  token: String!
+}
+
+"""A single user's membership in a single Apollo Studio organization."""
+type UserMembership {
+  """The organization that the user belongs to."""
+  account: Organization!
+  """The timestamp when the user was added to the organization."""
+  createdAt: Timestamp!
+  """The user's permission level within the organization."""
+  permission: UserPermission!
+  """The user that belongs to the organization."""
+  user: User!
+}
+
+type UserMutation {
+  """Creates a new user API key for this user."""
+  newKey(keyName: String!): UserApiKey!
+  """
+  If this user has no active user API keys, this creates one for the user.
+
+  If this user has at least one active user API key, this returns one of those keys at random and does _not_ create a new key.
+  """
+  provisionKey(keyName: String! = "add-a-name"): ApiKeyProvision
+  """Deletes the user API key with the provided ID, if any."""
+  removeKey(
+    """API key ID"""
+    id: ID!
+  ): Void
+  """Sets a new name for the user API key with the provided ID, if any. This does not invalidate the key or change its value."""
+  renameKey(id: ID!, newKeyName: String): UserApiKey
+}
+
+enum UserPermission {
+  BILLING_MANAGER
+  CONSUMER
+  CONTRIBUTOR
+  DOCUMENTER
+  GRAPH_ADMIN
+  LEGACY_GRAPH_KEY
+  OBSERVER
+  ORG_ADMIN
+}
+
+"""An error that occurs when an operation contains invalid user input."""
+type ValidationError implements Error {
+  """The error's details."""
+  message: String!
+}
+
+"""Always null"""
+scalar Void
diff --git a/pkgs/development/tools/rust/typeshare/default.nix b/pkgs/development/tools/rust/typeshare/default.nix
index 6e96bf72bbad2..afcdedd9bf883 100644
--- a/pkgs/development/tools/rust/typeshare/default.nix
+++ b/pkgs/development/tools/rust/typeshare/default.nix
@@ -1,22 +1,37 @@
-{ lib, rustPlatform, fetchCrate }:
+{ lib
+, rustPlatform
+, fetchFromGitHub
+, installShellFiles
+}:
 
 rustPlatform.buildRustPackage rec {
   pname = "typeshare";
-  version = "1.0.1";
+  version = "1.1.0";
 
-  src = fetchCrate {
-    inherit version;
-    pname = "typeshare-cli";
-    sha256 = "sha256-SbTI7170Oc1e09dv4TvUwByG3qkyAL5YXZ96NzI0FSI=";
+  src = fetchFromGitHub {
+    owner = "1password";
+    repo = "typeshare";
+    rev = "v${version}";
+    hash = "sha256-FQ9KL8X7zz3ew+H1lhh4bkZ01Te1TD+QXAMxS8dXAaI=";
   };
 
-  cargoSha256 = "sha256-5EhXw2WcRJqCbdMvOtich9EYQqi0uwCH1a1XXIo8aAo=";
+  cargoHash = "sha256-t6tGNHmPasmTRto2hobvJywrF/8tO79zkfWwa6lCPK8=";
+
+  nativeBuildInputs = [ installShellFiles ];
 
   buildFeatures = [ "go" ];
 
+  postInstall = ''
+    installShellCompletion --cmd typeshare \
+      --bash <($out/bin/typeshare completions bash) \
+      --fish <($out/bin/typeshare completions fish) \
+      --zsh <($out/bin/typeshare completions zsh)
+  '';
+
   meta = with lib; {
     description = "Command Line Tool for generating language files with typeshare";
     homepage = "https://github.com/1password/typeshare";
+    changelog = "https://github.com/1password/typeshare/blob/v${version}/CHANGELOG.md";
     license = with licenses; [ asl20 /* or */ mit ];
     maintainers = with maintainers; [ figsoda ];
   };
diff --git a/pkgs/games/osu-lazer/bin.nix b/pkgs/games/osu-lazer/bin.nix
index 7a62db1003fa0..cf17332785fdf 100644
--- a/pkgs/games/osu-lazer/bin.nix
+++ b/pkgs/games/osu-lazer/bin.nix
@@ -2,11 +2,11 @@
 
 appimageTools.wrapType2 rec {
   pname = "osu-lazer-bin";
-  version = "2023.207.0";
+  version = "2023.301.0";
 
   src = fetchurl {
     url = "https://github.com/ppy/osu/releases/download/${version}/osu.AppImage";
-    sha256 = "sha256-xJQcqNV/Pr3gEGStczc3gv8AYrEKFsAo2g4WtA59fwk=";
+    sha256 = "sha256-0c74bGOY9f2K52xE7CZy/i3OfyCC+a6XGI30c6hI7jM=";
   };
 
   extraPkgs = pkgs: with pkgs; [ icu ];
diff --git a/pkgs/games/osu-lazer/default.nix b/pkgs/games/osu-lazer/default.nix
index 3a56c35fe88da..f3aea2a333af0 100644
--- a/pkgs/games/osu-lazer/default.nix
+++ b/pkgs/games/osu-lazer/default.nix
@@ -17,13 +17,13 @@
 
 buildDotnetModule rec {
   pname = "osu-lazer";
-  version = "2023.207.0";
+  version = "2023.301.0";
 
   src = fetchFromGitHub {
     owner = "ppy";
     repo = "osu";
     rev = version;
-    sha256 = "sha256-s0gzSfj4+xk3joS7S68ZGjgatiJY2Y1FBCmrhptaWIk=";
+    sha256 = "sha256-SUVxe3PdUch8NYR7X4fatbmSpyYewI69usBDICcSq3s=";
   };
 
   projectFile = "osu.Desktop/osu.Desktop.csproj";
diff --git a/pkgs/games/osu-lazer/deps.nix b/pkgs/games/osu-lazer/deps.nix
index 908140259b1e5..753f1cd9af1a9 100644
--- a/pkgs/games/osu-lazer/deps.nix
+++ b/pkgs/games/osu-lazer/deps.nix
@@ -2,10 +2,10 @@
 # Please dont edit it manually, your changes might get overwritten!
 
 { fetchNuGet }: [
-  (fetchNuGet { pname = "AutoMapper"; version = "11.0.1"; sha256 = "1z1x5c1dkwk6142km5q6jglhpq9x82alwjjy5a72c8qnq9ppdfg3"; })
+  (fetchNuGet { pname = "AutoMapper"; version = "12.0.1"; sha256 = "0s0wjl4ck3sal8a50x786wxs9mbca7bxaqk3558yx5wpld4h4z3b"; })
   (fetchNuGet { pname = "Clowd.Squirrel"; version = "2.9.42"; sha256 = "1xxrr9jmgn343d467nz40569mkybinnmxaxyc4fhgy6yddvzk1y0"; })
   (fetchNuGet { pname = "DiffPlex"; version = "1.7.1"; sha256 = "1q78r70pirgb7j5wkh454ws237lihh0fig212cpbj02cz53c2h6j"; })
-  (fetchNuGet { pname = "DiscordRichPresence"; version = "1.1.1.14"; sha256 = "18adkrddjlci5ajs17ck1c8cd8id3cgjylqvfggyqwrmsh7yr4j6"; })
+  (fetchNuGet { pname = "DiscordRichPresence"; version = "1.1.3.18"; sha256 = "0p4bhaggjjfd4gl06yiphqgncxgcq2bws4sjkrw0n2ldf3hgrps3"; })
   (fetchNuGet { pname = "FFmpeg.AutoGen"; version = "4.3.0.1"; sha256 = "0n6x57mnnvcjnrs8zyvy07h5zm4bcfy9gh4n4bvd9fx5ys4pxkvv"; })
   (fetchNuGet { pname = "Fody"; version = "6.6.4"; sha256 = "1hhdwj0ska7dvak9hki8cnyfmmw5r8yw8w24gzsdwhqx68dnrvsx"; })
   (fetchNuGet { pname = "HidSharpCore"; version = "1.2.1.1"; sha256 = "1zkndglmz0s8rblfhnqcvv90rkq2i7lf4bc380g7z8h1avf2ikll"; })
@@ -63,38 +63,37 @@
   (fetchNuGet { pname = "JetBrains.Annotations"; version = "2021.3.0"; sha256 = "01ssylllbwpana2w3iybi533zlvcsbhzjc8kr0g4kg307kjbfn8v"; })
   (fetchNuGet { pname = "managed-midi"; version = "1.10.0"; sha256 = "1rih8iq8k4j6n3206d2j7z4vygp725kzs95c6yc7p1mlhfiiimvq"; })
   (fetchNuGet { pname = "Markdig"; version = "0.23.0"; sha256 = "1bwn885w7balwncmr764vidyyp9bixqlq6r3lhsapj8ykrpxxa70"; })
-  (fetchNuGet { pname = "MessagePack"; version = "2.4.35"; sha256 = "0y8pz073ync51cv39lxldc797nmcm39r4pdhy2il6r95rppjqg5h"; })
-  (fetchNuGet { pname = "MessagePack.Annotations"; version = "2.4.35"; sha256 = "1jny2r6rwq7xzwymm779w9x8a5rhyln97mxzplxwd53wwbb0wbzd"; })
-  (fetchNuGet { pname = "Microsoft.AspNetCore.Connections.Abstractions"; version = "6.0.10"; sha256 = "1wic0bghgwg2r8q676miv3kk7ph5g46kvkw1iljr4b8s58mqbwas"; })
-  (fetchNuGet { pname = "Microsoft.AspNetCore.Http.Connections.Client"; version = "6.0.10"; sha256 = "1a8m44qgjwfhmqpfsyyb1hgak3sh99s62hnfmphxsflfvx611mbb"; })
-  (fetchNuGet { pname = "Microsoft.AspNetCore.Http.Connections.Common"; version = "6.0.10"; sha256 = "0vqc62xjiwlqwifx3nj0nwssjrdqka2avpqiiwylsbd48s1ahxdy"; })
-  (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Client"; version = "6.0.10"; sha256 = "090ggwxv2j86hkmnzqxa728wpn5g30dfqd05widhd7n1m51igq71"; })
-  (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Client.Core"; version = "6.0.10"; sha256 = "13i22fkai420fvr71c3pfnadspcv8jpf5bci9fn3yh580bfqw21a"; })
-  (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Common"; version = "6.0.10"; sha256 = "0kmy2h310hqpr6bgd128r4q7ny4i7qjfvgrv1swhqv2j9n1yriby"; })
-  (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.Json"; version = "6.0.10"; sha256 = "13q429kwbijyfgpb4dp04lr2c691ra5br5wf8g7s260pij10x1nz"; })
-  (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.MessagePack"; version = "6.0.10"; sha256 = "068gw5q25yaf5k5c96kswmna1jixpw6s82r7gmgnw54rcc8gdz3f"; })
-  (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.NewtonsoftJson"; version = "6.0.10"; sha256 = "1k7jvvvz8wwbd1bw1shcgrgz2gw3l877krhw39b9sj2vbwzc8bn7"; })
-  (fetchNuGet { pname = "Microsoft.CodeAnalysis.BannedApiAnalyzers"; version = "3.3.3"; sha256 = "1z6x0d8lpcfjr3sxy25493i17vvcg5bsay6c03qan6mnj5aqzw2k"; })
+  (fetchNuGet { pname = "MessagePack"; version = "2.4.59"; sha256 = "13igx5m5hkqqyhyw04z2nwfxn2jwlrpvvwx4c8qrayv9j4l31ajm"; })
+  (fetchNuGet { pname = "MessagePack.Annotations"; version = "2.4.59"; sha256 = "1y8mg95x87jddk0hyf58cc1zy666mqbla7479njkm7kmpwz61s8c"; })
+  (fetchNuGet { pname = "Microsoft.AspNetCore.Connections.Abstractions"; version = "7.0.2"; sha256 = "1k5gjiwmcrbwfz54jafz6mmf4md7jgk3j8jdpp9ax72glwa7ia4a"; })
+  (fetchNuGet { pname = "Microsoft.AspNetCore.Http.Connections.Client"; version = "7.0.2"; sha256 = "0rnra67gkg0qs7wys8bacm1raf9khb688ch2yr56m88kwdk5bhw4"; })
+  (fetchNuGet { pname = "Microsoft.AspNetCore.Http.Connections.Common"; version = "7.0.2"; sha256 = "19dviyc68m56mmy05lylhp2bxvww2gqx1y07kc0yqp61rcjb1d85"; })
+  (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Client"; version = "7.0.2"; sha256 = "0ms9syxlxk6f5pxjw23s2cz4ld60vk84v67l0bhnnb8v42rz97nn"; })
+  (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Client.Core"; version = "7.0.2"; sha256 = "15qs3pdji2sd629as4i8zd5bjbs165waim9jypxqjkb55bslz8d7"; })
+  (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Common"; version = "7.0.2"; sha256 = "0c3ia03m1shc2xslqln5m986kpvc1dqb15j85vqxbzb0jj6fr52y"; })
+  (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.Json"; version = "7.0.2"; sha256 = "028r8sk5dlxkfxw6wz2ys62rm9dqa85s6rfhilrfy1phsl47rkal"; })
+  (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.MessagePack"; version = "7.0.2"; sha256 = "1zkznsq5r7gg2pnlj9y7swrbvzyywf6q5xf9ggcwbvccwp0g6jr4"; })
+  (fetchNuGet { pname = "Microsoft.AspNetCore.SignalR.Protocols.NewtonsoftJson"; version = "7.0.2"; sha256 = "1x5pymqc315nb8z2414dvqdpcfd5zy5slcfa9b3vjhrbbbngaly7"; })
+  (fetchNuGet { pname = "Microsoft.CodeAnalysis.BannedApiAnalyzers"; version = "3.3.4"; sha256 = "1vzrni7n94f17bzc13lrvcxvgspx9s25ap1p005z6i1ikx6wgx30"; })
   (fetchNuGet { pname = "Microsoft.CSharp"; version = "4.5.0"; sha256 = "01i28nvzccxbqmiz217fxs6hnjwmd5fafs37rd49a6qp53y6623l"; })
   (fetchNuGet { pname = "Microsoft.CSharp"; version = "4.7.0"; sha256 = "0gd67zlw554j098kabg887b5a6pq9kzavpa3jjy5w53ccjzjfy8j"; })
-  (fetchNuGet { pname = "Microsoft.Data.Sqlite.Core"; version = "6.0.10"; sha256 = "1sdh5rw2pyg6c64z0haxf57bakd5kwaav624vlqif1m59iz26rag"; })
+  (fetchNuGet { pname = "Microsoft.Data.Sqlite.Core"; version = "7.0.2"; sha256 = "0xipbci6pshj825a1r8nlc19hf26n4ba33sx7dbx727ja5lyjv8m"; })
   (fetchNuGet { pname = "Microsoft.Diagnostics.NETCore.Client"; version = "0.2.61701"; sha256 = "1ic1607jj4ln8dbibf1fz5v9svk9x2kqlgvhndc6ijaqnbc4wcr1"; })
   (fetchNuGet { pname = "Microsoft.Diagnostics.Runtime"; version = "2.0.161401"; sha256 = "02qcm8nv1ch07g8b0i60ynrjn33b8y5ivyk4rxal3vd9zfi6pvwi"; })
   (fetchNuGet { pname = "Microsoft.DotNet.PlatformAbstractions"; version = "2.0.3"; sha256 = "020214swxm0hip1d9gjskrzmqzjnji7c6l5b3xcch8vp166066m9"; })
-  (fetchNuGet { pname = "Microsoft.Extensions.Configuration.Abstractions"; version = "6.0.0"; sha256 = "0w6wwxv12nbc3sghvr68847wc9skkdgsicrz3fx4chgng1i3xy0j"; })
+  (fetchNuGet { pname = "Microsoft.Extensions.Configuration.Abstractions"; version = "7.0.0"; sha256 = "1as8cygz0pagg17w22nsf6mb49lr2mcl1x8i3ad1wi8lyzygy1a3"; })
   (fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection"; version = "6.0.0-rc.1.21451.13"; sha256 = "0r6945jq7c2f1wjifq514zvngicndjqfnsjya6hqw0yzah0jr56c"; })
-  (fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection"; version = "6.0.1"; sha256 = "0kl5ypidmzllyxb91gwy3z950dc416p1y8wikzbdbp0l7aaaxq2p"; })
-  (fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection.Abstractions"; version = "6.0.0"; sha256 = "1vi67fw7q99gj7jd64gnnfr4d2c0ijpva7g9prps48ja6g91x6a9"; })
+  (fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection"; version = "7.0.0"; sha256 = "121zs4jp8iimgbpzm3wsglhjwkc06irg1pxy8c1zcdlsg34cfq1p"; })
   (fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection.Abstractions"; version = "6.0.0-rc.1.21451.13"; sha256 = "11dg16x6g0gssb143qpghxz1s41himvhr7yhjwxs9hacx4ij2dm1"; })
+  (fetchNuGet { pname = "Microsoft.Extensions.DependencyInjection.Abstractions"; version = "7.0.0"; sha256 = "181d7mp9307fs17lyy42f8cxnjwysddmpsalky4m0pqxcimnr6g7"; })
   (fetchNuGet { pname = "Microsoft.Extensions.DependencyModel"; version = "2.0.3"; sha256 = "0dpyjp0hy9kkvk2dd4dclfmb10yq5avsw2a6v8nra9g6ii2p1nla"; })
-  (fetchNuGet { pname = "Microsoft.Extensions.Features"; version = "6.0.10"; sha256 = "10avgg7c4iggq3i7gba0srd01fip637mmc903ymdpa2c92qgkqr8"; })
-  (fetchNuGet { pname = "Microsoft.Extensions.Logging"; version = "6.0.0"; sha256 = "0fd9jii3y3irfcwlsiww1y9npjgabzarh33rn566wpcz24lijszi"; })
-  (fetchNuGet { pname = "Microsoft.Extensions.Logging.Abstractions"; version = "6.0.0"; sha256 = "0b75fmins171zi6bfdcq1kcvyrirs8n91mknjnxy4c3ygi1rrnj0"; })
-  (fetchNuGet { pname = "Microsoft.Extensions.Logging.Abstractions"; version = "6.0.2"; sha256 = "1wv54f3p3r2zj1pr9a6z8zqrh2ihm6v6qcw2pjwis1lcc0qb472m"; })
+  (fetchNuGet { pname = "Microsoft.Extensions.Features"; version = "7.0.2"; sha256 = "18ipxpw73wi5gdj7vxhmqgk8rl3l95w6h5ajxbccdfyv5p75v66d"; })
+  (fetchNuGet { pname = "Microsoft.Extensions.Logging"; version = "7.0.0"; sha256 = "1bqd3pqn5dacgnkq0grc17cgb2i0w8z1raw12nwm3p3zhrfcvgxf"; })
+  (fetchNuGet { pname = "Microsoft.Extensions.Logging.Abstractions"; version = "7.0.0"; sha256 = "1gn7d18i1wfy13vrwhmdv1rmsb4vrk26kqdld4cgvh77yigj90xs"; })
   (fetchNuGet { pname = "Microsoft.Extensions.ObjectPool"; version = "5.0.11"; sha256 = "0i7li76gmk6hml12aig4cvyvja9mgl16qr8pkwvx5vm6lc9a3nn4"; })
-  (fetchNuGet { pname = "Microsoft.Extensions.Options"; version = "6.0.0"; sha256 = "008pnk2p50i594ahz308v81a41mbjz9mwcarqhmrjpl2d20c868g"; })
-  (fetchNuGet { pname = "Microsoft.Extensions.Primitives"; version = "6.0.0"; sha256 = "1kjiw6s4yfz9gm7mx3wkhp06ghnbs95icj9hi505shz9rjrg42q2"; })
-  (fetchNuGet { pname = "Microsoft.NET.StringTools"; version = "1.0.0"; sha256 = "06yakiyzgss399giivfx6xdrnfxqfsvy5fzm90scjanvandv0sdj"; })
+  (fetchNuGet { pname = "Microsoft.Extensions.Options"; version = "7.0.0"; sha256 = "0b90zkrsk5dw3wr749rbynhpxlg4bgqdnd7d5vdlw2g9c7zlhgx6"; })
+  (fetchNuGet { pname = "Microsoft.Extensions.Primitives"; version = "7.0.0"; sha256 = "1b4km9fszid9vp2zb3gya5ni9fn8bq62bzaas2ck2r7gs0sdys80"; })
+  (fetchNuGet { pname = "Microsoft.NET.StringTools"; version = "17.4.0"; sha256 = "1smx30nq22plrn2mw4wb5vfgxk6hyx12b60c4wabmpnr81lq3nzv"; })
   (fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "1.0.1"; sha256 = "01al6cfxp68dscl15z7rxfw9zvhm64dncsw09a1vmdkacsa2v6lr"; })
   (fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "1.1.0"; sha256 = "08vh1r12g6ykjygq5d3vq09zylgb84l63k49jc4v8faw9g93iqqm"; })
   (fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "2.0.0"; sha256 = "1fk2fk2639i7nzy58m9dvpdnzql4vb8yl8vr19r2fp8lmj9w2jr0"; })
@@ -110,7 +109,7 @@
   (fetchNuGet { pname = "NativeLibraryLoader"; version = "1.0.12"; sha256 = "1nkn5iylxj8i7355cljfvrn3ha7ylf30dh8f63zhybc2vb8hbpkk"; })
   (fetchNuGet { pname = "NETStandard.Library"; version = "1.6.1"; sha256 = "1z70wvsx2d847a2cjfii7b83pjfs34q05gb037fdjikv5kbagml8"; })
   (fetchNuGet { pname = "NETStandard.Library"; version = "2.0.0"; sha256 = "1bc4ba8ahgk15m8k4nd7x406nhi0kwqzbgjk2dmw52ss553xz7iy"; })
-  (fetchNuGet { pname = "Newtonsoft.Json"; version = "12.0.2"; sha256 = "0w2fbji1smd2y7x25qqibf1qrznmv4s6s0jvrbvr6alb7mfyqvh5"; })
+  (fetchNuGet { pname = "Newtonsoft.Json"; version = "13.0.1"; sha256 = "0fijg0w6iwap8gvzyjnndds0q4b8anwxxvik7y8vgq97dram4srb"; })
   (fetchNuGet { pname = "Newtonsoft.Json"; version = "13.0.2"; sha256 = "1p9splg1min274dpz7xdfgzrwkyfd3xlkygwpr1xgjvvyjvs6b0i"; })
   (fetchNuGet { pname = "NuGet.Common"; version = "5.11.0"; sha256 = "1amf6scr5mcjdvd1fflag6i4qjwmydq5qwp6g3f099n901zq0dr3"; })
   (fetchNuGet { pname = "NuGet.Configuration"; version = "5.11.0"; sha256 = "1s9pbrh7xy9jz7npz0sahdsj1cw8gfx1fwf3knv0ms1n0c9bk53l"; })
@@ -130,15 +129,15 @@
   (fetchNuGet { pname = "ppy.ManagedBass"; version = "2022.1216.0"; sha256 = "19nnj1hq2v21mrplnivjr9c4y3wg4hhfnc062sjgzkmiv1cchvf8"; })
   (fetchNuGet { pname = "ppy.ManagedBass.Fx"; version = "2022.1216.0"; sha256 = "1vw573mkligpx9qiqasw1683cqaa1kgnxhlnbdcj9c4320b1pwjm"; })
   (fetchNuGet { pname = "ppy.ManagedBass.Mix"; version = "2022.1216.0"; sha256 = "185bpvgbnd8y20r7vxb1an4pd1aal9b7b5wvmv3knz0qg8j0chd9"; })
-  (fetchNuGet { pname = "ppy.osu.Framework"; version = "2023.131.0"; sha256 = "1mbgcg0c8w6114c36jxypz7z1yps5zgw3f2lxw75fra0rylwqm23"; })
+  (fetchNuGet { pname = "ppy.osu.Framework"; version = "2023.228.0"; sha256 = "1acr957wlpgwng6mvyh6m1wv59ljvk9wh2aclds8ary8li00skdb"; })
   (fetchNuGet { pname = "ppy.osu.Framework.NativeLibs"; version = "2022.525.0"; sha256 = "1zsqj3xng06bb46vg79xx35n2dsh3crqg951r1ga2gxqzgzy4nk0"; })
   (fetchNuGet { pname = "ppy.osu.Framework.SourceGeneration"; version = "2022.1222.1"; sha256 = "1pwwsp4rfzl6166mhrn5lsnyazpckhfh1m6ggf9d1lw2wb58vxfr"; })
-  (fetchNuGet { pname = "ppy.osu.Game.Resources"; version = "2023.202.0"; sha256 = "13apknxly9fqqchmdvkdgfq2jbimln0ixg2d7yn6jcfd235279mj"; })
+  (fetchNuGet { pname = "ppy.osu.Game.Resources"; version = "2023.228.0"; sha256 = "12i5z7pkm03zc34q162qjas20v4d9rd1qwbwz1l4iyv010riaa43"; })
   (fetchNuGet { pname = "ppy.osuTK.NS20"; version = "1.0.211"; sha256 = "0j4a9n39pqm0cgdcps47p5n2mqph3h94r7hmf0bs59imif4jxvjy"; })
   (fetchNuGet { pname = "ppy.SDL2-CS"; version = "1.0.630-alpha"; sha256 = "0jrf70jrz976b49ac0ygfy9qph2w7fnbfrqv0g0x7hlpaip33ra8"; })
-  (fetchNuGet { pname = "Realm"; version = "10.18.0"; sha256 = "0dzwpcqkp8x8zah1bpx8cf01w4j1vi4gvipmaxlxczrc8p0f9zws"; })
-  (fetchNuGet { pname = "Realm.Fody"; version = "10.18.0"; sha256 = "1d2y7kz1jp1b11kskgk0fpp6ci17aqkrhzdfq5vcr4y7a8hbi9j5"; })
-  (fetchNuGet { pname = "Realm.SourceGenerator"; version = "10.18.0"; sha256 = "10bj3mgxdxgwsnpgbvlpnsj5ha582dvkvjnhb4qk7558g262dia8"; })
+  (fetchNuGet { pname = "Realm"; version = "10.20.0"; sha256 = "0gy0l2r7726wb6i599n55dn9035h0g7k0binfiy2dy9bjwz60jqk"; })
+  (fetchNuGet { pname = "Realm.Fody"; version = "10.20.0"; sha256 = "0rwcbbzr41iww3k59rjgy5xy7bna1x906h5blbllpywgpc2l5afw"; })
+  (fetchNuGet { pname = "Realm.SourceGenerator"; version = "10.20.0"; sha256 = "0y0bwqg87pmsld7cmawwwz2ps5lpkbyyzkb9cj0fbynsn4jdygg0"; })
   (fetchNuGet { pname = "Remotion.Linq"; version = "2.2.0"; sha256 = "1y46ni0xswmmiryp8sydjgryafwn458dr91f9xn653w73kdyk4xf"; })
   (fetchNuGet { pname = "runtime.any.System.Collections"; version = "4.3.0"; sha256 = "0bv5qgm6vr47ynxqbnkc7i797fdi8gbjjxii173syrx14nmrkwg0"; })
   (fetchNuGet { pname = "runtime.any.System.Diagnostics.Tools"; version = "4.3.0"; sha256 = "1wl76vk12zhdh66vmagni66h5xbhgqq7zkdpgw21jhxhvlbcl8pk"; })
@@ -182,18 +181,18 @@
   (fetchNuGet { pname = "runtime.unix.System.Net.Sockets"; version = "4.3.0"; sha256 = "03npdxzy8gfv035bv1b9rz7c7hv0rxl5904wjz51if491mw0xy12"; })
   (fetchNuGet { pname = "runtime.unix.System.Private.Uri"; version = "4.3.0"; sha256 = "1jx02q6kiwlvfksq1q9qr17fj78y5v6mwsszav4qcz9z25d5g6vk"; })
   (fetchNuGet { pname = "runtime.unix.System.Runtime.Extensions"; version = "4.3.0"; sha256 = "0pnxxmm8whx38dp6yvwgmh22smknxmqs5n513fc7m4wxvs1bvi4p"; })
-  (fetchNuGet { pname = "Sentry"; version = "3.23.1"; sha256 = "0cch803ixx5vqfm2zv5qdkkyksh1184669r1109snbkvvv5qy1g9"; })
+  (fetchNuGet { pname = "Sentry"; version = "3.28.1"; sha256 = "09xl3bm5clqxnn8wyy36zwmj8ai8zci6ngw64d0r3rzgd95gbf61"; })
   (fetchNuGet { pname = "SharpCompress"; version = "0.31.0"; sha256 = "01az7amjkxjbya5rdcqwxzrh2d3kybf1gsd3617rsxvvxadyra1r"; })
   (fetchNuGet { pname = "SharpCompress"; version = "0.32.2"; sha256 = "1p198bl08ia89rf4n6yjpacj3yrz6s574snsfl40l8vlqcdrc1pm"; })
   (fetchNuGet { pname = "SharpFNT"; version = "2.0.0"; sha256 = "1bgacgh9hbck0qvji6frbb50sdiqfdng2fvvfgfw8b9qaql91mx0"; })
   (fetchNuGet { pname = "SharpGen.Runtime"; version = "2.0.0-beta.10"; sha256 = "0yxq0b4m96z71afc7sywfrlwz2pgr5nilacmssjk803v70f0ydr1"; })
   (fetchNuGet { pname = "SharpGen.Runtime.COM"; version = "2.0.0-beta.10"; sha256 = "1qvpphja72x9r3yi96bnmwwy30b1n155v2yy2gzlxjil6qg3xjmb"; })
   (fetchNuGet { pname = "SixLabors.ImageSharp"; version = "2.1.0"; sha256 = "0lmj3qs39v5jcf2rjwav43nqnc7g6sd4l226l2jw85nidzmpvkwr"; })
-  (fetchNuGet { pname = "SQLitePCLRaw.bundle_e_sqlite3"; version = "2.1.2"; sha256 = "07rc4pj3rphi8nhzkcvilnm0fv27qcdp68jdwk4g0zjk7yfvbcay"; })
-  (fetchNuGet { pname = "SQLitePCLRaw.core"; version = "2.0.6"; sha256 = "1w4iyg0v1v1z2m7akq7rv8lsgixp2m08732vr14vgpqs918bsy1i"; })
+  (fetchNuGet { pname = "SQLitePCLRaw.bundle_e_sqlite3"; version = "2.1.4"; sha256 = "0shdspl9cm71wwqg9103s44r0l01r3sgnpxr523y4a0wlgac50g0"; })
   (fetchNuGet { pname = "SQLitePCLRaw.core"; version = "2.1.2"; sha256 = "19hxv895lairrjmk4gkzd3mcb6b0na45xn4n551h4kckplqadg3d"; })
-  (fetchNuGet { pname = "SQLitePCLRaw.lib.e_sqlite3"; version = "2.1.2"; sha256 = "0jn98bkjk8h4smi09z31ib6s6392054lwmkziqmkqf5gf614k2fz"; })
-  (fetchNuGet { pname = "SQLitePCLRaw.provider.e_sqlite3"; version = "2.1.2"; sha256 = "0bnm2fhvcsyg5ry74gal2cziqnyf5a8d2cb491vsa7j41hbbx7kv"; })
+  (fetchNuGet { pname = "SQLitePCLRaw.core"; version = "2.1.4"; sha256 = "09akxz92qipr1cj8mk2hw99i0b81wwbwx26gpk21471zh543f8ld"; })
+  (fetchNuGet { pname = "SQLitePCLRaw.lib.e_sqlite3"; version = "2.1.4"; sha256 = "11l85ksv1ck46j8z08fyf0c3l572zmp9ynb7p5chm5iyrh8xwkkn"; })
+  (fetchNuGet { pname = "SQLitePCLRaw.provider.e_sqlite3"; version = "2.1.4"; sha256 = "0b8f51nrjkq0pmfzjaqk5rp7r0cp2lbdm2whynj3xsjklppzmn35"; })
   (fetchNuGet { pname = "StbiSharp"; version = "1.1.0"; sha256 = "0wbw20m7nyhxj32k153l668sxigamlwig0qpz8l8d0jqz35vizm0"; })
   (fetchNuGet { pname = "System.AppContext"; version = "4.1.0"; sha256 = "0fv3cma1jp4vgj7a8hqc9n7hr1f1kjp541s6z0q1r6nazb4iz9mz"; })
   (fetchNuGet { pname = "System.AppContext"; version = "4.3.0"; sha256 = "1649qvy3dar900z3g817h17nl8jp4ka5vcfmsr05kh0fshn7j3ya"; })
@@ -209,7 +208,6 @@
   (fetchNuGet { pname = "System.Diagnostics.Debug"; version = "4.0.11"; sha256 = "0gmjghrqmlgzxivd2xl50ncbglb7ljzb66rlx8ws6dv8jm0d5siz"; })
   (fetchNuGet { pname = "System.Diagnostics.Debug"; version = "4.3.0"; sha256 = "00yjlf19wjydyr6cfviaph3vsjzg3d5nvnya26i2fvfg53sknh3y"; })
   (fetchNuGet { pname = "System.Diagnostics.DiagnosticSource"; version = "4.3.0"; sha256 = "0z6m3pbiy0qw6rn3n209rrzf9x1k4002zh90vwcrsym09ipm2liq"; })
-  (fetchNuGet { pname = "System.Diagnostics.DiagnosticSource"; version = "6.0.0"; sha256 = "0rrihs9lnb1h6x4h0hn6kgfnh58qq7hx8qq99gh6fayx4dcnx3s5"; })
   (fetchNuGet { pname = "System.Diagnostics.Tools"; version = "4.3.0"; sha256 = "0in3pic3s2ddyibi8cvgl102zmvp9r9mchh82ns9f0ms4basylw1"; })
   (fetchNuGet { pname = "System.Diagnostics.Tracing"; version = "4.3.0"; sha256 = "1m3bx6c2s958qligl67q7grkwfz3w53hpy7nc97mh6f7j5k168c4"; })
   (fetchNuGet { pname = "System.Dynamic.Runtime"; version = "4.0.11"; sha256 = "1pla2dx8gkidf7xkciig6nifdsb494axjvzvann8g2lp3dbqasm9"; })
@@ -226,8 +224,8 @@
   (fetchNuGet { pname = "System.IO.FileSystem"; version = "4.0.1"; sha256 = "0kgfpw6w4djqra3w5crrg8xivbanh1w9dh3qapb28q060wb9flp1"; })
   (fetchNuGet { pname = "System.IO.FileSystem"; version = "4.3.0"; sha256 = "0z2dfrbra9i6y16mm9v1v6k47f0fm617vlb7s5iybjjsz6g1ilmw"; })
   (fetchNuGet { pname = "System.IO.FileSystem.Primitives"; version = "4.3.0"; sha256 = "0j6ndgglcf4brg2lz4wzsh1av1gh8xrzdsn9f0yznskhqn1xzj9c"; })
-  (fetchNuGet { pname = "System.IO.Packaging"; version = "6.0.0"; sha256 = "112nq0k2jc4vh71rifqqmpjxkaanxfapk7g8947jkfgq3lmfmaac"; })
-  (fetchNuGet { pname = "System.IO.Pipelines"; version = "6.0.3"; sha256 = "1jgdazpmwc21dd9naq3l9n5s8a1jnbwlvgkf1pnm0aji6jd4xqdz"; })
+  (fetchNuGet { pname = "System.IO.Packaging"; version = "7.0.0"; sha256 = "16fgj2ab5ci217shmfsi6c0rnmkh90h6vyb60503nhpmh7y8di13"; })
+  (fetchNuGet { pname = "System.IO.Pipelines"; version = "7.0.0"; sha256 = "1ila2vgi1w435j7g2y7ykp2pdbh9c5a02vm85vql89az93b7qvav"; })
   (fetchNuGet { pname = "System.Linq"; version = "4.1.0"; sha256 = "1ppg83svb39hj4hpp5k7kcryzrf3sfnm08vxd5sm2drrijsla2k5"; })
   (fetchNuGet { pname = "System.Linq"; version = "4.3.0"; sha256 = "1w0gmba695rbr80l1k2h4mrwzbzsyfl2z4klmpbsvsg5pm4a56s7"; })
   (fetchNuGet { pname = "System.Linq.Expressions"; version = "4.1.0"; sha256 = "1gpdxl6ip06cnab7n3zlcg6mqp7kknf73s8wjinzi4p0apw82fpg"; })
@@ -235,6 +233,7 @@
   (fetchNuGet { pname = "System.Linq.Queryable"; version = "4.0.1"; sha256 = "11jn9k34g245yyf260gr3ldzvaqa9477w2c5nhb1p8vjx4xm3qaw"; })
   (fetchNuGet { pname = "System.Memory"; version = "4.5.3"; sha256 = "0naqahm3wljxb5a911d37mwjqjdxv9l0b49p5dmfyijvni2ppy8a"; })
   (fetchNuGet { pname = "System.Memory"; version = "4.5.4"; sha256 = "14gbbs22mcxwggn0fcfs1b062521azb9fbb7c113x0mq6dzq9h6y"; })
+  (fetchNuGet { pname = "System.Memory"; version = "4.5.5"; sha256 = "08jsfwimcarfzrhlyvjjid61j02irx6xsklf32rv57x2aaikvx0h"; })
   (fetchNuGet { pname = "System.Net.Http"; version = "4.3.0"; sha256 = "1i4gc757xqrzflbk7kc5ksn20kwwfjhw9w7pgdkn19y3cgnl302j"; })
   (fetchNuGet { pname = "System.Net.NameResolution"; version = "4.3.0"; sha256 = "15r75pwc0rm3vvwsn8rvm2krf929mjfwliv0mpicjnii24470rkq"; })
   (fetchNuGet { pname = "System.Net.Primitives"; version = "4.3.0"; sha256 = "0c87k50rmdgmxx7df2khd9qj7q35j9rzdmm2572cc55dygmdk3ii"; })
@@ -294,10 +293,12 @@
   (fetchNuGet { pname = "System.Text.Encoding"; version = "4.3.0"; sha256 = "1f04lkir4iladpp51sdgmis9dj4y8v08cka0mbmsy0frc9a4gjqr"; })
   (fetchNuGet { pname = "System.Text.Encoding.CodePages"; version = "5.0.0"; sha256 = "1bn2pzaaq4wx9ixirr8151vm5hynn3lmrljcgjx9yghmm4k677k0"; })
   (fetchNuGet { pname = "System.Text.Encoding.Extensions"; version = "4.3.0"; sha256 = "11q1y8hh5hrp5a3kw25cb6l00v5l5dvirkz8jr3sq00h1xgcgrxy"; })
+  (fetchNuGet { pname = "System.Text.Encodings.Web"; version = "7.0.0"; sha256 = "1151hbyrcf8kyg1jz8k9awpbic98lwz9x129rg7zk1wrs6vjlpxl"; })
+  (fetchNuGet { pname = "System.Text.Json"; version = "7.0.1"; sha256 = "1lqh6nrrkx4sksvn5509y6j9z8zkhcls0yghd0n31zywmmy3pnf2"; })
   (fetchNuGet { pname = "System.Text.RegularExpressions"; version = "4.3.0"; sha256 = "1bgq51k7fwld0njylfn7qc5fmwrk2137gdq7djqdsw347paa9c2l"; })
   (fetchNuGet { pname = "System.Threading"; version = "4.0.11"; sha256 = "19x946h926bzvbsgj28csn46gak2crv2skpwsx80hbgazmkgb1ls"; })
   (fetchNuGet { pname = "System.Threading"; version = "4.3.0"; sha256 = "0rw9wfamvhayp5zh3j7p1yfmx9b5khbf4q50d8k5rk993rskfd34"; })
-  (fetchNuGet { pname = "System.Threading.Channels"; version = "6.0.0"; sha256 = "1qbyi7yymqc56frqy7awvcqc1m7x3xrpx87a37dgb3mbrjg9hlcj"; })
+  (fetchNuGet { pname = "System.Threading.Channels"; version = "7.0.0"; sha256 = "1qrmqa6hpzswlmyp3yqsbnmia9i5iz1y208xpqc1y88b1f6j1v8a"; })
   (fetchNuGet { pname = "System.Threading.Tasks"; version = "4.0.11"; sha256 = "0nr1r41rak82qfa5m0lhk9mp0k93bvfd7bbd9sdzwx9mb36g28p5"; })
   (fetchNuGet { pname = "System.Threading.Tasks"; version = "4.3.0"; sha256 = "134z3v9abw3a6jsw17xl3f6hqjpak5l682k2vz39spj4kmydg6k7"; })
   (fetchNuGet { pname = "System.Threading.Tasks.Extensions"; version = "4.3.0"; sha256 = "1xxcx2xh8jin360yjwm4x4cf5y3a2bwpn2ygkfkwkicz7zk50s2z"; })
diff --git a/pkgs/games/unciv/default.nix b/pkgs/games/unciv/default.nix
index 96a1ebf3f648d..e616033d685fd 100644
--- a/pkgs/games/unciv/default.nix
+++ b/pkgs/games/unciv/default.nix
@@ -25,11 +25,11 @@ let
 in
 stdenv.mkDerivation rec {
   pname = "unciv";
-  version = "4.5.1";
+  version = "4.5.2";
 
   src = fetchurl {
     url = "https://github.com/yairm210/Unciv/releases/download/${version}/Unciv.jar";
-    hash = "sha256-bghFcLwfEonsBjB9Erhib45soR2UIOr4cYz5ROUOTFo=";
+    hash = "sha256-HhMccVlpIJoGW3LLqg1clw+dWcRUVHwCgtrmBcOXFSE=";
   };
 
   dontUnpack = true;
diff --git a/pkgs/misc/fastly/default.nix b/pkgs/misc/fastly/default.nix
index 1432e37cfd213..e8db6a8eeccfe 100644
--- a/pkgs/misc/fastly/default.nix
+++ b/pkgs/misc/fastly/default.nix
@@ -8,13 +8,13 @@
 
 buildGoModule rec {
   pname = "fastly";
-  version = "7.0.0";
+  version = "7.0.1";
 
   src = fetchFromGitHub {
     owner = "fastly";
     repo = "cli";
     rev = "refs/tags/v${version}";
-    hash = "sha256-+fXx/fVXOAjwo+TLP1pDTVge3VSvTLSGzTv7dSTeXxc=";
+    hash = "sha256-6M1vdwtw+qjKldW642JN2sanaKV3u9J1qWAoLRNupzE=";
     # The git commit is part of the `fastly version` original output;
     # leave that output the same in nixpkgs. Use the `.git` directory
     # to retrieve the commit SHA, and remove the directory afterwards,
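The comment above describes a common nixpkgs pattern: keep the `.git` directory just long enough to record the commit, then delete it so the fixed-output source stays reproducible. A minimal, hypothetical sketch of that pattern (not the actual fastly expression; `leaveDotGit` and `postFetch` are real fetcher arguments, the file name and hash placeholder are illustrative):

    { fetchFromGitHub, lib }:

    fetchFromGitHub {
      owner = "fastly";
      repo = "cli";
      rev = "refs/tags/v7.0.1";
      hash = lib.fakeHash;      # placeholder; substitute the real source hash
      leaveDotGit = true;       # keep .git so the commit SHA can still be read in postFetch
      postFetch = ''
        cd $out
        git rev-parse --short HEAD > COMMIT   # record the SHA (e.g. for `fastly version`)
        rm -rf .git                           # drop .git so the output stays reproducible
      '';
    }
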
diff --git a/pkgs/os-specific/linux/firmware/system76-firmware/default.nix b/pkgs/os-specific/linux/firmware/system76-firmware/default.nix
index 24eb4dcdf61e3..c4be2982d57d6 100644
--- a/pkgs/os-specific/linux/firmware/system76-firmware/default.nix
+++ b/pkgs/os-specific/linux/firmware/system76-firmware/default.nix
@@ -2,13 +2,13 @@
 rustPlatform.buildRustPackage rec {
   pname = "system76-firmware";
   # Check Makefile when updating, make sure postInstall matches make install
-  version = "1.0.43";
+  version = "1.0.50";
 
   src = fetchFromGitHub {
     owner = "pop-os";
     repo = pname;
     rev = version;
-    sha256 = "sha256-0NlM5ugpJzwzXgm8TqM6/aj3b+lDYbLeYOHNHM3g8aw=";
+    sha256 = "sha256-nLbDhs+FxIcoVK66bwUAxAubikic5NT8yOA/mH/irgQ=";
   };
 
   nativeBuildInputs = [ pkg-config makeWrapper ];
@@ -17,7 +17,7 @@ rustPlatform.buildRustPackage rec {
 
   cargoBuildFlags = [ "--workspace" ];
 
-  cargoSha256 = "sha256-oyHnEWtQ0pl4SaJsnao+oTDBuu9PJdU3uqLTDowRWQw=";
+  cargoHash = "sha256-JQRbHIMfPw/vC2+DFQV86+hgHZJXtpB4JO6uLugHsNg=";
 
   # Purposefully don't install systemd unit file, that's for NixOS
   postInstall = ''
diff --git a/pkgs/os-specific/linux/fnotifystat/default.nix b/pkgs/os-specific/linux/fnotifystat/default.nix
index c5682c6942fce..fabfd47bca12b 100644
--- a/pkgs/os-specific/linux/fnotifystat/default.nix
+++ b/pkgs/os-specific/linux/fnotifystat/default.nix
@@ -5,13 +5,13 @@
 
 stdenv.mkDerivation rec {
   pname = "fnotifystat";
-  version = "0.02.09";
+  version = "0.02.10";
 
   src = fetchFromGitHub {
     owner = "ColinIanKing";
     repo = pname;
     rev = "V${version}";
-    hash = "sha256-YyIk7x0B3JB/iMF9OP767fVEBgcV0duV7xIiHZxpL0w=";
+    hash = "sha256-bcb1kSpNZV7eTcEIcaoiqxB68kTc0TGFMIr1Aehy/Rc=";
   };
 
   installFlags = [
diff --git a/pkgs/os-specific/linux/kernel/zen-kernels.nix b/pkgs/os-specific/linux/kernel/zen-kernels.nix
index 7dbf9c97291cd..9f172447f10e1 100644
--- a/pkgs/os-specific/linux/kernel/zen-kernels.nix
+++ b/pkgs/os-specific/linux/kernel/zen-kernels.nix
@@ -4,16 +4,16 @@ let
   # comments with variant added for update script
   # ./update-zen.py zen
   zenVariant = {
-    version = "6.2.1"; #zen
+    version = "6.2.2"; #zen
     suffix = "zen1"; #zen
-    sha256 = "1ypgdc4bz35cqqwp8nka6rx7m9dqfl6wzfb8ad27gqgxwzil3sjg"; #zen
+    sha256 = "004aghwdclky7w341yg9nkr5r58qnp4hxnmvxrp2z06pzcbsq933"; #zen
     isLqx = false;
   };
   # ./update-zen.py lqx
   lqxVariant = {
-    version = "6.1.13"; #lqx
-    suffix = "lqx2"; #lqx
-    sha256 = "1264cfkb3kfrava8g7byr10avkjg0k281annqppcqqjkyjf63q4y"; #lqx
+    version = "6.1.14"; #lqx
+    suffix = "lqx1"; #lqx
+    sha256 = "026nnmbpipk4gg7llsvm4fgws3ka0hjdywl7h0a8bvq6n9by15i6"; #lqx
     isLqx = true;
   };
   zenKernelsFor = { version, suffix, sha256, isLqx }: buildLinux (args // {
diff --git a/pkgs/servers/dns/bind/default.nix b/pkgs/servers/dns/bind/default.nix
index 9934a4710d769..952ef56902015 100644
--- a/pkgs/servers/dns/bind/default.nix
+++ b/pkgs/servers/dns/bind/default.nix
@@ -4,15 +4,16 @@
 , enablePython ? false, python3
 , enableGSSAPI ? true, libkrb5
 , buildPackages, nixosTests
+, cmocka, tzdata
 }:
 
 stdenv.mkDerivation rec {
   pname = "bind";
-  version = "9.18.11";
+  version = "9.18.12";
 
   src = fetchurl {
     url = "https://downloads.isc.org/isc/bind9/${version}/${pname}-${version}.tar.xz";
-    sha256 = "sha256-j/M1KBIjDLy9pC34fK2WH5QWPT2kV8XkvvgFf9XfIVg=";
+    sha256 = "sha256-R3Zrt7BjqrutBUOGsZCqf2wUUkQnr9Qnww7EJlEgJ+c=";
   };
 
   outputs = [ "out" "lib" "dev" "man" "dnsutils" "host" ];
@@ -59,8 +60,18 @@ stdenv.mkDerivation rec {
     EOF
   '';
 
-  doCheck = false; # requires root and the net
   enableParallelBuilding = true;
+  doCheck = !stdenv.hostPlatform.isStatic;
+  checkTarget = "unit";
+  checkInputs = [
+    cmocka
+  ] ++ lib.optionals (!stdenv.hostPlatform.isMusl) [
+    tzdata
+  ];
+  preCheck = lib.optionalString stdenv.hostPlatform.isMusl ''
+    # musl doesn't respect TZDIR, skip timezone-related tests
+    sed -i '/^ISC_TEST_ENTRY(isc_time_formatISO8601L/d' tests/isc/time_test.c
+  '';
 
   passthru.tests = {
     inherit (nixosTests) bind;
diff --git a/pkgs/servers/gotify/source-sha.nix b/pkgs/servers/gotify/source-sha.nix
index dbbf54fc43fc8..4ef0649df7a1f 100644
--- a/pkgs/servers/gotify/source-sha.nix
+++ b/pkgs/servers/gotify/source-sha.nix
@@ -1 +1 @@
-"0rpjn10ia47nia0g5a8khxy0r8grlfvcf1s5kdyj9512hcy25aca"
+"1kc4l95hrhi7lb9x8gy19xpwj12j4syg6w1kbllf3g3k83sr444f"
diff --git a/pkgs/servers/gotify/update.sh b/pkgs/servers/gotify/update.sh
index f0a40e30c9138..7df5f3711e1fa 100755
--- a/pkgs/servers/gotify/update.sh
+++ b/pkgs/servers/gotify/update.sh
@@ -30,7 +30,7 @@ echo running nix-build for ui
 nix-build -A gotify-server.ui
 echo running nix-build for gotify itself in order to get vendorSha256
 set +e
-vendorSha256="$(nix-build -A gotify-server 2>&1 | grep "got:" | cut -d':' -f3)"
+vendorSha256="$(nix-build -A gotify-server 2>&1 | grep "got:" | cut -d':' -f2)"
 set -e
 printf '"%s"\n' "$vendorSha256" > $dirname/vendor-sha.nix
 tput setaf 2
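For context, the `got:` scrape above relies on Nix printing the correct hash whenever a fixed-output derivation is built with a wrong one; the usual way to trigger that deliberately is a fake placeholder hash. A hypothetical sketch of the idea (`lib.fakeHash` is the stock placeholder; the package attributes are illustrative, not the gotify expression):

    { buildGoModule, fetchFromGitHub, lib }:

    buildGoModule rec {
      pname = "example-go-tool";      # illustrative name, not gotify-server
      version = "1.0.0";
      src = fetchFromGitHub {
        owner = "example";
        repo = "example-go-tool";
        rev = "v${version}";
        hash = lib.fakeHash;          # placeholder; the failed fetch prints the real hash first
      };
      vendorHash = lib.fakeHash;      # once src is fixed, the next failure prints the vendor hash on its "got:" line
    }
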
diff --git a/pkgs/servers/gotify/version.nix b/pkgs/servers/gotify/version.nix
index 5604878046788..d3eb9bb03b768 100644
--- a/pkgs/servers/gotify/version.nix
+++ b/pkgs/servers/gotify/version.nix
@@ -1 +1 @@
-"2.2.2"
+"2.2.4"
diff --git a/pkgs/servers/misc/gobgpd/default.nix b/pkgs/servers/misc/gobgpd/default.nix
index 68edff754a887..d20a60e5208c1 100644
--- a/pkgs/servers/misc/gobgpd/default.nix
+++ b/pkgs/servers/misc/gobgpd/default.nix
@@ -5,16 +5,16 @@
 
 buildGoModule rec {
   pname = "gobgpd";
-  version = "3.11.0";
+  version = "3.12.0";
 
   src = fetchFromGitHub {
     owner = "osrg";
     repo = "gobgp";
     rev = "refs/tags/v${version}";
-    hash = "sha256-UGRGJqeVWrt8NVf9d5Mk7k+k2Is/fwHv2X0hmyXvTZs=";
+    hash = "sha256-keev3DZ3xN5UARuYKfSdox0KKBjrM5RoMD273Aw0AGY=";
   };
 
-  vendorHash = "sha256-9Vi8qrcFC2SazcGVgAf1vbKvxd8rTMgye63wSCaFonk=";
+  vendorHash = "sha256-5lRW9gWQZRRqZoVB16kI1VEnr0XsiPtLUuioK/0f8w0=";
 
   postConfigure = ''
     export CGO_ENABLED=0
diff --git a/pkgs/servers/monitoring/grafana/default.nix b/pkgs/servers/monitoring/grafana/default.nix
index b0c9a538e049d..24813af5a29d6 100644
--- a/pkgs/servers/monitoring/grafana/default.nix
+++ b/pkgs/servers/monitoring/grafana/default.nix
@@ -2,7 +2,7 @@
 
 buildGoModule rec {
   pname = "grafana";
-  version = "9.4.2";
+  version = "9.4.3";
 
   excludedPackages = [ "alert_webhook_listener" "clean-swagger" "release_publisher" "slow_proxy" "slow_proxy_mac" "macaron" "devenv" ];
 
@@ -10,12 +10,12 @@ buildGoModule rec {
     rev = "v${version}";
     owner = "grafana";
     repo = "grafana";
-    sha256 = "sha256-dSKIQiav6y4P1e/7CptIdRuOrDdXdvItCaRBcbepadE=";
+    sha256 = "sha256-LYUbypPXoWwWA4u2JxhUS/lozQNo2DCFGDPCmNP3GoE=";
   };
 
   srcStatic = fetchurl {
     url = "https://dl.grafana.com/oss/release/grafana-${version}.linux-amd64.tar.gz";
-    sha256 = "sha256-dBp6V5ozu1koSoXIecjysSIdG0hL1K5lH9Z8yougUKo=";
+    sha256 = "sha256-aq6/sMfYVebxh46+zxphfWttFN4vBpUgCLXobLWVozk=";
   };
 
   vendorSha256 = "sha256-atnlEdGDiUqQkslvRlPSi6VC5rEvRVV6R2Wxur3geew=";
diff --git a/pkgs/servers/monitoring/grafana/plugins/doitintl-bigquery-datasource/default.nix b/pkgs/servers/monitoring/grafana/plugins/doitintl-bigquery-datasource/default.nix
index c5189720660a1..aa8520e69b8e1 100644
--- a/pkgs/servers/monitoring/grafana/plugins/doitintl-bigquery-datasource/default.nix
+++ b/pkgs/servers/monitoring/grafana/plugins/doitintl-bigquery-datasource/default.nix
@@ -2,8 +2,8 @@
 
 grafanaPlugin rec {
   pname = "doitintl-bigquery-datasource";
-  version = "2.0.2";
-  zipHash = "sha256-GE6DNuQ5WtS/2VmXbQBeRdVKDbLlLirWXW51i0RF6Cc=";
+  version = "2.0.3";
+  zipHash = "sha256-QxUNRsO1ony+6tVdpwx3P/63XNIdAVIren6hUwChf9E=";
   meta = with lib; {
     description = "BigQuery DataSource for Grafana";
     license = licenses.mit;
diff --git a/pkgs/servers/monitoring/grafana/plugins/grafadruid-druid-datasource/default.nix b/pkgs/servers/monitoring/grafana/plugins/grafadruid-druid-datasource/default.nix
index 9f1d501d23fdc..a5ed12fbfa475 100644
--- a/pkgs/servers/monitoring/grafana/plugins/grafadruid-druid-datasource/default.nix
+++ b/pkgs/servers/monitoring/grafana/plugins/grafadruid-druid-datasource/default.nix
@@ -2,8 +2,8 @@
 
 grafanaPlugin rec {
   pname = "grafadruid-druid-datasource";
-  version = "1.2.0";
-  zipHash = "sha256-DPeyV2jZquSQcSE+HzvxArWEefs9bFNPjZwDFp+dIjg=";
+  version = "1.4.1";
+  zipHash = "sha256-7atxqRqKqop6ABQ+ead6wR/YRpJaV8j/Ri4VB9FXMu8=";
   meta = with lib; {
     description = "Connects Grafana to Druid";
     license = licenses.asl20;
diff --git a/pkgs/servers/monitoring/grafana/plugins/grafana-clock-panel/default.nix b/pkgs/servers/monitoring/grafana/plugins/grafana-clock-panel/default.nix
index 365c188541b48..76493726c3a6d 100644
--- a/pkgs/servers/monitoring/grafana/plugins/grafana-clock-panel/default.nix
+++ b/pkgs/servers/monitoring/grafana/plugins/grafana-clock-panel/default.nix
@@ -2,8 +2,8 @@
 
 grafanaPlugin rec {
   pname = "grafana-clock-panel";
-  version = "1.1.3";
-  zipHash = "sha256-80JaMhY/EduSWvFrScfua99DGhT/FJUqY/kl0CafKCs=";
+  version = "2.1.2";
+  zipHash = "sha256-LRlyK0mv5gFNknjc9mDr84NDTIXDzlKV9spQVhZSv+I=";
   meta = with lib; {
     description = "Clock panel for Grafana";
     license = licenses.mit;
diff --git a/pkgs/servers/monitoring/grafana/plugins/grafana-piechart-panel/default.nix b/pkgs/servers/monitoring/grafana/plugins/grafana-piechart-panel/default.nix
index 3787c6cc5e1d6..168295c64cd5c 100644
--- a/pkgs/servers/monitoring/grafana/plugins/grafana-piechart-panel/default.nix
+++ b/pkgs/servers/monitoring/grafana/plugins/grafana-piechart-panel/default.nix
@@ -2,8 +2,8 @@
 
 grafanaPlugin rec {
   pname = "grafana-piechart-panel";
-  version = "1.6.2";
-  zipHash = "sha256-xKyVT092Ffgzl0BewQw5iZ14I/q6CviUR5t9BVM0bf0=";
+  version = "1.6.4";
+  zipHash = "sha256-bdAl3OmZgSNB+IxxlCb81abR+4dykKkRY3MpQUQyLks=";
   meta = with lib; {
     description = "Pie chart panel for Grafana";
     license = licenses.mit;
diff --git a/pkgs/servers/monitoring/grafana/plugins/grafana-polystat-panel/default.nix b/pkgs/servers/monitoring/grafana/plugins/grafana-polystat-panel/default.nix
index 0ef214975979c..0a886441eaf10 100644
--- a/pkgs/servers/monitoring/grafana/plugins/grafana-polystat-panel/default.nix
+++ b/pkgs/servers/monitoring/grafana/plugins/grafana-polystat-panel/default.nix
@@ -2,8 +2,8 @@
 
 grafanaPlugin rec {
   pname = "grafana-polystat-panel";
-  version = "1.2.6";
-  zipHash = "sha256-gbMD2o8A2YYZzkpYiXNkv8Oj958RP47fL6DXj1SBYF0=";
+  version = "2.0.4";
+  zipHash = "sha256-wkf/LK+kcjTbMJzlAg6zRWgEO5LjdDR/oy7/SrNuCXM=";
   meta = with lib; {
     description = "Hexagonal multi-stat panel for Grafana";
     license = licenses.asl20;
diff --git a/pkgs/servers/monitoring/grafana/plugins/grafana-worldmap-panel/default.nix b/pkgs/servers/monitoring/grafana/plugins/grafana-worldmap-panel/default.nix
index 4060a1c8dd88f..8d00551498679 100644
--- a/pkgs/servers/monitoring/grafana/plugins/grafana-worldmap-panel/default.nix
+++ b/pkgs/servers/monitoring/grafana/plugins/grafana-worldmap-panel/default.nix
@@ -2,8 +2,8 @@
 
 grafanaPlugin rec {
   pname = "grafana-worldmap-panel";
-  version = "0.3.3";
-  zipHash = "sha256-3n1p3SvcBQMmnbnHimLBP7hauVV1IS3SMwttUWTNvb8=";
+  version = "1.0.3";
+  zipHash = "sha256-xpcQTymxA4d8jRnHm4cHAFOzPT1BseOaX0Qeq5vDvac=";
   meta = with lib; {
     description = "World Map panel for Grafana";
     license = licenses.mit;
diff --git a/pkgs/servers/monitoring/prometheus/redis-exporter.nix b/pkgs/servers/monitoring/prometheus/redis-exporter.nix
index a4590277acc9e..a457a2ee39cf9 100644
--- a/pkgs/servers/monitoring/prometheus/redis-exporter.nix
+++ b/pkgs/servers/monitoring/prometheus/redis-exporter.nix
@@ -2,13 +2,13 @@
 
 buildGoModule rec {
   pname = "redis_exporter";
-  version = "1.47.0";
+  version = "1.48.0";
 
   src = fetchFromGitHub {
     owner = "oliver006";
     repo = "redis_exporter";
     rev = "v${version}";
-    sha256 = "sha256-pSLFfArmG4DIgYUD8qz71P+7RYIQuUycnYzNFXNhZ8A=";
+    sha256 = "sha256-hBkekoVwNuRDGhpvbW57eR+UUMkntdEcHJAVQbwk7NE=";
   };
 
   vendorHash = "sha256-Owfxy7WkucQ6BM8yjnZg9/8CgopGTtbQTTUuxoT3RRE=";
@@ -28,7 +28,7 @@ buildGoModule rec {
     description = "Prometheus exporter for Redis metrics";
     inherit (src.meta) homepage;
     license = licenses.mit;
-    maintainers = with maintainers; [ eskytthe srhb ];
+    maintainers = with maintainers; [ eskytthe srhb ma27 ];
     platforms = platforms.unix;
   };
 }
diff --git a/pkgs/servers/nosql/redis/default.nix b/pkgs/servers/nosql/redis/default.nix
index 0f788dd9d3876..85e9fe5022e5e 100644
--- a/pkgs/servers/nosql/redis/default.nix
+++ b/pkgs/servers/nosql/redis/default.nix
@@ -7,11 +7,11 @@
 
 stdenv.mkDerivation rec {
   pname = "redis";
-  version = "7.0.8";
+  version = "7.0.9";
 
   src = fetchurl {
     url = "https://download.redis.io/releases/${pname}-${version}.tar.gz";
-    hash = "sha256-BqM55JEwZ4Pc9VuX8VpdvL3AHMvebcIwJ8R1yrc16RQ=";
+    hash = "sha256-93E1wqR8kVHUAov+o7NEcKtNMk0UhPeahMbzKjz7n2U=";
   };
 
   nativeBuildInputs = [ pkg-config ];
diff --git a/pkgs/servers/piping-server-rust/default.nix b/pkgs/servers/piping-server-rust/default.nix
index ff9c8ad710c3a..f29cf359d4eeb 100644
--- a/pkgs/servers/piping-server-rust/default.nix
+++ b/pkgs/servers/piping-server-rust/default.nix
@@ -2,16 +2,16 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "piping-server-rust";
-  version = "0.15.0";
+  version = "0.16.0";
 
   src = fetchFromGitHub {
     owner = "nwtgck";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-s7tfGt1P/GmvjkIcy8DEwz+ObPxoMsIL7meAc5vMkKo=";
+    sha256 = "sha256-cWBNO9V9DMbEhkjG8g/iswV04DeYh3tXv0+1hB/pf64=";
   };
 
-  cargoSha256 = "sha256-gqKEFqf49sKZy+L0X4MxUfx2+iYoNIU415xHqOy8MZA=";
+  cargoSha256 = "sha256-jZio6y2m14tVi3nTQqh+8W3hxft5PfAIWm2XpuyCKDU=";
 
   buildInputs = lib.optionals stdenv.isDarwin [ CoreServices Security ];
 
diff --git a/pkgs/servers/plex/raw.nix b/pkgs/servers/plex/raw.nix
index abc89d8e81294..5a0368d36f7d0 100644
--- a/pkgs/servers/plex/raw.nix
+++ b/pkgs/servers/plex/raw.nix
@@ -12,16 +12,16 @@
 # server, and the FHS userenv and corresponding NixOS module should
 # automatically pick up the changes.
 stdenv.mkDerivation rec {
-  version = "1.31.0.6654-02189b09f";
+  version = "1.31.1.6733-bc0674160";
   pname = "plexmediaserver";
 
   # Fetch the source
   src = if stdenv.hostPlatform.system == "aarch64-linux" then fetchurl {
     url = "https://downloads.plex.tv/plex-media-server-new/${version}/debian/plexmediaserver_${version}_arm64.deb";
-    sha256 = "sha256-ttkvYD+ALxfZpQutI1VyTbmQi/7hmvZ+YMUv3lskeWU=";
+    sha256 = "0nj9n250lhin58xlqvn2l0pjxdbajj0bla2wrgan8gs2m45nk3q9";
   } else fetchurl {
     url = "https://downloads.plex.tv/plex-media-server-new/${version}/debian/plexmediaserver_${version}_amd64.deb";
-    sha256 = "sha256-TTEcyIBFiuJTNHeJ9wu+4o2ol72oCvM9FdDPC83J3Mc=";
+    sha256 = "0a5h151gh1ja3frqzaqw3pj1kyh5p0wgnfmmxiz0q3zx1drjs611";
   };
 
   outputs = [ "out" "basedb" ];
diff --git a/pkgs/servers/roon-server/default.nix b/pkgs/servers/roon-server/default.nix
index 067f9b48d58ef..2f60d525dc071 100644
--- a/pkgs/servers/roon-server/default.nix
+++ b/pkgs/servers/roon-server/default.nix
@@ -15,7 +15,7 @@
 , stdenv
 }:
 let
-  version = "2.0-1202";
+  version = "2.0-1223";
   urlVersion = builtins.replaceStrings [ "." "-" ] [ "00" "0" ] version;
 in
 stdenv.mkDerivation {
@@ -24,7 +24,7 @@ stdenv.mkDerivation {
 
   src = fetchurl {
     url = "https://download.roonlabs.com/updates/production/RoonServer_linuxx64_${urlVersion}.tar.bz2";
-    hash = "sha256-YeBzXnw/BpJDUJ7fUf7TH0zQcpCjUm9peB7zPO2ZsYI=";
+    hash = "sha256-1jHNHj1tB80/CdE7GPCgRsI0+2Gfx4kiE6a0EOI/K5U=";
   };
 
   dontConfigure = true;
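As a quick sanity check of the `urlVersion` mangling above (dots become `00`, the dash becomes `0`), the substitution can be evaluated on its own, for example in `nix repl`:

    let
      version = "2.0-1223";
      urlVersion = builtins.replaceStrings [ "." "-" ] [ "00" "0" ] version;
    in
      urlVersion   # evaluates to "200001223", the form interpolated into the download URL
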
diff --git a/pkgs/servers/sql/postgresql/ext/temporal_tables.nix b/pkgs/servers/sql/postgresql/ext/temporal_tables.nix
index 66b8b10e87835..ec654c4a09fa4 100644
--- a/pkgs/servers/sql/postgresql/ext/temporal_tables.nix
+++ b/pkgs/servers/sql/postgresql/ext/temporal_tables.nix
@@ -2,15 +2,15 @@
 
 stdenv.mkDerivation rec {
   pname = "temporal_tables";
-  version = "1.2.0";
+  version = "unstable-2021-02-20";
 
   buildInputs = [ postgresql ];
 
   src = fetchFromGitHub {
-    owner  = "mlt";
+    owner  = "arkhipov";
     repo   = pname;
-    rev    = "6cc86eb03d618d6b9fc09ae523f1a1e5228d22b5";
-    sha256 = "0ykv37rm511n5955mbh9dcp7pgg88z1nwgszav7z6pziaj3nba8x";
+    rev    = "3ce22da51f2549e8f8b8fbf2850c63eb3a2f1fbb";
+    sha256 = "sha256-kmcl6vVHRZj2G5GijEyaZgDpZBDcdIUKzXv0rYYqUu4=";
   };
 
   installPhase = ''
@@ -22,7 +22,6 @@ stdenv.mkDerivation rec {
   '';
 
   meta = with lib; {
-    broken = stdenv.isDarwin;
     description = "Temporal Tables PostgreSQL Extension ";
     homepage    = "https://github.com/mlt/temporal_tables";
     maintainers = with maintainers; [ ggpeti ];
diff --git a/pkgs/servers/syncstorage-rs/default.nix b/pkgs/servers/syncstorage-rs/default.nix
index 65189094d0c76..e71d71dc78c9a 100644
--- a/pkgs/servers/syncstorage-rs/default.nix
+++ b/pkgs/servers/syncstorage-rs/default.nix
@@ -21,13 +21,13 @@ in
 
 rustPlatform.buildRustPackage rec {
   pname = "syncstorage-rs";
-  version = "0.13.2";
+  version = "0.13.5";
 
   src = fetchFromGitHub {
     owner = "mozilla-services";
     repo = pname;
-    rev = version;
-    hash = "sha256-zxpqQpzmPPU6V5QITK9SgAAI7l3/7+h0u3/bZgiU7y4=";
+    rev = "refs/tags/${version}";
+    hash = "sha256-eFrrZ/+8OsmIfCEoXPAKqVkZlgN8sfXueJQvQN8VCB0=";
   };
 
   nativeBuildInputs = [
@@ -47,7 +47,7 @@ rustPlatform.buildRustPackage rec {
       --prefix PATH : ${lib.makeBinPath [ pyFxADeps ]}
   '';
 
-  cargoHash = "sha256-U0xHqOh0ii4PE9UYKo+diqSoZ1ZjzBmHILvAhHSZD0A=";
+  cargoHash = "sha256-SgOxXzI6IZcP5Q06Aj5Pv6Rrvb7xVShUcGaViLuESOw=";
 
   buildFeatures = [ "grpcio/openssl" ];
 
@@ -57,6 +57,7 @@ rustPlatform.buildRustPackage rec {
   meta = {
     description = "Mozilla Sync Storage built with Rust";
     homepage = "https://github.com/mozilla-services/syncstorage-rs";
+    changelog = "https://github.com/mozilla-services/syncstorage-rs/releases/tag/${version}";
     license = lib.licenses.mpl20;
     maintainers = with lib.maintainers; [ pennae ];
     platforms = lib.platforms.linux;
diff --git a/pkgs/servers/web-apps/5etools/default.nix b/pkgs/servers/web-apps/5etools/default.nix
index 03335be5ae84d..856384aa173f0 100644
--- a/pkgs/servers/web-apps/5etools/default.nix
+++ b/pkgs/servers/web-apps/5etools/default.nix
@@ -15,5 +15,6 @@ fetchFromGitHub rec {
     changelog = "https://github.com/5etools-mirror-1/5etools-mirror-1.github.io/releases/tag/v${version}";
     license = [ licenses.mit ];
     maintainers = with maintainers; [ urandom ];
+    hydraPlatforms = []; # src tarball is 4.7G, unpacked 4.8G, exceeds Hydra's output limit
   };
 }
diff --git a/pkgs/servers/x11/xorg/default.nix b/pkgs/servers/x11/xorg/default.nix
index 6effd2c26c371..afa864fdbf631 100644
--- a/pkgs/servers/x11/xorg/default.nix
+++ b/pkgs/servers/x11/xorg/default.nix
@@ -2220,11 +2220,11 @@ self: with self; {
   # THIS IS A GENERATED FILE.  DO NOT EDIT!
   xf86videoamdgpu = callPackage ({ stdenv, pkg-config, fetchurl, xorgproto, mesa, libGL, libdrm, udev, xorgserver }: stdenv.mkDerivation {
     pname = "xf86-video-amdgpu";
-    version = "21.0.0";
+    version = "23.0.0";
     builder = ./builder.sh;
     src = fetchurl {
-      url = "mirror://xorg/individual/driver/xf86-video-amdgpu-21.0.0.tar.bz2";
-      sha256 = "125dq85n46yqmnmr2hknxwcqicwlvz2b2phf0m963fpg9l1j6y30";
+      url = "mirror://xorg/individual/driver/xf86-video-amdgpu-23.0.0.tar.xz";
+      sha256 = "0qf0kjh6pww5abxmqa4c9sfa2qq1hq4p8qcgqpfd1kpkcvmg012g";
     };
     hardeningDisable = [ "bindnow" "relro" ];
     strictDeps = true;
diff --git a/pkgs/servers/x11/xorg/tarballs.list b/pkgs/servers/x11/xorg/tarballs.list
index 62509a1c92cd7..f2f7d14ddfaa1 100644
--- a/pkgs/servers/x11/xorg/tarballs.list
+++ b/pkgs/servers/x11/xorg/tarballs.list
@@ -86,7 +86,7 @@ mirror://xorg/individual/driver/xf86-input-mouse-1.9.3.tar.bz2
 mirror://xorg/individual/driver/xf86-input-synaptics-1.9.1.tar.bz2
 mirror://xorg/individual/driver/xf86-input-vmmouse-13.1.0.tar.bz2
 mirror://xorg/individual/driver/xf86-input-void-1.4.1.tar.bz2
-mirror://xorg/individual/driver/xf86-video-amdgpu-21.0.0.tar.bz2
+mirror://xorg/individual/driver/xf86-video-amdgpu-23.0.0.tar.xz
 mirror://xorg/individual/driver/xf86-video-apm-1.3.0.tar.bz2
 mirror://xorg/individual/driver/xf86-video-ark-0.7.5.tar.bz2
 mirror://xorg/individual/driver/xf86-video-ast-1.1.5.tar.bz2
diff --git a/pkgs/tools/admin/aliyun-cli/default.nix b/pkgs/tools/admin/aliyun-cli/default.nix
index 909b793cc62ab..d998abed7fc1e 100644
--- a/pkgs/tools/admin/aliyun-cli/default.nix
+++ b/pkgs/tools/admin/aliyun-cli/default.nix
@@ -2,17 +2,17 @@
 
 buildGoModule rec {
   pname = "aliyun-cli";
-  version = "3.0.150";
+  version = "3.0.152";
 
   src = fetchFromGitHub {
     rev = "v${version}";
     owner = "aliyun";
     repo = pname;
     fetchSubmodules = true;
-    sha256 = "sha256-NzdE3s3DKeh7/EG6Pvf6I2nU2ZfhUtRZIlW1+6cI2jc=";
+    sha256 = "sha256-n6S9ul9yD63l8Ge9Cs6QonRvvNxdke3u3P8TQOx5Z98=";
   };
 
-  vendorHash = "sha256-GVx0mgpbftyy9Eni3IYFmvWcaGnm5Nuqh4KvGeqhVu4=";
+  vendorHash = "sha256-Z8MDLUMgehoDEEBB2A5QWHGPqX5xt/16afa9T4v6kkQ=";
 
   subPackages = [ "main" ];
 
diff --git a/pkgs/tools/admin/aws-rotate-key/default.nix b/pkgs/tools/admin/aws-rotate-key/default.nix
index 35221b3fb5b50..5d228e93f1187 100644
--- a/pkgs/tools/admin/aws-rotate-key/default.nix
+++ b/pkgs/tools/admin/aws-rotate-key/default.nix
@@ -2,16 +2,16 @@
 
 buildGoModule rec {
   pname = "aws-rotate-key";
-  version = "1.0.8";
+  version = "1.1.0";
 
   src = fetchFromGitHub {
     owner = "Fullscreen";
     repo = "aws-rotate-key";
     rev = "v${version}";
-    sha256 = "sha256-5kV87uQDSc/qpm79Pd2nXo/EcbMlhZqFYaw+gJQa2uo=";
+    sha256 = "sha256-PZ7+GC4P4bkT+DWOhW70KkhUCUjn4gIG+OKoOBSc/8c=";
   };
 
-  vendorSha256 = "sha256-h7tmJx/Um1Cy/ojiFjoKCH/LcOwhGU8ADb5WwmrkkJM=";
+  vendorHash = "sha256-Asfbv7avT+L8/WNQ6NS7gFcjA9MiTCu5PzsuA/PT6/k=";
 
   ldflags = [ "-s" "-w" ];
 
diff --git a/pkgs/tools/admin/coldsnap/default.nix b/pkgs/tools/admin/coldsnap/default.nix
index 3717550846c21..6ff4f443982d8 100644
--- a/pkgs/tools/admin/coldsnap/default.nix
+++ b/pkgs/tools/admin/coldsnap/default.nix
@@ -9,15 +9,15 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "coldsnap";
-  version = "0.4.2";
+  version = "0.4.3";
 
   src = fetchFromGitHub {
     owner = "awslabs";
     repo = pname;
     rev = "v${version}";
-    hash = "sha256-+JQjJ4F++S3eLnrqV1v4leepOvZBf8Vp575rnlDx2Cg=";
+    hash = "sha256-kL9u+IBlC9Pxm5yaJagY9dy0Pm0xlKfVxFVBmwDMSak=";
   };
-  cargoHash = "sha256-mAnoe9rK4+OpCzD7tzV+FQz+fFr8NapzsXtON3lS/tk";
+  cargoHash = "sha256-eYBmke0FQ9CK3cCaM7ecmp1vkNlZO3SHRnxFzmelYhU=";
 
   buildInputs = [ openssl ] ++ lib.optionals stdenv.isDarwin [ Security ];
   nativeBuildInputs = [ pkg-config ];
diff --git a/pkgs/tools/admin/eksctl/default.nix b/pkgs/tools/admin/eksctl/default.nix
index 32c71f2d1ceb8..b00d8327b4190 100644
--- a/pkgs/tools/admin/eksctl/default.nix
+++ b/pkgs/tools/admin/eksctl/default.nix
@@ -2,16 +2,16 @@
 
 buildGoModule rec {
   pname = "eksctl";
-  version = "0.131.0";
+  version = "0.132.0";
 
   src = fetchFromGitHub {
     owner = "weaveworks";
     repo = pname;
     rev = version;
-    sha256 = "sha256-xH7GUR42eMhF2SXCprFEE6ESiFBYnpGP3lLZ4sKIu5s=";
+    sha256 = "sha256-wKTESd+YKJd94yUuqm1v4biyGCABbhwTlKLzbiOg6H0=";
   };
 
-  vendorHash = "sha256-hF1J+HDpnWM55ga5ZOaApFmfx1/vF2SNl+JsDrMphtM=";
+  vendorHash = "sha256-A06vwsadznqe1xraMZnseRAJkrpCdciKBxPUBus39ws=";
 
   doCheck = false;
 
diff --git a/pkgs/tools/admin/pgadmin/default.nix b/pkgs/tools/admin/pgadmin/default.nix
index a83d96779de6a..2025b4290dca9 100644
--- a/pkgs/tools/admin/pgadmin/default.nix
+++ b/pkgs/tools/admin/pgadmin/default.nix
@@ -14,11 +14,11 @@
 
 let
   pname = "pgadmin";
-  version = "6.19";
+  version = "6.20";
 
   src = fetchurl {
     url = "https://ftp.postgresql.org/pub/pgadmin/pgadmin4/v${version}/source/pgadmin4-${version}.tar.gz";
-    sha256 = "sha256-xHvdqVpNU9ZzTA6Xl2Bv044l6Tbvf4fjqyz4TmS9gmI=";
+    sha256 = "sha256-6aQvg98LymZGAgAcNX5Xhw/aRdE5h4HOCPS+kQnkstU=";
   };
 
   yarnDeps = mkYarnModules {
diff --git a/pkgs/tools/admin/pgadmin/package.json b/pkgs/tools/admin/pgadmin/package.json
index 462d82e2134dd..f43473b081cd0 100644
--- a/pkgs/tools/admin/pgadmin/package.json
+++ b/pkgs/tools/admin/pgadmin/package.json
@@ -150,6 +150,7 @@
     "react-draggable": "^4.4.4",
     "react-dropzone": "^14.2.1",
     "react-leaflet": "^3.2.2",
+    "react-resize-detector": "^8.0.3",
     "react-rnd": "^10.3.5",
     "react-router-dom": "^6.2.2",
     "react-select": "^4.2.1",
@@ -164,6 +165,8 @@
     "styled-components": "^5.2.1",
     "tempusdominus-bootstrap-4": "^5.1.2",
     "tempusdominus-core": "^5.19.3",
+    "uplot": "^1.6.24",
+    "uplot-react": "^1.1.4",
     "valid-filename": "^2.0.1",
     "webcabin-docker": "git+https://github.com/pgadmin-org/wcdocker/#3df8aac825ee2892f4d824de273b779cc6dbcad8",
     "wkx": "^0.5.0",
@@ -188,7 +191,7 @@
     "pep8": "pycodestyle --config=../.pycodestyle ../docs && pycodestyle --config=../.pycodestyle ../pkg && pycodestyle --config=../.pycodestyle ../tools && pycodestyle --config=../.pycodestyle ../web",
     "auditjs-html": "yarn audit --json | yarn run yarn-audit-html --output ../auditjs.html",
     "auditjs": "yarn audit --groups dependencies",
-    "auditpy": "safety check --full-report -i 40493 -i 51668",
+    "auditpy": "safety check --full-report -i 51668 -i 52495",
     "audit": "yarn run auditjs && yarn run auditpy"
   }
 }
diff --git a/pkgs/tools/admin/pgadmin/yarn.lock b/pkgs/tools/admin/pgadmin/yarn.lock
index df20a984a8f70..f506196c20402 100644
--- a/pkgs/tools/admin/pgadmin/yarn.lock
+++ b/pkgs/tools/admin/pgadmin/yarn.lock
@@ -8609,6 +8609,13 @@ react-property@2.0.0:
   resolved "https://registry.yarnpkg.com/react-property/-/react-property-2.0.0.tgz#2156ba9d85fa4741faf1918b38efc1eae3c6a136"
   integrity sha512-kzmNjIgU32mO4mmH5+iUyrqlpFQhF8K2k7eZ4fdLSOPFrD1XgEuSBv9LDEgxRXTMBqMd8ppT0x6TIzqE5pdGdw==
 
+react-resize-detector@^8.0.3:
+  version "8.0.3"
+  resolved "https://registry.yarnpkg.com/react-resize-detector/-/react-resize-detector-8.0.3.tgz#dab4470aae23bb07deb857230ccf945d000ef99b"
+  integrity sha512-c3eqm5BVcluVhxHsBQnhyPO/5uYB3XHIHz6D1ZOHzU2WcnZF0Cr3KLl5OIozRC2RSsdQlu5vn1PHEqrvKRnIYA==
+  dependencies:
+    lodash "^4.17.21"
+
 react-rnd@^10.3.5:
   version "10.3.7"
   resolved "https://registry.yarnpkg.com/react-rnd/-/react-rnd-10.3.7.tgz#037ce277e6c5e682989b51278e44a6ba299990af"
@@ -10084,9 +10091,9 @@ typescript@^3.2.2:
   integrity sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==
 
 ua-parser-js@^0.7.30:
-  version "0.7.32"
-  resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.32.tgz#cd8c639cdca949e30fa68c44b7813ef13e36d211"
-  integrity sha512-f9BESNVhzlhEFf2CHMSj40NWOjYPl1YKYbrvIr/hFTDEmLq7SRbWvm7FcdcpCYT95zrOhC7gZSxjdnnTpBcwVw==
+  version "0.7.33"
+  resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.33.tgz#1d04acb4ccef9293df6f70f2c3d22f3030d8b532"
+  integrity sha512-s8ax/CeZdK9R/56Sui0WM6y9OFREJarMRHqLB2EwkovemBxNQ+Bqu8GAsUnVcXKgphb++ghr/B2BZx4mahujPw==
 
 uglify-js@^3.1.4:
   version "3.17.4"
@@ -10192,6 +10199,16 @@ update-browserslist-db@^1.0.9:
     escalade "^3.1.1"
     picocolors "^1.0.0"
 
+uplot-react@^1.1.4:
+  version "1.1.4"
+  resolved "https://registry.yarnpkg.com/uplot-react/-/uplot-react-1.1.4.tgz#02b9918a199da9983fc0d375fb44e443749e2ac0"
+  integrity sha512-qO1UkQwjVKdj5vTm3O3yldvu1T6hwY4++rH4KznLhjqpnLdncq1zsRxq/zQz/HUHPVD0j7WBcEISbNM61JsuAQ==
+
+uplot@^1.6.24:
+  version "1.6.24"
+  resolved "https://registry.yarnpkg.com/uplot/-/uplot-1.6.24.tgz#dfa213fa7da92763261920ea972ed1a5f9f6af12"
+  integrity sha512-WpH2BsrFrqxkMu+4XBvc0eCDsRBhzoq9crttYeSI0bfxpzR5YoSVzZXOKFVWcVC7sp/aDXrdDPbDZGCtck2PVg==
+
 uri-js@^4.2.2:
   version "4.4.1"
   resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e"
diff --git a/pkgs/tools/admin/pgadmin/yarn.nix b/pkgs/tools/admin/pgadmin/yarn.nix
index d4c5e2c7b1ce1..0dc8367c7ca26 100644
--- a/pkgs/tools/admin/pgadmin/yarn.nix
+++ b/pkgs/tools/admin/pgadmin/yarn.nix
@@ -9224,6 +9224,14 @@
       };
     }
     {
+      name = "react_resize_detector___react_resize_detector_8.0.3.tgz";
+      path = fetchurl {
+        name = "react_resize_detector___react_resize_detector_8.0.3.tgz";
+        url  = "https://registry.yarnpkg.com/react-resize-detector/-/react-resize-detector-8.0.3.tgz";
+        sha512 = "c3eqm5BVcluVhxHsBQnhyPO/5uYB3XHIHz6D1ZOHzU2WcnZF0Cr3KLl5OIozRC2RSsdQlu5vn1PHEqrvKRnIYA==";
+      };
+    }
+    {
       name = "react_rnd___react_rnd_10.3.7.tgz";
       path = fetchurl {
         name = "react_rnd___react_rnd_10.3.7.tgz";
@@ -10912,11 +10920,11 @@
       };
     }
     {
-      name = "ua_parser_js___ua_parser_js_0.7.32.tgz";
+      name = "ua_parser_js___ua_parser_js_0.7.33.tgz";
       path = fetchurl {
-        name = "ua_parser_js___ua_parser_js_0.7.32.tgz";
-        url  = "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.32.tgz";
-        sha512 = "f9BESNVhzlhEFf2CHMSj40NWOjYPl1YKYbrvIr/hFTDEmLq7SRbWvm7FcdcpCYT95zrOhC7gZSxjdnnTpBcwVw==";
+        name = "ua_parser_js___ua_parser_js_0.7.33.tgz";
+        url  = "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.33.tgz";
+        sha512 = "s8ax/CeZdK9R/56Sui0WM6y9OFREJarMRHqLB2EwkovemBxNQ+Bqu8GAsUnVcXKgphb++ghr/B2BZx4mahujPw==";
       };
     }
     {
@@ -11048,6 +11056,22 @@
       };
     }
     {
+      name = "uplot_react___uplot_react_1.1.4.tgz";
+      path = fetchurl {
+        name = "uplot_react___uplot_react_1.1.4.tgz";
+        url  = "https://registry.yarnpkg.com/uplot-react/-/uplot-react-1.1.4.tgz";
+        sha512 = "qO1UkQwjVKdj5vTm3O3yldvu1T6hwY4++rH4KznLhjqpnLdncq1zsRxq/zQz/HUHPVD0j7WBcEISbNM61JsuAQ==";
+      };
+    }
+    {
+      name = "uplot___uplot_1.6.24.tgz";
+      path = fetchurl {
+        name = "uplot___uplot_1.6.24.tgz";
+        url  = "https://registry.yarnpkg.com/uplot/-/uplot-1.6.24.tgz";
+        sha512 = "WpH2BsrFrqxkMu+4XBvc0eCDsRBhzoq9crttYeSI0bfxpzR5YoSVzZXOKFVWcVC7sp/aDXrdDPbDZGCtck2PVg==";
+      };
+    }
+    {
       name = "uri_js___uri_js_4.4.1.tgz";
       path = fetchurl {
         name = "uri_js___uri_js_4.4.1.tgz";
diff --git a/pkgs/tools/admin/qovery-cli/default.nix b/pkgs/tools/admin/qovery-cli/default.nix
index 3b35207e84991..fa31ac964ff9b 100644
--- a/pkgs/tools/admin/qovery-cli/default.nix
+++ b/pkgs/tools/admin/qovery-cli/default.nix
@@ -8,16 +8,16 @@
 
 buildGoModule rec {
   pname = "qovery-cli";
-  version = "0.49.0";
+  version = "0.50.3";
 
   src = fetchFromGitHub {
     owner = "Qovery";
     repo = pname;
     rev = "v${version}";
-    hash = "sha256-O5JUWD7Wbe/5BM5fr6z76Re7PpRwFJV++lze+pv5el0=";
+    hash = "sha256-kvIY6BBkyV5TmpT8bhrn+OIP3/rbCy0EKxsFLIIFp8U=";
   };
 
-  vendorHash = "sha256-Hb4bqOK4h68ZCN/bTPQLd4hC7oZUrj21DupVA4GrlNA=";
+  vendorHash = "sha256-595Z6/jt+d81QMIKcbg7Y5UMtF8hnZipiBkt1LQt2AI=";
 
   nativeBuildInputs = [ installShellFiles ];
 
diff --git a/pkgs/tools/archivers/snzip/default.nix b/pkgs/tools/archivers/snzip/default.nix
index f3a9400687348..c759a7ac07f1e 100644
--- a/pkgs/tools/archivers/snzip/default.nix
+++ b/pkgs/tools/archivers/snzip/default.nix
@@ -6,13 +6,13 @@
 
 stdenv.mkDerivation rec {
   pname = "snzip";
-  version = "1.0.4";
+  version = "1.0.5";
 
   src = fetchFromGitHub {
     owner = "kubo";
     repo = "snzip";
-    rev = version;
-    sha256 = "1v8li1zv9f2g31iyi9y9zx42rjvwkaw221g60pmkbv53y667i325";
+    rev = "v${version}";
+    hash = "sha256-trxCGVNw2MugE7kmth62Qrp7JZcHeP1gdTZk32c3hFg=";
   };
 
   buildInputs = [ snappy ];
diff --git a/pkgs/tools/cd-dvd/vobsub2srt/default.nix b/pkgs/tools/cd-dvd/vobsub2srt/default.nix
index 159be4e14b619..d56cfdf86c8ea 100644
--- a/pkgs/tools/cd-dvd/vobsub2srt/default.nix
+++ b/pkgs/tools/cd-dvd/vobsub2srt/default.nix
@@ -1,4 +1,4 @@
-{ lib, stdenv, fetchFromGitHub, cmake, libtiff, pkg-config, tesseract }:
+{ lib, stdenv, fetchFromGitHub, cmake, libtiff, pkg-config, tesseract3 }:
 
 stdenv.mkDerivation rec {
   pname = "vobsub2srt";
@@ -15,7 +15,7 @@ stdenv.mkDerivation rec {
 
   nativeBuildInputs = [ cmake pkg-config ];
   buildInputs = [ libtiff ];
-  propagatedBuildInputs = [ tesseract ];
+  propagatedBuildInputs = [ tesseract3 ];
 
   meta = {
     homepage = "https://github.com/ruediger/VobSub2SRT";
diff --git a/pkgs/tools/graphics/gmic-qt/default.nix b/pkgs/tools/graphics/gmic-qt/default.nix
index f9766650ec419..3d1a9aa6607ac 100644
--- a/pkgs/tools/graphics/gmic-qt/default.nix
+++ b/pkgs/tools/graphics/gmic-qt/default.nix
@@ -1,7 +1,8 @@
 { lib
 , mkDerivation
 , variant ? "standalone"
-, fetchFromGitHub
+, fetchzip
+, fetchpatch
 , cmake
 , pkg-config
 , ninja
@@ -16,7 +17,6 @@
 , curl
 , gimp ? null
 , gmic
-, cimg
 , qtbase
 , qttools
 , writeShellScript
@@ -54,13 +54,21 @@ mkDerivation rec {
   pname = "gmic-qt${lib.optionalString (variant != "standalone") "-${variant}"}";
   version = "3.2.1";
 
-  src = fetchFromGitHub {
-    owner = "c-koi";
-    repo = "gmic-qt";
-    rev = "v.${version}";
-    sha256 = "sha256-z+GtYLBcHVufXwdeSd8WKmPmU1+/EKMv26kNaEgyt5w=";
+  src = fetchzip {
+    url = "https://gmic.eu/files/source/gmic_${version}.tar.gz";
+    hash = "sha256-2lMnn19FcFKnfIjSxOObqxIjqLMUoWgi0ADZBCBePY4=";
   };
 
+  patches = [
+    (fetchpatch {
+      name = "gmic-qt-3.2.1-fix-system-gmic.patch";
+      url = "https://github.com/c-koi/gmic-qt/commit/e8d7a3523753ff592da63b1d54edf0921c54fe53.patch";
+      hash = "sha256-kBFZo2qvod4pH3oK8gvnmw39x6eMH9zjr4mMcY74mFo=";
+    })
+  ];
+
+  sourceRoot = "source/gmic-qt";
+
   nativeBuildInputs = [
     cmake
     pkg-config
@@ -69,7 +77,6 @@ mkDerivation rec {
 
   buildInputs = [
     gmic
-    cimg
     qtbase
     qttools
     fftw
@@ -85,7 +92,8 @@ mkDerivation rec {
 
   cmakeFlags = [
     "-DGMIC_QT_HOST=${if variant == "standalone" then "none" else variant}"
-    "-DENABLE_SYSTEM_GMIC:BOOL=ON"
+    "-DENABLE_SYSTEM_GMIC=ON"
+    "-DENABLE_DYNAMIC_LINKING=ON"
   ];
 
   postPatch = ''
@@ -108,8 +116,6 @@ mkDerivation rec {
   };
 
   meta = with lib; {
-    # Broken since 3.2.0 update, cannot handle system gmic and cimg.
-    broken = true;
     description = variants.${variant}.description;
     homepage = "http://gmic.eu/";
     license = licenses.gpl3Plus;
diff --git a/pkgs/tools/graphics/gmic/default.nix b/pkgs/tools/graphics/gmic/default.nix
index 52499196bba4b..68d07ed734b68 100644
--- a/pkgs/tools/graphics/gmic/default.nix
+++ b/pkgs/tools/graphics/gmic/default.nix
@@ -1,6 +1,7 @@
 { stdenv
 , lib
 , fetchFromGitHub
+, fetchpatch
 , fetchurl
 , cmake
 , ninja
@@ -37,6 +38,14 @@ stdenv.mkDerivation rec {
     hash = "sha256-oEH4GlSV+642TGSJJhV4yzydh1hAQZfzwaiPAZFNQtI=";
   };
 
+  patches = [
+    (fetchpatch {
+      name = "gmic-3.2.1-fix-system-gmic.patch";
+      url = "https://github.com/GreycLab/gmic/commit/9db3f6a39d9ed67b4279654da88993a8057575ff.patch";
+      hash = "sha256-JznKCs56t6cJ4HLqlhMZjSOupEB8cdkn3j6RgZpcpzo=";
+    })
+  ];
+
   # TODO: build this from source
   # https://github.com/dtschump/gmic/blob/b36b2428db5926af5eea5454f822f369c2d9907e/src/Makefile#L675-L729
   gmic_stdlib = fetchurl {
diff --git a/pkgs/tools/misc/atuin/default.nix b/pkgs/tools/misc/atuin/default.nix
index 9d5a9fc927049..a1f7df1d45973 100644
--- a/pkgs/tools/misc/atuin/default.nix
+++ b/pkgs/tools/misc/atuin/default.nix
@@ -34,11 +34,11 @@ rustPlatform.buildRustPackage rec {
       --zsh <($out/bin/atuin gen-completions -s zsh)
   '';
 
-  nativeCheckInputs = [
+  nativeCheckInputs = lib.optionals xvfb-run.meta.available [
     xvfb-run
   ];
 
-  checkPhase = ''
+  checkPhase = lib.optionalString xvfb-run.meta.available ''
     runHook preCheck
     xvfb-run cargo test
     runHook postCheck
diff --git a/pkgs/tools/misc/bash_unit/default.nix b/pkgs/tools/misc/bash_unit/default.nix
index 90e34286d38e7..5857bea6251d2 100644
--- a/pkgs/tools/misc/bash_unit/default.nix
+++ b/pkgs/tools/misc/bash_unit/default.nix
@@ -4,13 +4,13 @@
 
 stdenv.mkDerivation rec {
   pname = "bash_unit";
-  version = "2.0.1";
+  version = "2.1.0";
 
   src = fetchFromGitHub {
     owner = "pgrange";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-P3fDfv7SexrNMynZBbgwwZcH2H/t4bwFM4HULlLaiM4=";
+    sha256 = "sha256-c1C+uBo5PSH07VjulCxkmvfj7UYm6emdDAaN00uvAcg=";
   };
 
   installPhase = ''
diff --git a/pkgs/tools/misc/fortune/default.nix b/pkgs/tools/misc/fortune/default.nix
index e82b13d12c65e..e69161541ff39 100644
--- a/pkgs/tools/misc/fortune/default.nix
+++ b/pkgs/tools/misc/fortune/default.nix
@@ -1,17 +1,17 @@
-{ lib, stdenv, fetchurl, cmake, recode, perl, withOffensive ? false }:
+{ lib, stdenv, fetchurl, cmake, recode, perl, rinutils, withOffensive ? false }:
 
 stdenv.mkDerivation rec {
   pname = "fortune-mod";
-  version = "3.14.1";
+  version = "3.16.0";
 
   # We use fetchurl instead of fetchFromGitHub because the release pack has some
   # special files.
   src = fetchurl {
     url = "https://github.com/shlomif/fortune-mod/releases/download/${pname}-${version}/${pname}-${version}.tar.xz";
-    sha256 = "sha256-NnAj9dsB1ZUuTm2W8mPdK2h15Dtro8ve6c+tPoKUsXs=";
+    sha256 = "sha256-dkpkTBulXaN52BHaV4MWEIoQFkmWaG66O9Ppes/GLPo=";
   };
 
-  nativeBuildInputs = [ cmake perl ];
+  nativeBuildInputs = [ cmake perl rinutils ];
 
   buildInputs = [ recode ];
 
diff --git a/pkgs/tools/misc/vector/default.nix b/pkgs/tools/misc/vector/default.nix
index 359b6b434b397..6d4389683e2dc 100644
--- a/pkgs/tools/misc/vector/default.nix
+++ b/pkgs/tools/misc/vector/default.nix
@@ -33,7 +33,7 @@
 
 let
   pname = "vector";
-  version = "0.27.0";
+  version = "0.28.0";
 in
 rustPlatform.buildRustPackage {
   inherit pname version;
@@ -42,10 +42,10 @@ rustPlatform.buildRustPackage {
     owner = "vectordotdev";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-+jap7cexevEky3H+Ct9LXXUwHR5tnbzdN+b13pv3f70=";
+    sha256 = "sha256-1HNOfIB1HmGthNkJca4tfxcq7nDTjN+tsbsP3rLlXi4=";
   };
 
-  cargoSha256 = "sha256-KehBEwoz5N0zQLDk+9vwFSrn1TrVwljFj+asr7q7hmw=";
+  cargoSha256 = "sha256-m4+9vET4VVgeVVN+nuFGi54OvdiubLO5gZE2mj5c+hc=";
   nativeBuildInputs = [ pkg-config cmake perl ];
   buildInputs = [ oniguruma openssl protobuf rdkafka zstd ]
     ++ lib.optionals stdenv.isDarwin [ Security libiconv coreutils CoreServices ];
diff --git a/pkgs/tools/misc/yt-dlp/default.nix b/pkgs/tools/misc/yt-dlp/default.nix
index d9331a8c28afd..f0d8dc7697b6c 100644
--- a/pkgs/tools/misc/yt-dlp/default.nix
+++ b/pkgs/tools/misc/yt-dlp/default.nix
@@ -21,11 +21,11 @@ buildPythonPackage rec {
   # The websites yt-dlp deals with are a very moving target. That means that
   # downloads break constantly. Because of that, updates should always be backported
   # to the latest stable release.
-  version = "2023.2.17";
+  version = "2023.3.3";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "sha256-mvkt5e/8GTvbUSFtnr8oh02WGA0gL651Kw2fKmM4Dzo=";
+    sha256 = "sha256-d/017Y9ZtYDP9RYbE9137pklYZpzVTt8A+srCt6nH7c=";
   };
 
   propagatedBuildInputs = [ brotli certifi mutagen pycryptodomex websockets ];
diff --git a/pkgs/tools/networking/libreswan/default.nix b/pkgs/tools/networking/libreswan/default.nix
index ab3249e57a873..47838f7566354 100644
--- a/pkgs/tools/networking/libreswan/default.nix
+++ b/pkgs/tools/networking/libreswan/default.nix
@@ -45,11 +45,11 @@ in
 
 stdenv.mkDerivation rec {
   pname = "libreswan";
-  version = "4.9";
+  version = "4.10";
 
   src = fetchurl {
     url = "https://download.libreswan.org/${pname}-${version}.tar.gz";
-    sha256 = "sha256-9kLctjXpCVZMqP2Z6kSrQ/YHI7TXbBWO2BKXjEWzmLk=";
+    sha256 = "sha256-WpQAwlqO26B0IEJvtV3Lqv2qNwLlsPLBkgWmxWckins=";
   };
 
   strictDeps = true;
diff --git a/pkgs/tools/networking/ookla-speedtest/default.nix b/pkgs/tools/networking/ookla-speedtest/default.nix
index 0ad02a53ee6cc..d889414bc118d 100644
--- a/pkgs/tools/networking/ookla-speedtest/default.nix
+++ b/pkgs/tools/networking/ookla-speedtest/default.nix
@@ -9,10 +9,18 @@ let
       url = "https://install.speedtest.net/app/cli/${pname}-${version}-linux-x86_64.tgz";
       sha256 = "sha256-VpBZbFT/m+1j+jcy+BigXbwtsZrTbtaPIcpfZNXP7rc=";
     };
+    i686-linux = fetchurl {
+      url = "https://install.speedtest.net/app/cli/${pname}-${version}-linux-i386.tgz";
+      sha256 = "sha256-n/fhjbrn7g4DxmEIRFovts7qbIb2ZILhOS9ViBt3L+g=";
+    };
     aarch64-linux = fetchurl {
       url = "https://install.speedtest.net/app/cli/${pname}-${version}-linux-aarch64.tgz";
       sha256 = "sha256-OVPSMdo3g+K/iQS23XJ2fFxuUz4WPTdC/QQ3r/pDG9M=";
     };
+    armv7l-linux = fetchurl {
+      url = "https://install.speedtest.net/app/cli/${pname}-${version}-linux-armhf.tgz";
+      sha256 = "sha256-5F/N672KGFVTU1Uz3QMtaxC8jGTu5BObEUe5wJg10I0=";
+    };
     x86_64-darwin = fetchurl {
       url = "https://install.speedtest.net/app/cli/${pname}-${version}-macosx-universal.tgz";
       sha256 = "sha256-yfgZIUnryI+GmZmM7Ksc4UQUQEWQfs5vU89Qh39N5m8=";
diff --git a/pkgs/tools/networking/pacparser/default.nix b/pkgs/tools/networking/pacparser/default.nix
index c55a0dae34c32..8caf4568c99bc 100644
--- a/pkgs/tools/networking/pacparser/default.nix
+++ b/pkgs/tools/networking/pacparser/default.nix
@@ -2,13 +2,13 @@
 
 stdenv.mkDerivation rec {
   pname = "pacparser";
-  version = "1.4.0";
+  version = "1.4.1";
 
   src = fetchFromGitHub {
     owner = "manugarg";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-XtYXUqmBnsqI+0b7Dnynni544d49z1eGH8ihAAqQe7Q=";
+    sha256 = "sha256-tEbkMRHCdiKXpz9Ksg2LEzfOVhF8xbUHWMeExPMlGVM=";
   };
 
   makeFlags = [ "NO_INTERNET=1" ];
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py
index 637185227e83f..7fc14c1631ef0 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py
@@ -1,13 +1,11 @@
-from collections.abc import Mapping, MutableMapping, Sequence
+from collections.abc import Mapping, Sequence
 from dataclasses import dataclass
 from typing import Any, cast, Optional
 from urllib.parse import quote
 
 from .md import Renderer
 
-import markdown_it
 from markdown_it.token import Token
-from markdown_it.utils import OptionsDict
 
 _asciidoc_escapes = {
     # escape all dots, just in case one is pasted at SOL
@@ -59,8 +57,8 @@ class AsciiDocRenderer(Renderer):
     _list_stack: list[List]
     _attrspans: list[str]
 
-    def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
-        super().__init__(manpage_urls, parser)
+    def __init__(self, manpage_urls: Mapping[str, str]):
+        super().__init__(manpage_urls)
         self._parstack = [ Par("\n\n", "====") ]
         self._list_stack = []
         self._attrspans = []
@@ -96,142 +94,103 @@ class AsciiDocRenderer(Renderer):
         self._list_stack.pop()
         return ""
 
-    def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-             env: MutableMapping[str, Any]) -> str:
+    def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._parstack[-1].continuing = True
         return asciidoc_escape(token.content)
-    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._break()
-    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return ""
-    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return " +\n"
-    def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return f" "
-    def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                    env: MutableMapping[str, Any]) -> str:
+    def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._parstack[-1].continuing = True
         return f"``{asciidoc_escape(token.content)}``"
-    def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
-        return self.fence(token, tokens, i, options, env)
-    def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return self.fence(token, tokens, i)
+    def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._parstack[-1].continuing = True
         return f"link:{quote(cast(str, token.attrs['href']), safe='/:')}["
-    def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "]"
-    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._enter_block(True)
         # allow the next token to be a block or an inline.
         return f'\n{self._list_stack[-1].head} {{empty}}'
-    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._leave_block()
         return "\n"
-    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
+    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._list_open(token, '*')
-    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                          env: MutableMapping[str, Any]) -> str:
+    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._list_close()
-    def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "__"
-    def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "__"
-    def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                    env: MutableMapping[str, Any]) -> str:
+    def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "**"
-    def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "**"
-    def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-              env: MutableMapping[str, Any]) -> str:
+    def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         attrs = f"[source,{token.info}]\n" if token.info else ""
         code = token.content
         if code.endswith('\n'):
             code = code[:-1]
         return f"{self._break(True)}{attrs}----\n{code}\n----"
-    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         pbreak = self._break(True)
         self._enter_block(False)
         return f"{pbreak}[quote]\n{self._parstack[-2].block_delim}\n"
-    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
+    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._leave_block()
         return f"\n{self._parstack[-1].block_delim}"
-    def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("NOTE")
-    def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("CAUTION")
-    def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("IMPORTANT")
-    def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("TIP")
-    def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("WARNING")
-    def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return f"{self._break()}[]"
-    def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return ""
-    def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._break()
-    def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._enter_block(True)
         return ":: {empty}"
-    def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return ""
-    def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._leave_block()
         return "\n"
-    def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._parstack[-1].continuing = True
         content = asciidoc_escape(token.content)
         if token.meta['name'] == 'manpage' and (url := self._manpage_urls.get(token.content)):
             return f"link:{quote(url, safe='/:')}[{content}]"
         return f"[.{token.meta['name']}]``{asciidoc_escape(token.content)}``"
-    def inline_anchor(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def inline_anchor(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._parstack[-1].continuing = True
         return f"[[{token.attrs['id']}]]"
-    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._parstack[-1].continuing = True
         (id_part, class_part) = ("", "")
         if id := token.attrs.get('id'):
@@ -241,22 +200,17 @@ class AsciiDocRenderer(Renderer):
                 class_part = "kbd:["
                 self._attrspans.append("]")
             else:
-                return super().attr_span_begin(token, tokens, i, options, env)
+                return super().attr_span_begin(token, tokens, i)
         else:
             self._attrspans.append("")
         return id_part + class_part
-    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._attrspans.pop()
-    def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return token.markup.replace("#", "=") + " "
-    def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "\n"
-    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                          env: MutableMapping[str, Any]) -> str:
+    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._list_open(token, '.')
-    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                           env: MutableMapping[str, Any]) -> str:
+    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._list_close()
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py
index 4a708b1f92c66..9649eb653d444 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py
@@ -1,12 +1,10 @@
-from collections.abc import Mapping, MutableMapping, Sequence
+from collections.abc import Mapping, Sequence
 from dataclasses import dataclass
 from typing import Any, cast, Optional
 
 from .md import md_escape, md_make_code, Renderer
 
-import markdown_it
 from markdown_it.token import Token
-from markdown_it.utils import OptionsDict
 
 @dataclass(kw_only=True)
 class List:
@@ -26,8 +24,8 @@ class CommonMarkRenderer(Renderer):
     _link_stack: list[str]
     _list_stack: list[List]
 
-    def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
-        super().__init__(manpage_urls, parser)
+    def __init__(self, manpage_urls: Mapping[str, str]):
+        super().__init__(manpage_urls)
         self._parstack = [ Par("") ]
         self._link_stack = []
         self._list_stack = []
@@ -58,39 +56,29 @@ class CommonMarkRenderer(Renderer):
             return s
         return f"\n{self._parstack[-1].indent}".join(s.splitlines())
 
-    def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-             env: MutableMapping[str, Any]) -> str:
+    def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._parstack[-1].continuing = True
         return self._indent_raw(md_escape(token.content))
-    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._maybe_parbreak()
-    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return ""
-    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return f"  {self._break()}"
-    def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._break()
-    def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                    env: MutableMapping[str, Any]) -> str:
+    def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._parstack[-1].continuing = True
         return md_make_code(token.content)
-    def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
-        return self.fence(token, tokens, i, options, env)
-    def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return self.fence(token, tokens, i)
+    def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._parstack[-1].continuing = True
         self._link_stack.append(cast(str, token.attrs['href']))
         return "["
-    def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return f"]({md_escape(self._link_stack.pop())})"
-    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         lst = self._list_stack[-1]
         lbreak = "" if not lst.first_item_seen else self._break() * (1 if lst.compact else 2)
         lst.first_item_seen = True
@@ -100,132 +88,99 @@ class CommonMarkRenderer(Renderer):
             lst.next_idx += 1
         self._enter_block(" " * (len(head) + 1))
         return f'{lbreak}{head} '
-    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._leave_block()
         return ""
-    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
+    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._list_stack.append(List(compact=bool(token.meta['compact'])))
         return self._maybe_parbreak()
-    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                          env: MutableMapping[str, Any]) -> str:
+    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._list_stack.pop()
         return ""
-    def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "*"
-    def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "*"
-    def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                    env: MutableMapping[str, Any]) -> str:
+    def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "**"
-    def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "**"
-    def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-              env: MutableMapping[str, Any]) -> str:
+    def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         code = token.content
         if code.endswith('\n'):
             code = code[:-1]
         pbreak = self._maybe_parbreak()
         return pbreak + self._indent_raw(md_make_code(code, info=token.info, multiline=True))
-    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         pbreak = self._maybe_parbreak()
         self._enter_block("> ")
         return pbreak + "> "
-    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
+    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._leave_block()
         return ""
-    def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("Note")
-    def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("Caution")
-    def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("Important")
-    def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("Tip")
-    def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("Warning")
-    def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._list_stack.append(List(compact=False))
         return ""
-    def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._list_stack.pop()
         return ""
-    def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         pbreak = self._maybe_parbreak()
         self._enter_block("   ")
         # add an opening zero-width non-joiner to separate *our* emphasis from possible
         # emphasis in the provided term
         return f'{pbreak} - *{chr(0x200C)}'
-    def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return f"{chr(0x200C)}*"
-    def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._parstack[-1].continuing = True
         return ""
-    def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._leave_block()
         return ""
-    def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._parstack[-1].continuing = True
         content = md_make_code(token.content)
         if token.meta['name'] == 'manpage' and (url := self._manpage_urls.get(token.content)):
             return f"[{content}]({url})"
         return content # no roles in regular commonmark
-    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         # there's no way we can emit attrspans correctly in all cases. we could use inline
         # html for ids, but that would not round-trip. same holds for classes. since this
         # renderer is only used for approximate options export and all of these things are
         # not allowed in options we can ignore them for now.
         return ""
-    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return ""
-    def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return token.markup + " "
-    def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "\n"
-    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                          env: MutableMapping[str, Any]) -> str:
+    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._list_stack.append(
             List(next_idx = cast(int, token.attrs.get('start', 1)),
                  compact  = bool(token.meta['compact'])))
         return self._maybe_parbreak()
-    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                           env: MutableMapping[str, Any]) -> str:
+    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._list_stack.pop()
         return ""
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py
index e6a761dcf13fd..4c90606ff4558 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py
@@ -1,9 +1,8 @@
-from collections.abc import Mapping, MutableMapping, Sequence
+from collections.abc import Mapping, Sequence
 from typing import Any, cast, Optional, NamedTuple
 
 import markdown_it
 from markdown_it.token import Token
-from markdown_it.utils import OptionsDict
 from xml.sax.saxutils import escape, quoteattr
 
 from .md import Renderer
@@ -32,26 +31,23 @@ class Heading(NamedTuple):
     partintro_closed: bool = False
 
 class DocBookRenderer(Renderer):
-    __output__ = "docbook"
     _link_tags: list[str]
     _deflists: list[Deflist]
     _headings: list[Heading]
     _attrspans: list[str]
 
-    def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
-        super().__init__(manpage_urls, parser)
+    def __init__(self, manpage_urls: Mapping[str, str]):
+        super().__init__(manpage_urls)
         self._link_tags = []
         self._deflists = []
         self._headings = []
         self._attrspans = []
 
-    def render(self, tokens: Sequence[Token], options: OptionsDict,
-               env: MutableMapping[str, Any]) -> str:
-        result = super().render(tokens, options, env)
-        result += self._close_headings(None, env)
+    def render(self, tokens: Sequence[Token]) -> str:
+        result = super().render(tokens)
+        result += self._close_headings(None)
         return result
-    def renderInline(self, tokens: Sequence[Token], options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def renderInline(self, tokens: Sequence[Token]) -> str:
         # HACK to support docbook links and xrefs. link handling is only necessary because the docbook
         # manpage stylesheet converts - in urls to a mathematical minus, which may be somewhat incorrect.
         for i, token in enumerate(tokens):
@@ -65,135 +61,98 @@ class DocBookRenderer(Renderer):
             if tokens[i + 1].type == 'text' and tokens[i + 1].content == token.attrs['href']:
                 tokens[i + 1].content = ''
 
-        return super().renderInline(tokens, options, env)
+        return super().renderInline(tokens)
 
-    def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-             env: MutableMapping[str, Any]) -> str:
+    def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return escape(token.content)
-    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<para>"
-    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</para>"
-    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<literallayout>\n</literallayout>"
-    def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         # should check options.breaks() and emit hard break if so
         return "\n"
-    def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                    env: MutableMapping[str, Any]) -> str:
+    def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return f"<literal>{escape(token.content)}</literal>"
-    def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return f"<programlisting>{escape(token.content)}</programlisting>"
-    def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._link_tags.append(token.tag)
         href = cast(str, token.attrs['href'])
         (attr, start) = ('linkend', 1) if href[0] == '#' else ('xlink:href', 0)
         return f"<{token.tag} {attr}={quoteattr(href[start:])}>"
-    def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return f"</{self._link_tags.pop()}>"
-    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<listitem>"
-    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</listitem>\n"
     # HACK open and close para for docbook change size. remove soon.
-    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
+    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         spacing = ' spacing="compact"' if token.meta.get('compact', False) else ''
         return f"<para><itemizedlist{spacing}>\n"
-    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                          env: MutableMapping[str, Any]) -> str:
+    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "\n</itemizedlist></para>"
-    def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<emphasis>"
-    def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</emphasis>"
-    def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                    env: MutableMapping[str, Any]) -> str:
+    def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<emphasis role=\"strong\">"
-    def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</emphasis>"
-    def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-              env: MutableMapping[str, Any]) -> str:
+    def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         info = f" language={quoteattr(token.info)}" if token.info != "" else ""
         return f"<programlisting{info}>{escape(token.content)}</programlisting>"
-    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<para><blockquote>"
-    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
+    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</blockquote></para>"
-    def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<para><note>"
-    def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</note></para>"
-    def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<para><caution>"
-    def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</caution></para>"
-    def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<para><important>"
-    def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</important></para>"
-    def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<para><tip>"
-    def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</tip></para>"
-    def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "<para><warning>"
-    def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</warning></para>"
     # markdown-it emits tokens based on the html syntax tree, but docbook is
     # slightly different. html has <dl>{<dt/>{<dd/>}}</dl>,
     # docbook has <variablelist>{<varlistentry><term/><listitem/></varlistentry>}</variablelist>
     # we have to reject multiple definitions for the same term for the time being.
-    def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._deflists.append(Deflist())
         return "<para><variablelist>"
-    def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._deflists.pop()
         return "</variablelist></para>"
-    def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._deflists[-1].has_dd = False
         return "<varlistentry><term>"
-    def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</term>"
-    def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         if self._deflists[-1].has_dd:
             raise Exception("multiple definitions per term not supported")
         self._deflists[-1].has_dd = True
         return "<listitem>"
-    def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return "</listitem></varlistentry>"
-    def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         if token.meta['name'] == 'command':
             return f"<command>{escape(token.content)}</command>"
         if token.meta['name'] == 'file':
@@ -216,8 +175,7 @@ class DocBookRenderer(Renderer):
             else:
                 return ref
         raise NotImplementedError("md node not supported yet", token)
-    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         # we currently support *only* inline anchors and the special .keycap class to produce
         # <keycap> docbook elements.
         (id_part, class_part) = ("", "")
@@ -228,31 +186,26 @@ class DocBookRenderer(Renderer):
                 class_part = "<keycap>"
                 self._attrspans.append("</keycap>")
             else:
-                return super().attr_span_begin(token, tokens, i, options, env)
+                return super().attr_span_begin(token, tokens, i)
         else:
             self._attrspans.append("")
         return id_part + class_part
-    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._attrspans.pop()
-    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                          env: MutableMapping[str, Any]) -> str:
+    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         start = f' startingnumber="{token.attrs["start"]}"' if 'start' in token.attrs else ""
         spacing = ' spacing="compact"' if token.meta.get('compact', False) else ''
         return f"<orderedlist{start}{spacing}>"
-    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                           env: MutableMapping[str, Any]) -> str:
+    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return f"</orderedlist>"
-    def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         hlevel = int(token.tag[1:])
-        result = self._close_headings(hlevel, env)
-        (tag, attrs) = self._heading_tag(token, tokens, i, options, env)
+        result = self._close_headings(hlevel)
+        (tag, attrs) = self._heading_tag(token, tokens, i)
         self._headings.append(Heading(tag, hlevel))
         attrs_str = "".join([ f" {k}={quoteattr(v)}" for k, v in attrs.items() ])
         return result + f'<{tag}{attrs_str}>\n<title>'
-    def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         heading = self._headings[-1]
         result = '</title>'
         if heading.container_tag == 'part':
@@ -264,16 +217,14 @@ class DocBookRenderer(Renderer):
                 maybe_id = " xml:id=" + quoteattr(id + "-intro")
             result += f"<partintro{maybe_id}>"
         return result
-    def example_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         if id := token.attrs.get('id'):
             return f"<anchor xml:id={quoteattr(cast(str, id))} />"
         return ""
-    def example_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return ""
 
-    def _close_headings(self, level: Optional[int], env: MutableMapping[str, Any]) -> str:
+    def _close_headings(self, level: Optional[int]) -> str:
         # we rely on markdown-it producing h{1..6} tags in token.tag for this to work
         result = []
         while len(self._headings):
@@ -286,8 +237,7 @@ class DocBookRenderer(Renderer):
                 break
         return "\n".join(result)
 
-    def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> tuple[str, dict[str, str]]:
+    def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> tuple[str, dict[str, str]]:
         attrs = {}
         if id := token.attrs.get('id'):
             attrs['xml:id'] = cast(str, id)
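
The hunk above drops the `options`/`env` parameters that were threaded through every per-token
render rule; each rule now receives only the token, the full token sequence, and its index. A
minimal sketch of that calling convention, assuming nothing beyond markdown-it-py (the
TinyRenderer below is illustrative only and is not the Renderer base class from .md):

    from collections.abc import Sequence
    from markdown_it import MarkdownIt
    from markdown_it.token import Token

    class TinyRenderer:
        # per-token-type rules use the slimmed-down (token, tokens, i) signature
        def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
            return token.content
        def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
            return f"`{token.content}`"
        def render(self, tokens: Sequence[Token]) -> str:
            out = []
            for i, token in enumerate(tokens):
                if token.type == 'inline' and token.children:
                    out.append(self.render(token.children))
                elif (rule := getattr(self, token.type, None)) is not None:
                    out.append(rule(token, tokens, i))
            return "".join(out)

    print(TinyRenderer().render(MarkdownIt().parse("some *text* with `code`")))
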
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py
new file mode 100644
index 0000000000000..39d2da6adf8c0
--- /dev/null
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py
@@ -0,0 +1,245 @@
+from collections.abc import Mapping, Sequence
+from typing import cast, Optional, NamedTuple
+
+from html import escape
+from markdown_it.token import Token
+
+from .manual_structure import XrefTarget
+from .md import Renderer
+
+class UnresolvedXrefError(Exception):
+    pass
+
+class Heading(NamedTuple):
+    container_tag: str
+    level: int
+    html_tag: str
+    # special handling for part content: whether the partintro div was already closed from
+    # elsewhere or still needs closing.
+    partintro_closed: bool
+    # tocs are generated when the heading opens, but have to be emitted into the file
+    # after the heading titlepage (and maybe the partintro) has been closed.
+    toc_fragment: str
+
+_bullet_list_styles = [ 'disc', 'circle', 'square' ]
+_ordered_list_styles = [ '1', 'a', 'i', 'A', 'I' ]
+
+class HTMLRenderer(Renderer):
+    _xref_targets: Mapping[str, XrefTarget]
+
+    _headings: list[Heading]
+    _attrspans: list[str]
+    _hlevel_offset: int = 0
+    _bullet_list_nesting: int = 0
+    _ordered_list_nesting: int = 0
+
+    def __init__(self, manpage_urls: Mapping[str, str], xref_targets: Mapping[str, XrefTarget]):
+        super().__init__(manpage_urls)
+        self._headings = []
+        self._attrspans = []
+        self._xref_targets = xref_targets
+
+    def render(self, tokens: Sequence[Token]) -> str:
+        result = super().render(tokens)
+        result += self._close_headings(None)
+        return result
+
+    def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return escape(token.content)
+    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "<p>"
+    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</p>"
+    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "<br />"
+    def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "\n"
+    def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return f'<code class="literal">{escape(token.content)}</code>'
+    def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return self.fence(token, tokens, i)
+    def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        href = escape(cast(str, token.attrs['href']), True)
+        tag, title, target, text = "link", "", 'target="_top"', ""
+        if href.startswith('#'):
+            if not (xref := self._xref_targets.get(href[1:])):
+                raise UnresolvedXrefError(f"bad local reference, id {href} not known")
+            if tokens[i + 1].type == 'link_close':
+                tag, text = "xref", xref.title_html
+            if xref.title:
+                title = f'title="{escape(xref.title, True)}"'
+            target, href = "", xref.href()
+        return f'<a class="{tag}" href="{href}" {title} {target}>{text}'
+    def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</a>"
+    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return '<li class="listitem">'
+    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</li>"
+    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        extra = 'compact' if token.meta.get('compact', False) else ''
+        style = _bullet_list_styles[self._bullet_list_nesting % len(_bullet_list_styles)]
+        self._bullet_list_nesting += 1
+        return f'<div class="itemizedlist"><ul class="itemizedlist {extra}" style="list-style-type: {style};">'
+    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        self._bullet_list_nesting -= 1
+        return "</ul></div>"
+    def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return '<span class="emphasis"><em>'
+    def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</em></span>"
+    def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return '<span class="strong"><strong>'
+    def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</strong></span>"
+    def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        # TODO use token.info. docbook doesn't so we can't yet.
+        return f'<pre class="programlisting">\n{escape(token.content)}</pre>'
+    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return '<div class="blockquote"><blockquote class="blockquote">'
+    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</blockquote></div>"
+    def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return '<div class="note"><h3 class="title">Note</h3>'
+    def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</div>"
+    def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return '<div class="caution"><h3 class="title">Caution</h3>'
+    def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</div>"
+    def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return '<div class="important"><h3 class="title">Important</h3>'
+    def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</div>"
+    def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return '<div class="tip"><h3 class="title">Tip</h3>'
+    def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</div>"
+    def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return '<div class="warning"><h3 class="title">Warning</h3>'
+    def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</div>"
+    def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return '<div class="variablelist"><dl class="variablelist">'
+    def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</dl></div>"
+    def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return '<dt><span class="term">'
+    def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</span></dt>"
+    def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "<dd>"
+    def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "</dd>"
+    def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        if token.meta['name'] == 'command':
+            return f'<span class="command"><strong>{escape(token.content)}</strong></span>'
+        if token.meta['name'] == 'file':
+            return f'<code class="filename">{escape(token.content)}</code>'
+        if token.meta['name'] == 'var':
+            return f'<code class="varname">{escape(token.content)}</code>'
+        if token.meta['name'] == 'env':
+            return f'<code class="envar">{escape(token.content)}</code>'
+        if token.meta['name'] == 'option':
+            return f'<code class="option">{escape(token.content)}</code>'
+        if token.meta['name'] == 'manpage':
+            [page, section] = [ s.strip() for s in token.content.rsplit('(', 1) ]
+            section = section[:-1]
+            man = f"{page}({section})"
+            title = f'<span class="refentrytitle">{escape(page)}</span>'
+            vol = f"({escape(section)})"
+            ref = f'<span class="citerefentry">{title}{vol}</span>'
+            if man in self._manpage_urls:
+                return f'<a class="link" href="{escape(self._manpage_urls[man], True)}" target="_top">{ref}</a>'
+            else:
+                return ref
+        return super().myst_role(token, tokens, i)
+    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        # we currently support *only* inline anchors and the special .keycap class to produce
+        # keycap-styled spans.
+        (id_part, class_part) = ("", "")
+        if s := token.attrs.get('id'):
+            id_part = f'<a id="{escape(cast(str, s), True)}" />'
+        if s := token.attrs.get('class'):
+            if s == 'keycap':
+                class_part = '<span class="keycap"><strong>'
+                self._attrspans.append("</strong></span>")
+            else:
+                return super().attr_span_begin(token, tokens, i)
+        else:
+            self._attrspans.append("")
+        return id_part + class_part
+    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return self._attrspans.pop()
+    def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        hlevel = int(token.tag[1:])
+        htag, hstyle = self._make_hN(hlevel)
+        if hstyle:
+            hstyle = f'style="{escape(hstyle, True)}"'
+        if anchor := cast(str, token.attrs.get('id', '')):
+            anchor = f'<a id="{escape(anchor, True)}"></a>'
+        result = self._close_headings(hlevel)
+        tag = self._heading_tag(token, tokens, i)
+        toc_fragment = self._build_toc(tokens, i)
+        self._headings.append(Heading(tag, hlevel, htag, tag != 'part', toc_fragment))
+        return (
+            f'{result}'
+            f'<div class="{tag}">'
+            f' <div class="titlepage">'
+            f'  <div>'
+            f'   <div>'
+            f'    <{htag} class="title" {hstyle}>'
+            f'     {anchor}'
+        )
+    def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        heading = self._headings[-1]
+        result = (
+            f'   </{heading.html_tag}>'
+            f'  </div>'
+            f' </div>'
+            f'</div>'
+        )
+        if heading.container_tag == 'part':
+            result += '<div class="partintro">'
+        else:
+            result += heading.toc_fragment
+        return result
+    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        extra = 'compact' if token.meta.get('compact', False) else ''
+        start = f'start="{token.attrs["start"]}"' if 'start' in token.attrs else ""
+        style = _ordered_list_styles[self._ordered_list_nesting % len(_ordered_list_styles)]
+        self._ordered_list_nesting += 1
+        return f'<div class="orderedlist"><ol class="orderedlist {extra}" {start} type="{style}">'
+    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        self._ordered_list_nesting -= 1
+        return "</ol></div>"
+    def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        if id := token.attrs.get('id'):
+            return f'<a id="{escape(cast(str, id), True)}" />'
+        return ""
+    def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return ""
+
+    def _make_hN(self, level: int) -> tuple[str, str]:
+        return f"h{min(6, max(1, level + self._hlevel_offset))}", ""
+
+    def _maybe_close_partintro(self) -> str:
+        if self._headings:
+            heading = self._headings[-1]
+            if heading.container_tag == 'part' and not heading.partintro_closed:
+                self._headings[-1] = heading._replace(partintro_closed=True)
+                return heading.toc_fragment + "</div>"
+        return ""
+
+    def _close_headings(self, level: Optional[int]) -> str:
+        result = []
+        while len(self._headings) and (level is None or self._headings[-1].level >= level):
+            result.append(self._maybe_close_partintro())
+            result.append("</div>")
+            self._headings.pop()
+        return "\n".join(result)
+
+    def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "section"
+    def _build_toc(self, tokens: Sequence[Token], i: int) -> str:
+        return ""
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py
index 1b796d9f04861..a01aa1b4634b4 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py
@@ -1,4 +1,4 @@
-from collections.abc import Mapping, MutableMapping, Sequence
+from collections.abc import Mapping, Sequence
 from dataclasses import dataclass
 from typing import Any, cast, Iterable, Optional
 
@@ -6,7 +6,6 @@ import re
 
 import markdown_it
 from markdown_it.token import Token
-from markdown_it.utils import OptionsDict
 
 from .md import Renderer
 
@@ -75,8 +74,6 @@ class List:
 # horizontal motion in a line) we do attempt to copy the style of mdoc(7) semantic requests
 # as appropriate for each markup element.
 class ManpageRenderer(Renderer):
-    __output__ = "man"
-
     # whether to emit mdoc .Ql equivalents for inline code or just the contents. this is
     # mainly used by the options manpage converter to not emit extra quotes in defaults
     # and examples where it's already clear from context that the following text is code.
@@ -90,9 +87,8 @@ class ManpageRenderer(Renderer):
     _list_stack: list[List]
     _font_stack: list[str]
 
-    def __init__(self, manpage_urls: Mapping[str, str], href_targets: dict[str, str],
-                 parser: Optional[markdown_it.MarkdownIt] = None):
-        super().__init__(manpage_urls, parser)
+    def __init__(self, manpage_urls: Mapping[str, str], href_targets: dict[str, str]):
+        super().__init__(manpage_urls)
         self._href_targets = href_targets
         self._link_stack = []
         self._do_parbreak_stack = []
@@ -126,36 +122,27 @@ class ManpageRenderer(Renderer):
         self._leave_block()
         return ".RE"
 
-    def render(self, tokens: Sequence[Token], options: OptionsDict,
-               env: MutableMapping[str, Any]) -> str:
+    def render(self, tokens: Sequence[Token]) -> str:
         self._do_parbreak_stack = [ False ]
         self._font_stack = [ "\\fR" ]
-        return super().render(tokens, options, env)
+        return super().render(tokens)
 
-    def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-             env: MutableMapping[str, Any]) -> str:
+    def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return man_escape(token.content)
-    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._maybe_parbreak()
-    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return ""
-    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return ".br"
-    def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return " "
-    def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                    env: MutableMapping[str, Any]) -> str:
+    def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         s = _protect_spaces(man_escape(token.content))
         return f"\\fR\\(oq{s}\\(cq\\fP" if self.inline_code_is_quoted else s
-    def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
-        return self.fence(token, tokens, i, options, env)
-    def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return self.fence(token, tokens, i)
+    def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         href = cast(str, token.attrs['href'])
         self._link_stack.append(href)
         text = ""
@@ -164,8 +151,7 @@ class ManpageRenderer(Renderer):
             text = self._href_targets[href]
         self._font_stack.append("\\fB")
         return f"\\fB{text}\0 <"
-    def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         href = self._link_stack.pop()
         text = ""
         if self.link_footnotes is not None:
@@ -177,8 +163,7 @@ class ManpageRenderer(Renderer):
             text = "\\fR" + man_escape(f"[{idx}]")
         self._font_stack.pop()
         return f">\0 {text}{self._font_stack[-1]}"
-    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._enter_block()
         lst = self._list_stack[-1]
         maybe_space = '' if lst.compact or not lst.first_item_seen else '.sp\n'
@@ -192,36 +177,28 @@ class ManpageRenderer(Renderer):
             f'.RS {lst.width}\n'
             f"\\h'-{len(head) + 1}'\\fB{man_escape(head)}\\fP\\h'1'\\c"
         )
-    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._leave_block()
         return ".RE"
-    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
+    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._list_stack.append(List(width=4, compact=bool(token.meta['compact'])))
         return self._maybe_parbreak()
-    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                          env: MutableMapping[str, Any]) -> str:
+    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._list_stack.pop()
         return ""
-    def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._font_stack.append("\\fI")
         return "\\fI"
-    def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._font_stack.pop()
         return self._font_stack[-1]
-    def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                    env: MutableMapping[str, Any]) -> str:
+    def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._font_stack.append("\\fB")
         return "\\fB"
-    def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._font_stack.pop()
         return self._font_stack[-1]
-    def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-              env: MutableMapping[str, Any]) -> str:
+    def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         s = man_escape(token.content).rstrip('\n')
         return (
             '.sp\n'
@@ -231,8 +208,7 @@ class ManpageRenderer(Renderer):
             '.fi\n'
             '.RE'
         )
-    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         maybe_par = self._maybe_parbreak("\n")
         self._enter_block()
         return (
@@ -240,62 +216,44 @@ class ManpageRenderer(Renderer):
             ".RS 4\n"
             f"\\h'-3'\\fI\\(lq\\(rq\\fP\\h'1'\\c"
         )
-    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
+    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._leave_block()
         return ".RE"
-    def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("Note")
-    def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("Caution")
-    def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("Important")
-    def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("Tip")
-    def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_open("Warning")
-    def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return self._admonition_close()
-    def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return ".RS 4"
-    def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return ".RE"
-    def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return ".PP"
-    def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return ""
-    def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._enter_block()
         return ".RS 4"
-    def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._leave_block()
         return ".RE"
-    def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         if token.meta['name'] in [ 'command', 'env', 'option' ]:
             return f'\\fB{man_escape(token.content)}\\fP'
         elif token.meta['name'] in [ 'file', 'var' ]:
@@ -306,23 +264,18 @@ class ManpageRenderer(Renderer):
             return f'\\fB{man_escape(page)}\\fP\\fR({man_escape(section)})\\fP'
         else:
             raise NotImplementedError("md node not supported yet", token)
-    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         # mdoc knows no anchors so we can drop those, but classes must be rejected.
         if 'class' in token.attrs:
-            return super().attr_span_begin(token, tokens, i, options, env)
+            return super().attr_span_begin(token, tokens, i)
         return ""
-    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         return ""
-    def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported in manpages", token)
-    def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported in manpages", token)
-    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                          env: MutableMapping[str, Any]) -> str:
+    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         # max item head width for a number, a dot, and one leading space and one trailing space
         width = 3 + len(str(cast(int, token.meta['end'])))
         self._list_stack.append(
@@ -330,7 +283,6 @@ class ManpageRenderer(Renderer):
                  next_idx = cast(int, token.attrs.get('start', 1)),
                  compact  = bool(token.meta['compact'])))
         return self._maybe_parbreak()
-    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                           env: MutableMapping[str, Any]) -> str:
+    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         self._list_stack.pop()
         return ""
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
index efc8b02e8d6b2..40dea3c7d1d85 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
@@ -1,160 +1,85 @@
 import argparse
+import html
 import json
+import re
+import xml.sax.saxutils as xml
 
 from abc import abstractmethod
-from collections.abc import Mapping, MutableMapping, Sequence
+from collections.abc import Mapping, Sequence
 from pathlib import Path
-from typing import Any, cast, NamedTuple, Optional, Union
-from xml.sax.saxutils import escape, quoteattr
+from typing import Any, cast, ClassVar, Generic, get_args, NamedTuple, Optional, Union
 
 import markdown_it
 from markdown_it.token import Token
-from markdown_it.utils import OptionsDict
 
-from . import options
-from .docbook import DocBookRenderer, Heading
-from .md import Converter
+from . import md, options
+from .docbook import DocBookRenderer, Heading, make_xml_id
+from .html import HTMLRenderer, UnresolvedXrefError
+from .manual_structure import check_structure, FragmentType, is_include, TocEntry, TocEntryType, XrefTarget
+from .md import Converter, Renderer
+from .utils import Freezeable
 
-class ManualDocBookRenderer(DocBookRenderer):
-    _toplevel_tag: str
-
-    def __init__(self, toplevel_tag: str, manpage_urls: Mapping[str, str],
-                 parser: Optional[markdown_it.MarkdownIt] = None):
-        super().__init__(manpage_urls, parser)
-        self._toplevel_tag = toplevel_tag
-        self.rules |= {
-            'included_sections': lambda *args: self._included_thing("section", *args),
-            'included_chapters': lambda *args: self._included_thing("chapter", *args),
-            'included_preface': lambda *args: self._included_thing("preface", *args),
-            'included_parts': lambda *args: self._included_thing("part", *args),
-            'included_appendix': lambda *args: self._included_thing("appendix", *args),
-            'included_options': self.included_options,
-        }
-
-    def render(self, tokens: Sequence[Token], options: OptionsDict,
-               env: MutableMapping[str, Any]) -> str:
-        wanted = { 'h1': 'title' }
-        wanted |= { 'h2': 'subtitle' } if self._toplevel_tag == 'book' else {}
-        for (i, (tag, kind)) in enumerate(wanted.items()):
-            if len(tokens) < 3 * (i + 1):
-                raise RuntimeError(f"missing {kind} ({tag}) heading")
-            token = tokens[3 * i]
-            if token.type != 'heading_open' or token.tag != tag:
-                assert token.map
-                raise RuntimeError(f"expected {kind} ({tag}) heading in line {token.map[0] + 1}", token)
-        for t in tokens[3 * len(wanted):]:
-            if t.type != 'heading_open' or (info := wanted.get(t.tag)) is None:
-                continue
-            assert t.map
-            raise RuntimeError(
-                f"only one {info[0]} heading ({t.markup} [text...]) allowed per "
-                f"{self._toplevel_tag}, but found a second in lines [{t.map[0] + 1}..{t.map[1]}]. "
-                "please remove all such headings except the first or demote the subsequent headings.",
-                t)
-
-        # books get special handling because they have *two* title tags. doing this with
-        # generic code is more complicated than it's worth. the checks above have verified
-        # that both titles actually exist.
-        if self._toplevel_tag == 'book':
-            assert tokens[1].children
-            assert tokens[4].children
-            if (maybe_id := cast(str, tokens[0].attrs.get('id', ""))):
-                maybe_id = "xml:id=" + quoteattr(maybe_id)
-            return (f'<book xmlns="http://docbook.org/ns/docbook"'
-                    f'      xmlns:xlink="http://www.w3.org/1999/xlink"'
-                    f'      {maybe_id} version="5.0">'
-                    f'  <title>{self.renderInline(tokens[1].children, options, env)}</title>'
-                    f'  <subtitle>{self.renderInline(tokens[4].children, options, env)}</subtitle>'
-                    f'  {super().render(tokens[6:], options, env)}'
-                    f'</book>')
-
-        return super().render(tokens, options, env)
-
-    def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> tuple[str, dict[str, str]]:
-        (tag, attrs) = super()._heading_tag(token, tokens, i, options, env)
-        # render() has already verified that we don't have supernumerary headings and since the
-        # book tag is handled specially we can leave the check this simple
-        if token.tag != 'h1':
-            return (tag, attrs)
-        return (self._toplevel_tag, attrs | {
-            'xmlns': "http://docbook.org/ns/docbook",
-            'xmlns:xlink': "http://www.w3.org/1999/xlink",
-        })
-
-    def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int,
-                        options: OptionsDict, env: MutableMapping[str, Any]) -> str:
-        result = []
-        # close existing partintro. the generic render doesn't really need this because
-        # it doesn't have a concept of structure in the way the manual does.
-        if self._headings and self._headings[-1] == Heading('part', 1):
-            result.append("</partintro>")
-            self._headings[-1] = self._headings[-1]._replace(partintro_closed=True)
-        # must nest properly for structural includes. this requires saving at least
-        # the headings stack, but creating new renderers is cheap and much easier.
-        r = ManualDocBookRenderer(tag, self._manpage_urls, None)
-        for (included, path) in token.meta['included']:
-            try:
-                result.append(r.render(included, options, env))
-            except Exception as e:
-                raise RuntimeError(f"rendering {path}") from e
-        return "".join(result)
-    def included_options(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
-        return cast(str, token.meta['rendered-options'])
-
-    # TODO minimize docbook diffs with existing conversions. remove soon.
-    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
-        return super().paragraph_open(token, tokens, i, options, env) + "\n "
-    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
-        return "\n" + super().paragraph_close(token, tokens, i, options, env)
-    def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
-        return f"<programlisting>\n{escape(token.content)}</programlisting>"
-    def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-              env: MutableMapping[str, Any]) -> str:
-        info = f" language={quoteattr(token.info)}" if token.info != "" else ""
-        return f"<programlisting{info}>\n{escape(token.content)}</programlisting>"
-
-class DocBookConverter(Converter):
-    def __renderer__(self, manpage_urls: Mapping[str, str],
-                     parser: Optional[markdown_it.MarkdownIt]) -> ManualDocBookRenderer:
-        return ManualDocBookRenderer('book', manpage_urls, parser)
+class BaseConverter(Converter[md.TR], Generic[md.TR]):
+    # per-converter configuration for ns:arg=value arguments to include blocks, following
+    # the include type. html converters need something like this to support chunking, or
+    # another external method like the chunktocs docbook uses (but block options seem like
+    # a much nicer way of doing this).
+    INCLUDE_ARGS_NS: ClassVar[str]
+    INCLUDE_FRAGMENT_ALLOWED_ARGS: ClassVar[set[str]] = set()
+    INCLUDE_OPTIONS_ALLOWED_ARGS: ClassVar[set[str]] = set()
 
     _base_paths: list[Path]
-    _revision: str
-
-    def __init__(self, manpage_urls: Mapping[str, str], revision: str):
-        super().__init__(manpage_urls)
-        self._revision = revision
+    _current_type: list[TocEntryType]
 
-    def convert(self, file: Path) -> str:
-        self._base_paths = [ file ]
+    def convert(self, infile: Path, outfile: Path) -> None:
+        self._base_paths = [ infile ]
+        self._current_type = ['book']
         try:
-            with open(file, 'r') as f:
-                return self._render(f.read())
+            tokens = self._parse(infile.read_text())
+            self._postprocess(infile, outfile, tokens)
+            converted = self._renderer.render(tokens)
+            outfile.write_text(converted)
         except Exception as e:
-            raise RuntimeError(f"failed to render manual {file}") from e
+            raise RuntimeError(f"failed to render manual {infile}") from e
+
+    def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None:
+        pass
 
-    def _parse(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> list[Token]:
-        tokens = super()._parse(src, env)
+    def _parse(self, src: str) -> list[Token]:
+        tokens = super()._parse(src)
+        check_structure(self._current_type[-1], tokens)
         for token in tokens:
-            if token.type != "fence" or not token.info.startswith("{=include=} "):
+            if not is_include(token):
+                continue
+            directive = token.info[12:].split()
+            if not directive:
                 continue
-            typ = token.info[12:].strip()
+            args = { k: v for k, _sep, v in map(lambda s: s.partition('='), directive[1:]) }
+            typ = directive[0]
             if typ == 'options':
                 token.type = 'included_options'
-                self._parse_options(token)
-            elif typ in [ 'sections', 'chapters', 'preface', 'parts', 'appendix' ]:
-                token.type = 'included_' + typ
-                self._parse_included_blocks(token, env)
+                self._process_include_args(token, args, self.INCLUDE_OPTIONS_ALLOWED_ARGS)
+                self._parse_options(token, args)
             else:
-                raise RuntimeError(f"unsupported structural include type '{typ}'")
+                fragment_type = typ.removesuffix('s')
+                if fragment_type not in get_args(FragmentType):
+                    raise RuntimeError(f"unsupported structural include type '{typ}'")
+                self._current_type.append(cast(FragmentType, fragment_type))
+                token.type = 'included_' + typ
+                self._process_include_args(token, args, self.INCLUDE_FRAGMENT_ALLOWED_ARGS)
+                self._parse_included_blocks(token, args)
+                self._current_type.pop()
         return tokens
 
-    def _parse_included_blocks(self, token: Token, env: Optional[MutableMapping[str, Any]]) -> None:
+    def _process_include_args(self, token: Token, args: dict[str, str], allowed: set[str]) -> None:
+        ns = self.INCLUDE_ARGS_NS + ":"
+        args = { k[len(ns):]: v for k, v in args.items() if k.startswith(ns) }
+        if unknown := set(args.keys()) - allowed:
+            assert token.map
+            raise RuntimeError(f"unrecognized include argument in line {token.map[0] + 1}", unknown)
+        token.meta['include-args'] = args
+
+    def _parse_included_blocks(self, token: Token, block_args: dict[str, str]) -> None:
         assert token.map
         included = token.meta['included'] = []
         for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2):
@@ -165,13 +90,13 @@ class DocBookConverter(Converter):
             try:
                 self._base_paths.append(path)
                 with open(path, 'r') as f:
-                    tokens = self._parse(f.read(), env)
+                    tokens = self._parse(f.read())
                     included.append((tokens, path))
                 self._base_paths.pop()
             except Exception as e:
                 raise RuntimeError(f"processing included file {path} from line {lnum}") from e
 
-    def _parse_options(self, token: Token) -> None:
+    def _parse_options(self, token: Token, block_args: dict[str, str]) -> None:
         assert token.map
 
         items = {}
@@ -194,14 +119,479 @@ class DocBookConverter(Converter):
                 " ".join(items.keys()))
 
         try:
-            conv = options.DocBookConverter(
-                self._manpage_urls, self._revision, False, 'fragment', varlist_id, id_prefix)
             with open(self._base_paths[-1].parent / source, 'r') as f:
-                conv.add_options(json.load(f))
-            token.meta['rendered-options'] = conv.finalize(fragment=True)
+                token.meta['id-prefix'] = id_prefix
+                token.meta['list-id'] = varlist_id
+                token.meta['source'] = json.load(f)
         except Exception as e:
             raise RuntimeError(f"processing options block in line {token.map[0] + 1}") from e
 
+class RendererMixin(Renderer):
+    _toplevel_tag: str
+    _revision: str
+
+    def __init__(self, toplevel_tag: str, revision: str, *args: Any, **kwargs: Any):
+        super().__init__(*args, **kwargs)
+        self._toplevel_tag = toplevel_tag
+        self._revision = revision
+        self.rules |= {
+            'included_sections': lambda *args: self._included_thing("section", *args),
+            'included_chapters': lambda *args: self._included_thing("chapter", *args),
+            'included_preface': lambda *args: self._included_thing("preface", *args),
+            'included_parts': lambda *args: self._included_thing("part", *args),
+            'included_appendix': lambda *args: self._included_thing("appendix", *args),
+            'included_options': self.included_options,
+        }
+
+    def render(self, tokens: Sequence[Token]) -> str:
+        # books get special handling because they have *two* title tags. doing this with
+        # generic code is more complicated than it's worth. the checks above have verified
+        # that both titles actually exist.
+        if self._toplevel_tag == 'book':
+            return self._render_book(tokens)
+
+        return super().render(tokens)
+
+    @abstractmethod
+    def _render_book(self, tokens: Sequence[Token]) -> str:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        raise NotImplementedError()
+
+class ManualDocBookRenderer(RendererMixin, DocBookRenderer):
+    def __init__(self, toplevel_tag: str, revision: str, manpage_urls: Mapping[str, str]):
+        super().__init__(toplevel_tag, revision, manpage_urls)
+
+    def _render_book(self, tokens: Sequence[Token]) -> str:
+        assert tokens[1].children
+        assert tokens[4].children
+        if (maybe_id := cast(str, tokens[0].attrs.get('id', ""))):
+            maybe_id = "xml:id=" + xml.quoteattr(maybe_id)
+        return (f'<book xmlns="http://docbook.org/ns/docbook"'
+                f'      xmlns:xlink="http://www.w3.org/1999/xlink"'
+                f'      {maybe_id} version="5.0">'
+                f'  <title>{self.renderInline(tokens[1].children)}</title>'
+                f'  <subtitle>{self.renderInline(tokens[4].children)}</subtitle>'
+                f'  {super(DocBookRenderer, self).render(tokens[6:])}'
+                f'</book>')
+
+    def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> tuple[str, dict[str, str]]:
+        (tag, attrs) = super()._heading_tag(token, tokens, i)
+        # render() has already verified that we don't have supernumerary headings and since the
+        # book tag is handled specially we can leave the check this simple
+        if token.tag != 'h1':
+            return (tag, attrs)
+        return (self._toplevel_tag, attrs | {
+            'xmlns': "http://docbook.org/ns/docbook",
+            'xmlns:xlink': "http://www.w3.org/1999/xlink",
+        })
+
+    def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
+        result = []
+        # close existing partintro. the generic render doesn't really need this because
+        # it doesn't have a concept of structure in the way the manual does.
+        if self._headings and self._headings[-1] == Heading('part', 1):
+            result.append("</partintro>")
+            self._headings[-1] = self._headings[-1]._replace(partintro_closed=True)
+        # must nest properly for structural includes. this requires saving at least
+        # the headings stack, but creating new renderers is cheap and much easier.
+        r = ManualDocBookRenderer(tag, self._revision, self._manpage_urls)
+        for (included, path) in token.meta['included']:
+            try:
+                result.append(r.render(included))
+            except Exception as e:
+                raise RuntimeError(f"rendering {path}") from e
+        return "".join(result)
+    def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        conv = options.DocBookConverter(self._manpage_urls, self._revision, False, 'fragment',
+                                        token.meta['list-id'], token.meta['id-prefix'])
+        conv.add_options(token.meta['source'])
+        return conv.finalize(fragment=True)
+
+    # TODO minimize docbook diffs with existing conversions. remove soon.
+    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return super().paragraph_open(token, tokens, i) + "\n "
+    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return "\n" + super().paragraph_close(token, tokens, i)
+    def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return f"<programlisting>\n{xml.escape(token.content)}</programlisting>"
+    def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        info = f" language={xml.quoteattr(token.info)}" if token.info != "" else ""
+        return f"<programlisting{info}>\n{xml.escape(token.content)}</programlisting>"
+
+class DocBookConverter(BaseConverter[ManualDocBookRenderer]):
+    INCLUDE_ARGS_NS = "docbook"
+
+    def __init__(self, manpage_urls: Mapping[str, str], revision: str):
+        super().__init__()
+        self._renderer = ManualDocBookRenderer('book', revision, manpage_urls)
+
+
+class HTMLParameters(NamedTuple):
+    generator: str
+    stylesheets: Sequence[str]
+    scripts: Sequence[str]
+    toc_depth: int
+    chunk_toc_depth: int
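+    # toc_depth is used for the main (book) table of contents, chunk_toc_depth for the
+    # smaller tocs emitted at the start of each chunked output file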
+
+class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
+    _base_path: Path
+    _html_params: HTMLParameters
+
+    def __init__(self, toplevel_tag: str, revision: str, html_params: HTMLParameters,
+                 manpage_urls: Mapping[str, str], xref_targets: dict[str, XrefTarget],
+                 base_path: Path):
+        super().__init__(toplevel_tag, revision, manpage_urls, xref_targets)
+        self._base_path, self._html_params = base_path, html_params
+
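+    # _push/_pop save and restore the renderer state that a nested include would otherwise
+    # clobber: the toplevel tag, the headings stack, collected attrspans, and the hN offset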
+    def _push(self, tag: str, hlevel_offset: int) -> Any:
+        result = (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset)
+        self._hlevel_offset += hlevel_offset
+        self._toplevel_tag, self._headings, self._attrspans = tag, [], []
+        return result
+
+    def _pop(self, state: Any) -> None:
+        (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset) = state
+
+    def _render_book(self, tokens: Sequence[Token]) -> str:
+        assert tokens[4].children
+        title_id = cast(str, tokens[0].attrs.get('id', ""))
+        title = self._xref_targets[title_id].title
+        # subtitles don't have IDs, so we can't use xrefs to get them
+        subtitle = self.renderInline(tokens[4].children)
+
+        toc = TocEntry.of(tokens[0])
+        return "\n".join([
+            self._file_header(toc),
+            ' <div class="book">',
+            '  <div class="titlepage">',
+            '   <div>',
+            f'   <div><h1 class="title"><a id="{html.escape(title_id, True)}"></a>{title}</h1></div>',
+            f'   <div><h2 class="subtitle">{subtitle}</h2></div>',
+            '   </div>',
+            "   <hr />",
+            '  </div>',
+            self._build_toc(tokens, 0),
+            super(HTMLRenderer, self).render(tokens[6:]),
+            ' </div>',
+            self._file_footer(toc),
+        ])
+
+    def _file_header(self, toc: TocEntry) -> str:
+        prev_link, up_link, next_link = "", "", ""
+        prev_a, next_a, parent_title = "", "", "&nbsp;"
+        home = toc.root
+        if toc.prev:
+            prev_link = f'<link rel="prev" href="{toc.prev.target.href()}" title="{toc.prev.target.title}" />'
+            prev_a = f'<a accesskey="p" href="{toc.prev.target.href()}">Prev</a>'
+        if toc.parent:
+            up_link = (
+                f'<link rel="up" href="{toc.parent.target.href()}" '
+                f'title="{toc.parent.target.title}" />'
+            )
+            if (part := toc.parent) and part.kind != 'book':
+                assert part.target.title
+                parent_title = part.target.title
+        if toc.next:
+            next_link = f'<link rel="next" href="{toc.next.target.href()}" title="{toc.next.target.title}" />'
+            next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>'
+        return "\n".join([
+            '<?xml version="1.0" encoding="utf-8" standalone="no"?>',
+            '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"',
+            '  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">',
+            '<html xmlns="http://www.w3.org/1999/xhtml">',
+            ' <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />',
+            f' <title>{toc.target.title}</title>',
+            "".join((f'<link rel="stylesheet" type="text/css" href="{html.escape(style, True)}" />'
+                     for style in self._html_params.stylesheets)),
+            "".join((f'<script src="{html.escape(script, True)}" type="text/javascript"></script>'
+                     for script in self._html_params.scripts)),
+            f' <meta name="generator" content="{html.escape(self._html_params.generator, True)}" />',
+            f' <link rel="home" href="{home.target.href()}" title="{home.target.title}" />',
+            f' {up_link}{prev_link}{next_link}',
+            ' </head>',
+            ' <body>',
+            '  <div class="navheader">',
+            '   <table width="100%" summary="Navigation header">',
+            '    <tr>',
+            f'    <th colspan="3" align="center">{toc.target.title}</th>',
+            '    </tr>',
+            '    <tr>',
+            f'    <td width="20%" align="left">{prev_a}&nbsp;</td>',
+            f'    <th width="60%" align="center">{parent_title}</th>',
+            f'    <td width="20%" align="right">&nbsp;{next_a}</td>',
+            '    </tr>',
+            '   </table>',
+            '   <hr />',
+            '  </div>',
+        ])
+
+    def _file_footer(self, toc: TocEntry) -> str:
+        # prev, next = self._get_prev_and_next()
+        prev_a, up_a, home_a, next_a = "", "&nbsp;", "&nbsp;", ""
+        prev_text, up_text, next_text = "", "", ""
+        home = toc.root
+        if toc.prev:
+            prev_a = f'<a accesskey="p" href="{toc.prev.target.href()}">Prev</a>'
+            assert toc.prev.target.title
+            prev_text = toc.prev.target.title
+        if toc.parent:
+            home_a = f'<a accesskey="h" href="{home.target.href()}">Home</a>'
+            if toc.parent != home:
+                up_a = f'<a accesskey="u" href="{toc.parent.target.href()}">Up</a>'
+        if toc.next:
+            next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>'
+            assert toc.next.target.title
+            next_text = toc.next.target.title
+        return "\n".join([
+            '  <div class="navfooter">',
+            '   <hr />',
+            '   <table width="100%" summary="Navigation footer">',
+            '    <tr>',
+            f'    <td width="40%" align="left">{prev_a}&nbsp;</td>',
+            f'    <td width="20%" align="center">{up_a}</td>',
+            f'    <td width="40%" align="right">&nbsp;{next_a}</td>',
+            '    </tr>',
+            '    <tr>',
+            f'     <td width="40%" align="left" valign="top">{prev_text}&nbsp;</td>',
+            f'     <td width="20%" align="center">{home_a}</td>',
+            f'     <td width="40%" align="right" valign="top">&nbsp;{next_text}</td>',
+            '    </tr>',
+            '   </table>',
+            '  </div>',
+            ' </body>',
+            '</html>',
+        ])
+
+    def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        if token.tag == 'h1':
+            return self._toplevel_tag
+        return super()._heading_tag(token, tokens, i)
+    def _build_toc(self, tokens: Sequence[Token], i: int) -> str:
+        toc = TocEntry.of(tokens[i])
+        if toc.kind == 'section':
+            return ""
+        def walk_and_emit(toc: TocEntry, depth: int) -> list[str]:
+            if depth <= 0:
+                return []
+            result = []
+            for child in toc.children:
+                result.append(
+                    f'<dt>'
+                    f' <span class="{html.escape(child.kind, True)}">'
+                    f'  <a href="{child.target.href()}">{child.target.toc_html}</a>'
+                    f' </span>'
+                    f'</dt>'
+                )
+                # we want to look straight through parts because docbook-xsl does too, but it
+                # also makes for more useful top-level tocs.
+                next_level = walk_and_emit(child, depth - (0 if child.kind == 'part' else 1))
+                if next_level:
+                    result.append(f'<dd><dl>{"".join(next_level)}</dl></dd>')
+            return result
+        toc_depth = (
+            self._html_params.chunk_toc_depth
+            if toc.starts_new_chunk and toc.kind != 'book'
+            else self._html_params.toc_depth
+        )
+        if not (items := walk_and_emit(toc, toc_depth)):
+            return ""
+        return (
+            f'<div class="toc">'
+            f' <p><strong>Table of Contents</strong></p>'
+            f' <dl class="toc">'
+            f'  {"".join(items)}'
+            f' </dl>'
+            f'</div>'
+        )
+
+    def _make_hN(self, level: int) -> tuple[str, str]:
+        # for some reason chapters don't increase the hN nesting count in docbook xslts. duplicate
+        # this for consistency.
+        if self._toplevel_tag == 'chapter':
+            level -= 1
+        # TODO docbook compat. these are never useful for us, but not having them breaks manual
+        # compare workflows while docbook is still allowed.
+        style = ""
+        if level + self._hlevel_offset < 3 \
+           and (self._toplevel_tag == 'section' or (self._toplevel_tag == 'chapter' and level > 0)):
+            style = "clear: both"
+        tag, hstyle = super()._make_hN(max(1, level))
+        return tag, style
+
+    def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
+        outer, inner = [], []
+        # since books have no non-include content the toplevel book wrapper will not count
+        # towards nesting depth. other types will have at least a title+id heading which
+        # *does* count towards the nesting depth. chapters give a -1 to included sections
+        # mirroring the special handling in _make_hN. sigh.
+        hoffset = (
+            0 if not self._headings
+            else self._headings[-1].level - 1 if self._toplevel_tag == 'chapter'
+            else self._headings[-1].level
+        )
+        outer.append(self._maybe_close_partintro())
+        into = token.meta['include-args'].get('into-file')
+        fragments = token.meta['included']
+        state = self._push(tag, hoffset)
+        if into:
+            toc = TocEntry.of(fragments[0][0][0])
+            inner.append(self._file_header(toc))
+            # we do not set _hlevel_offset=0 because docbook doesn't either.
+        else:
+            inner = outer
+        for included, path in fragments:
+            try:
+                inner.append(self.render(included))
+            except Exception as e:
+                raise RuntimeError(f"rendering {path}") from e
+        if into:
+            inner.append(self._file_footer(toc))
+            (self._base_path / into).write_text("".join(inner))
+        self._pop(state)
+        return "".join(outer)
+
+    def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        conv = options.HTMLConverter(self._manpage_urls, self._revision, False,
+                                     token.meta['list-id'], token.meta['id-prefix'],
+                                     self._xref_targets)
+        conv.add_options(token.meta['source'])
+        return conv.finalize()
+
+def _to_base26(n: int) -> str:
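+    # letters used for appendix labels, e.g. 0 -> "A", 1 -> "B", 27 -> "BB"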
+    return (_to_base26(n // 26) if n > 26 else "") + chr(ord("A") + n % 26)
+
+class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
+    INCLUDE_ARGS_NS = "html"
+    INCLUDE_FRAGMENT_ALLOWED_ARGS = { 'into-file' }
+
+    _revision: str
+    _html_params: HTMLParameters
+    _manpage_urls: Mapping[str, str]
+    _xref_targets: dict[str, XrefTarget]
+    _redirection_targets: set[str]
+    _appendix_count: int = 0
+
+    def _next_appendix_id(self) -> str:
+        self._appendix_count += 1
+        return _to_base26(self._appendix_count - 1)
+
+    def __init__(self, revision: str, html_params: HTMLParameters, manpage_urls: Mapping[str, str]):
+        super().__init__()
+        self._revision, self._html_params, self._manpage_urls = revision, html_params, manpage_urls
+        self._xref_targets = {}
+        self._redirection_targets = set()
+        # renderer not set on purpose since it has a dependency on the output path!
+
+    def convert(self, infile: Path, outfile: Path) -> None:
+        self._renderer = ManualHTMLRenderer('book', self._revision, self._html_params,
+                                            self._manpage_urls, self._xref_targets, outfile.parent)
+        super().convert(infile, outfile)
+
+    def _parse(self, src: str) -> list[Token]:
+        tokens = super()._parse(src)
+        for token in tokens:
+            if not token.type.startswith('included_') \
+               or not (into := token.meta['include-args'].get('into-file')):
+                continue
+            assert token.map
+            if len(token.meta['included']) == 0:
+                raise RuntimeError(f"redirection target {into} in line {token.map[0] + 1} is empty!")
+            # we use blender-style //path to denote paths relative to the origin file
+            # (usually index.html). this makes everything a lot easier and clearer.
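+            # e.g. `html:into-file=//options.html` chunks the included fragments into
+            # options.html in the same output directory as the main file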
+            if not into.startswith("//") or '/' in into[2:]:
+                raise RuntimeError(f"html:into-file must be a relative-to-origin //filename", into)
+            into = token.meta['include-args']['into-file'] = into[2:]
+            if into in self._redirection_targets:
+                raise RuntimeError(f"redirection target {into} in line {token.map[0] + 1} is already in use")
+            self._redirection_targets.add(into)
+        return tokens
+
+    # xref | (id, type, heading inlines, file, starts new file)
+    def _collect_ids(self, tokens: Sequence[Token], target_file: str, typ: str, file_changed: bool
+                     ) -> list[XrefTarget | tuple[str, str, Token, str, bool]]:
+        result: list[XrefTarget | tuple[str, str, Token, str, bool]] = []
+        # collect all IDs and their xref substitutions. headings are deferred until everything
+        # has been parsed so we can resolve links in headings. if that's even used anywhere.
+        for (i, bt) in enumerate(tokens):
+            if bt.type == 'heading_open' and (id := cast(str, bt.attrs.get('id', ''))):
+                result.append((id, typ if bt.tag == 'h1' else 'section', tokens[i + 1], target_file,
+                               i == 0 and file_changed))
+            elif bt.type == 'included_options':
+                id_prefix = bt.meta['id-prefix']
+                for opt in bt.meta['source'].keys():
+                    id = make_xml_id(f"{id_prefix}{opt}")
+                    name = html.escape(opt)
+                    result.append(XrefTarget(id, f'<code class="option">{name}</code>', name, None, target_file))
+            elif bt.type.startswith('included_'):
+                sub_file = bt.meta['include-args'].get('into-file', target_file)
+                subtyp = bt.type.removeprefix('included_').removesuffix('s')
+                for si, (sub, _path) in enumerate(bt.meta['included']):
+                    result += self._collect_ids(sub, sub_file, subtyp, si == 0 and sub_file != target_file)
+            elif bt.type == 'inline':
+                assert bt.children
+                result += self._collect_ids(bt.children, target_file, typ, False)
+            elif id := cast(str, bt.attrs.get('id', '')):
+                # anchors and examples have no titles we could use, but we'll have to put
+                # *something* here to communicate that there's no title.
+                result.append(XrefTarget(id, "???", None, None, target_file))
+        return result
+
+    def _render_xref(self, id: str, typ: str, inlines: Token, path: str, drop_fragment: bool) -> XrefTarget:
+        assert inlines.children
+        title_html = self._renderer.renderInline(inlines.children)
+        if typ == 'appendix':
+            # NOTE the docbook compat is strong here
+            n = self._next_appendix_id()
+            prefix = f"Appendix\u00A0{n}.\u00A0"
+            # HACK for docbook compat: prefix the title inlines with appendix id if
+            # necessary. the alternative is to mess with titlepage rendering in headings,
+            # which seems just a lot worse than this
+            prefix_tokens = [Token(type='text', tag='', nesting=0, content=prefix)]
+            inlines.children = prefix_tokens + list(inlines.children)
+            title = prefix + title_html
+            toc_html = f"{n}. {title_html}"
+            title_html = f"Appendix&nbsp;{n}"
+        else:
+            toc_html, title = title_html, title_html
+            title_html = (
+                f"<em>{title_html}</em>"
+                if typ == 'chapter'
+                else title_html if typ in [ 'book', 'part' ]
+                else f'the section called “{title_html}”'
+            )
+        return XrefTarget(id, title_html, toc_html, re.sub('<.*?>', '', title), path, drop_fragment)
+
+    def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None:
+        xref_queue = self._collect_ids(tokens, outfile.name, 'book', True)
+
+        failed = False
+        deferred = []
+        while xref_queue:
+            for item in xref_queue:
+                try:
+                    target = item if isinstance(item, XrefTarget) else self._render_xref(*item)
+                except UnresolvedXrefError as e:
+                    if failed:
+                        raise
+                    deferred.append(item)
+                    continue
+
+                if target.id in self._xref_targets:
+                    raise RuntimeError(f"found duplicate id #{target.id}")
+                self._xref_targets[target.id] = target
+            if len(deferred) == len(xref_queue):
+                failed = True # do another round and report the first error
+            xref_queue = deferred
+
+        TocEntry.collect_and_link(self._xref_targets, tokens)
+
 
 
 def _build_cli_db(p: argparse.ArgumentParser) -> None:
@@ -210,18 +600,40 @@ def _build_cli_db(p: argparse.ArgumentParser) -> None:
     p.add_argument('infile', type=Path)
     p.add_argument('outfile', type=Path)
 
+def _build_cli_html(p: argparse.ArgumentParser) -> None:
+    p.add_argument('--manpage-urls', required=True)
+    p.add_argument('--revision', required=True)
+    p.add_argument('--generator', default='nixos-render-docs')
+    p.add_argument('--stylesheet', default=[], action='append')
+    p.add_argument('--script', default=[], action='append')
+    p.add_argument('--toc-depth', default=1, type=int)
+    p.add_argument('--chunk-toc-depth', default=1, type=int)
+    p.add_argument('infile', type=Path)
+    p.add_argument('outfile', type=Path)
+
 def _run_cli_db(args: argparse.Namespace) -> None:
     with open(args.manpage_urls, 'r') as manpage_urls:
         md = DocBookConverter(json.load(manpage_urls), args.revision)
-        converted = md.convert(args.infile)
-        args.outfile.write_text(converted)
+        md.convert(args.infile, args.outfile)
+
+def _run_cli_html(args: argparse.Namespace) -> None:
+    with open(args.manpage_urls, 'r') as manpage_urls:
+        md = HTMLConverter(
+            args.revision,
+            HTMLParameters(args.generator, args.stylesheet, args.script, args.toc_depth,
+                           args.chunk_toc_depth),
+            json.load(manpage_urls))
+        md.convert(args.infile, args.outfile)
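+# a typical invocation of the html target (assuming the top-level CLI wires this module
+# up as the `manual` subcommand) might look like:
+#   nixos-render-docs manual html --manpage-urls ./manpage-urls.json --revision unstable \
+#     --stylesheet style.css --toc-depth 1 --chunk-toc-depth 1 manual.md out/index.html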
 
 def build_cli(p: argparse.ArgumentParser) -> None:
     formats = p.add_subparsers(dest='format', required=True)
     _build_cli_db(formats.add_parser('docbook'))
+    _build_cli_html(formats.add_parser('html'))
 
 def run_cli(args: argparse.Namespace) -> None:
     if args.format == 'docbook':
         _run_cli_db(args)
+    elif args.format == 'html':
+        _run_cli_html(args)
     else:
         raise RuntimeError('format not hooked up', args)
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
new file mode 100644
index 0000000000000..c271ca3c5aa5f
--- /dev/null
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py
@@ -0,0 +1,186 @@
+from __future__ import annotations
+
+import dataclasses as dc
+import html
+import itertools
+
+from typing import cast, get_args, Iterable, Literal, Sequence
+
+from markdown_it.token import Token
+
+from .utils import Freezeable
+
+# FragmentType is used to restrict structural include blocks.
+FragmentType = Literal['preface', 'part', 'chapter', 'section', 'appendix']
+
+# in the TOC all fragments are allowed, plus the all-encompassing book.
+TocEntryType = Literal['book', 'preface', 'part', 'chapter', 'section', 'appendix']
+
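+# structural includes are fenced blocks whose info string starts with {=include=},
+# listing one source file per line, e.g.
+#
+#   ```{=include=} chapters
+#   some-chapter.md
+#   another-chapter.md
+#   ```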
+def is_include(token: Token) -> bool:
+    return token.type == "fence" and token.info.startswith("{=include=} ")
+
+# the toplevel file must contain only the title headings and includes; anything else
+# would cause strange rendering.
+def _check_book_structure(tokens: Sequence[Token]) -> None:
+    for token in tokens[6:]:
+        if not is_include(token):
+            assert token.map
+            raise RuntimeError(f"unexpected content in line {token.map[0] + 1}, "
+                               "expected structural include")
+
+# much like books, parts may not contain headings other than their title heading.
+# this is a limitation of the current renderers and TOC generators that do not handle
+# this case well even though it is supported in docbook (and probably supportable
+# anywhere else).
+def _check_part_structure(tokens: Sequence[Token]) -> None:
+    _check_fragment_structure(tokens)
+    for token in tokens[3:]:
+        if token.type == 'heading_open':
+            assert token.map
+            raise RuntimeError(f"unexpected heading in line {token.map[0] + 1}")
+
+# two include blocks must either be adjacent or separated by a heading, otherwise
+# we cannot generate a correct TOC (since there'd be nothing to link to between
+# the two includes).
+def _check_fragment_structure(tokens: Sequence[Token]) -> None:
+    for i, token in enumerate(tokens):
+        if is_include(token) \
+           and i + 1 < len(tokens) \
+           and not (is_include(tokens[i + 1]) or tokens[i + 1].type == 'heading_open'):
+            assert token.map
+            raise RuntimeError(f"unexpected content in line {token.map[0] + 1}, "
+                               "expected heading or structural include")
+
+def check_structure(kind: TocEntryType, tokens: Sequence[Token]) -> None:
+    wanted = { 'h1': 'title' }
+    wanted |= { 'h2': 'subtitle' } if kind == 'book' else {}
+    for (i, (tag, role)) in enumerate(wanted.items()):
+        if len(tokens) < 3 * (i + 1):
+            raise RuntimeError(f"missing {role} ({tag}) heading")
+        token = tokens[3 * i]
+        if token.type != 'heading_open' or token.tag != tag:
+            assert token.map
+            raise RuntimeError(f"expected {role} ({tag}) heading in line {token.map[0] + 1}", token)
+    for t in tokens[3 * len(wanted):]:
+        if t.type != 'heading_open' or not (role := wanted.get(t.tag, '')):
+            continue
+        assert t.map
+        raise RuntimeError(
+            f"only one {role} heading ({t.markup} [text...]) allowed per "
+            f"{kind}, but found a second in line {t.map[0] + 1}. "
+            "please remove all such headings except the first or demote the subsequent headings.",
+            t)
+
+    last_heading_level = 0
+    for token in tokens:
+        if token.type != 'heading_open':
+            continue
+
+        # book subtitle headings do not need an id, only book title headings do.
+        # every other heading needs one too. we need this to build a TOC and to
+        # provide stable links if the manual changes shape.
+        if 'id' not in token.attrs and (kind != 'book' or token.tag != 'h2'):
+            assert token.map
+            raise RuntimeError(f"heading in line {token.map[0] + 1} does not have an id")
+
+        level = int(token.tag[1:]) # because tag = h1..h6
+        if level > last_heading_level + 1:
+            assert token.map
+            raise RuntimeError(f"heading in line {token.map[0] + 1} skips one or more heading levels, "
+                               "which is currently not allowed")
+        last_heading_level = level
+
+    if kind == 'book':
+        _check_book_structure(tokens)
+    elif kind == 'part':
+        _check_part_structure(tokens)
+    else:
+        _check_fragment_structure(tokens)
+
+@dc.dataclass(frozen=True)
+class XrefTarget:
+    id: str
+    """link label for `[](#local-references)`"""
+    title_html: str
+    """inline html for the link text of xrefs to this target"""
+    toc_html: str | None
+    """toc label"""
+    title: str | None
+    """text for `<title>` tags and `title="..."` attributes"""
+    path: str
+    """path to file that contains the anchor"""
+    drop_fragment: bool = False
+    """whether to drop the `#anchor` from links when expanding xrefs"""
+
+    def href(self) -> str:
+        path = html.escape(self.path, True)
+        return path if self.drop_fragment else f"{path}#{html.escape(self.id, True)}"
+
+@dc.dataclass
+class TocEntry(Freezeable):
+    kind: TocEntryType
+    target: XrefTarget
+    parent: TocEntry | None = None
+    prev: TocEntry | None = None
+    next: TocEntry | None = None
+    children: list[TocEntry] = dc.field(default_factory=list)
+    starts_new_chunk: bool = False
+
+    @property
+    def root(self) -> TocEntry:
+        return self.parent.root if self.parent else self
+
+    @classmethod
+    def of(cls, token: Token) -> TocEntry:
+        entry = token.meta.get('TocEntry')
+        if not isinstance(entry, TocEntry):
+            raise RuntimeError('requested toc entry, none found', token)
+        return entry
+
+    @classmethod
+    def collect_and_link(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token]) -> TocEntry:
+        result = cls._collect_entries(xrefs, tokens, 'book')
+
+        def flatten_with_parent(this: TocEntry, parent: TocEntry | None) -> Iterable[TocEntry]:
+            this.parent = parent
+            return itertools.chain([this], *[ flatten_with_parent(c, this) for c in this.children ])
+
+        flat = list(flatten_with_parent(result, None))
+        prev = flat[0]
+        prev.starts_new_chunk = True
+        paths_seen = set([prev.target.path])
+        for c in flat[1:]:
+            if prev.target.path != c.target.path and c.target.path not in paths_seen:
+                c.starts_new_chunk = True
+                c.prev, prev.next = prev, c
+                prev = c
+            paths_seen.add(c.target.path)
+
+        for c in flat:
+            c.freeze()
+
+        return result
+
+    @classmethod
+    def _collect_entries(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token],
+                         kind: TocEntryType) -> TocEntry:
+        # we assume that check_structure has been run recursively over the entire input.
+        # list contains (tag, entry) pairs that will collapse to a single entry for
+        # the full sequence.
+        entries: list[tuple[str, TocEntry]] = []
+        for token in tokens:
+            if token.type.startswith('included_') and (included := token.meta.get('included')):
+                fragment_type_str = token.type[9:].removesuffix('s')
+                assert fragment_type_str in get_args(TocEntryType)
+                fragment_type = cast(TocEntryType, fragment_type_str)
+                for fragment, _path in included:
+                    entries[-1][1].children.append(cls._collect_entries(xrefs, fragment, fragment_type))
+            elif token.type == 'heading_open' and (id := cast(str, token.attrs.get('id', ''))):
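+                # heading tags compare lexicographically ('h1' < 'h2' < ... < 'h6'), so a new
+                # heading first folds every entry at the same or a deeper level into its parent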
+                while len(entries) > 1 and entries[-1][0] >= token.tag:
+                    entries[-2][1].children.append(entries.pop()[1])
+                entries.append((token.tag,
+                                TocEntry(kind if token.tag == 'h1' else 'section', xrefs[id])))
+                token.meta['TocEntry'] = entries[-1][1]
+
+        while len(entries) > 1:
+            entries[-2][1].children.append(entries.pop()[1])
+        return entries[0][1]
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py
index 96cc8af69bce9..e8fee1b713282 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py
@@ -1,6 +1,6 @@
 from abc import ABC
 from collections.abc import Mapping, MutableMapping, Sequence
-from typing import Any, Callable, cast, get_args, Iterable, Literal, NoReturn, Optional
+from typing import Any, Callable, cast, Generic, get_args, Iterable, Literal, NoReturn, Optional, TypeVar
 
 import dataclasses
 import re
@@ -44,11 +44,11 @@ AttrBlockKind = Literal['admonition', 'example']
 
 AdmonitionKind = Literal["note", "caution", "tip", "important", "warning"]
 
-class Renderer(markdown_it.renderer.RendererProtocol):
+class Renderer:
     _admonitions: dict[AdmonitionKind, tuple[RenderFn, RenderFn]]
     _admonition_stack: list[AdmonitionKind]
 
-    def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
+    def __init__(self, manpage_urls: Mapping[str, str]):
         self._manpage_urls = manpage_urls
         self.rules = {
             'text': self.text,
@@ -104,169 +104,120 @@ class Renderer(markdown_it.renderer.RendererProtocol):
     def _join_inline(self, ls: Iterable[str]) -> str:
         return "".join(ls)
 
-    def admonition_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def admonition_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         tag = token.meta['kind']
         self._admonition_stack.append(tag)
-        return self._admonitions[tag][0](token, tokens, i, options, env)
-    def admonition_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
-        return self._admonitions[self._admonition_stack.pop()][1](token, tokens, i, options, env)
+        return self._admonitions[tag][0](token, tokens, i)
+    def admonition_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        return self._admonitions[self._admonition_stack.pop()][1](token, tokens, i)
 
-    def render(self, tokens: Sequence[Token], options: OptionsDict,
-               env: MutableMapping[str, Any]) -> str:
+    def render(self, tokens: Sequence[Token]) -> str:
         def do_one(i: int, token: Token) -> str:
             if token.type == "inline":
                 assert token.children is not None
-                return self.renderInline(token.children, options, env)
+                return self.renderInline(token.children)
             elif token.type in self.rules:
-                return self.rules[token.type](tokens[i], tokens, i, options, env)
+                return self.rules[token.type](tokens[i], tokens, i)
             else:
                 raise NotImplementedError("md token not supported yet", token)
         return self._join_block(map(lambda arg: do_one(*arg), enumerate(tokens)))
-    def renderInline(self, tokens: Sequence[Token], options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def renderInline(self, tokens: Sequence[Token]) -> str:
         def do_one(i: int, token: Token) -> str:
             if token.type in self.rules:
-                return self.rules[token.type](tokens[i], tokens, i, options, env)
+                return self.rules[token.type](tokens[i], tokens, i)
             else:
                 raise NotImplementedError("md token not supported yet", token)
         return self._join_inline(map(lambda arg: do_one(*arg), enumerate(tokens)))
 
-    def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-             env: MutableMapping[str, Any]) -> str:
+    def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                    env: MutableMapping[str, Any]) -> str:
+    def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
+    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                          env: MutableMapping[str, Any]) -> str:
+    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                    env: MutableMapping[str, Any]) -> str:
+    def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-              env: MutableMapping[str, Any]) -> str:
+    def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
+    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                   env: MutableMapping[str, Any]) -> str:
+    def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                       env: MutableMapping[str, Any]) -> str:
+    def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                env: MutableMapping[str, Any]) -> str:
+    def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                 env: MutableMapping[str, Any]) -> str:
+    def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                  env: MutableMapping[str, Any]) -> str:
+    def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                          env: MutableMapping[str, Any]) -> str:
+    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                           env: MutableMapping[str, Any]) -> str:
+    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def example_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
-    def example_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported", token)
 
 def _is_escaped(src: str, pos: int) -> bool:
@@ -466,12 +417,26 @@ def _block_attr(md: markdown_it.MarkdownIt) -> None:
 
     md.core.ruler.push("block_attr", block_attr)
 
-class Converter(ABC):
-    __renderer__: Callable[[Mapping[str, str], markdown_it.MarkdownIt], Renderer]
+TR = TypeVar('TR', bound='Renderer')
 
-    def __init__(self, manpage_urls: Mapping[str, str]):
-        self._manpage_urls = manpage_urls
+class Converter(ABC, Generic[TR]):
+    # we explicitly disable markdown-it rendering support and use our own entirely.
+    # rendering is well separated from parsing and our renderers carry much more state than
+    # markdown-it easily acknowledges as 'good' (unless we used the untyped env args to
+    # shuttle that state around, which is very fragile)
+    class ForbiddenRenderer(markdown_it.renderer.RendererProtocol):
+        __output__ = "none"
+
+        def __init__(self, parser: Optional[markdown_it.MarkdownIt]):
+            pass
+
+        def render(self, tokens: Sequence[Token], options: OptionsDict,
+                   env: MutableMapping[str, Any]) -> str:
+            raise NotImplementedError("do not use Converter._md.renderer. 'tis a silly place")
+
+    _renderer: TR
 
+    def __init__(self) -> None:
         self._md = markdown_it.MarkdownIt(
             "commonmark",
             {
@@ -479,7 +444,7 @@ class Converter(ABC):
                 'html': False,       # not useful since we target many formats
                 'typographer': True, # required for smartquotes
             },
-            renderer_cls=lambda parser: self.__renderer__(self._manpage_urls, parser)
+            renderer_cls=self.ForbiddenRenderer
         )
         self._md.use(
             container_plugin,
@@ -496,10 +461,9 @@ class Converter(ABC):
         self._md.use(_block_attr)
         self._md.enable(["smartquotes", "replacements"])
 
-    def _parse(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> list[Token]:
-        return self._md.parse(src, env if env is not None else {})
+    def _parse(self, src: str) -> list[Token]:
+        return self._md.parse(src, {})
 
-    def _render(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> str:
-        env = {} if env is None else env
-        tokens = self._parse(src, env)
-        return self._md.renderer.render(tokens, self._md.options, env) # type: ignore[no-any-return]
+    def _render(self, src: str) -> str:
+        tokens = self._parse(src)
+        return self._renderer.render(tokens)
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py
index f29d8fdb89682..06e5f97112168 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/options.py
@@ -1,23 +1,26 @@
 from __future__ import annotations
 
 import argparse
+import html
 import json
+import xml.sax.saxutils as xml
 
 from abc import abstractmethod
-from collections.abc import Mapping, MutableMapping, Sequence
-from markdown_it.utils import OptionsDict
+from collections.abc import Mapping, Sequence
 from markdown_it.token import Token
-from typing import Any, Optional
+from typing import Any, Generic, Optional
 from urllib.parse import quote
-from xml.sax.saxutils import escape, quoteattr
 
 import markdown_it
 
+from . import md
 from . import parallel
 from .asciidoc import AsciiDocRenderer, asciidoc_escape
 from .commonmark import CommonMarkRenderer
 from .docbook import DocBookRenderer, make_xml_id
+from .html import HTMLRenderer
 from .manpage import ManpageRenderer, man_escape
+from .manual_structure import XrefTarget
 from .md import Converter, md_escape, md_make_code
 from .types import OptionLoc, Option, RenderedOption
 
@@ -30,15 +33,13 @@ def option_is(option: Option, key: str, typ: str) -> Optional[dict[str, str]]:
         return None
     return option[key] # type: ignore[return-value]
 
-class BaseConverter(Converter):
+class BaseConverter(Converter[md.TR], Generic[md.TR]):
     __option_block_separator__: str
 
     _options: dict[str, RenderedOption]
 
-    def __init__(self, manpage_urls: Mapping[str, str],
-                 revision: str,
-                 markdown_by_default: bool):
-        super().__init__(manpage_urls)
+    def __init__(self, revision: str, markdown_by_default: bool):
+        super().__init__()
         self._options = {}
         self._revision = revision
         self._markdown_by_default = markdown_by_default
@@ -153,7 +154,7 @@ class BaseConverter(Converter):
     # since it's good enough so far.
     @classmethod
     @abstractmethod
-    def _parallel_render_init_worker(cls, a: Any) -> BaseConverter: raise NotImplementedError()
+    def _parallel_render_init_worker(cls, a: Any) -> BaseConverter[md.TR]: raise NotImplementedError()
 
     def _render_option(self, name: str, option: dict[str, Any]) -> RenderedOption:
         try:
@@ -162,7 +163,7 @@ class BaseConverter(Converter):
             raise Exception(f"Failed to render option {name}") from e
 
     @classmethod
-    def _parallel_render_step(cls, s: BaseConverter, a: Any) -> RenderedOption:
+    def _parallel_render_step(cls, s: BaseConverter[md.TR], a: Any) -> RenderedOption:
         return s._render_option(*a)
 
     def add_options(self, options: dict[str, Any]) -> None:
@@ -175,32 +176,25 @@ class BaseConverter(Converter):
     def finalize(self) -> str: raise NotImplementedError()
 
 class OptionDocsRestrictions:
-    def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported in options doc", token)
-    def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                      env: MutableMapping[str, Any]) -> str:
+    def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported in options doc", token)
-    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                        env: MutableMapping[str, Any]) -> str:
+    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported in options doc", token)
-    def example_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                     env: MutableMapping[str, Any]) -> str:
+    def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         raise RuntimeError("md token not supported in options doc", token)
 
 class OptionsDocBookRenderer(OptionDocsRestrictions, DocBookRenderer):
     # TODO keep optionsDocBook diff small. remove soon if rendering is still good.
-    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                          env: MutableMapping[str, Any]) -> str:
+    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         token.meta['compact'] = False
-        return super().ordered_list_open(token, tokens, i, options, env)
-    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
-                         env: MutableMapping[str, Any]) -> str:
+        return super().ordered_list_open(token, tokens, i)
+    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
         token.meta['compact'] = False
-        return super().bullet_list_open(token, tokens, i, options, env)
+        return super().bullet_list_open(token, tokens, i)
 
-class DocBookConverter(BaseConverter):
-    __renderer__ = OptionsDocBookRenderer
+class DocBookConverter(BaseConverter[OptionsDocBookRenderer]):
     __option_block_separator__ = ""
 
     def __init__(self, manpage_urls: Mapping[str, str],
@@ -209,13 +203,14 @@ class DocBookConverter(BaseConverter):
                  document_type: str,
                  varlist_id: str,
                  id_prefix: str):
-        super().__init__(manpage_urls, revision, markdown_by_default)
+        super().__init__(revision, markdown_by_default)
+        self._renderer = OptionsDocBookRenderer(manpage_urls)
         self._document_type = document_type
         self._varlist_id = varlist_id
         self._id_prefix = id_prefix
 
     def _parallel_render_prepare(self) -> Any:
-        return (self._manpage_urls, self._revision, self._markdown_by_default, self._document_type,
+        return (self._renderer._manpage_urls, self._revision, self._markdown_by_default, self._document_type,
                 self._varlist_id, self._id_prefix)
     @classmethod
     def _parallel_render_init_worker(cls, a: Any) -> DocBookConverter:
@@ -248,10 +243,10 @@ class DocBookConverter(BaseConverter):
 
     def _decl_def_entry(self, href: Optional[str], name: str) -> list[str]:
         if href is not None:
-            href = " xlink:href=" + quoteattr(href)
+            href = " xlink:href=" + xml.quoteattr(href)
         return [
             f"<member><filename{href}>",
-            escape(name),
+            xml.escape(name),
             "</filename></member>"
         ]
 
@@ -281,8 +276,8 @@ class DocBookConverter(BaseConverter):
             result += [
                 "<varlistentry>",
                 # NOTE adding extra spaces here introduces spaces into xref link expansions
-                (f"<term xlink:href={quoteattr('#' + id)} xml:id={quoteattr(id)}>" +
-                 f"<option>{escape(name)}</option></term>"),
+                (f"<term xlink:href={xml.quoteattr('#' + id)} xml:id={xml.quoteattr(id)}>" +
+                 f"<option>{xml.escape(name)}</option></term>"),
                 "<listitem>"
             ]
             result += opt.lines
@@ -300,11 +295,7 @@ class DocBookConverter(BaseConverter):
 class OptionsManpageRenderer(OptionDocsRestrictions, ManpageRenderer):
     pass
 
-class ManpageConverter(BaseConverter):
-    def __renderer__(self, manpage_urls: Mapping[str, str],
-                     parser: Optional[markdown_it.MarkdownIt] = None) -> OptionsManpageRenderer:
-        return OptionsManpageRenderer(manpage_urls, self._options_by_id, parser)
-
+class ManpageConverter(BaseConverter[OptionsManpageRenderer]):
     __option_block_separator__ = ".sp"
 
     _options_by_id: dict[str, str]
@@ -314,8 +305,9 @@ class ManpageConverter(BaseConverter):
                  *,
                  # only for parallel rendering
                  _options_by_id: Optional[dict[str, str]] = None):
+        super().__init__(revision, markdown_by_default)
         self._options_by_id = _options_by_id or {}
-        super().__init__({}, revision, markdown_by_default)
+        self._renderer = OptionsManpageRenderer({}, self._options_by_id)
 
     def _parallel_render_prepare(self) -> Any:
         return ((self._revision, self._markdown_by_default), { '_options_by_id': self._options_by_id })
@@ -324,10 +316,9 @@ class ManpageConverter(BaseConverter):
         return cls(*a[0], **a[1])
 
     def _render_option(self, name: str, option: dict[str, Any]) -> RenderedOption:
-        assert isinstance(self._md.renderer, OptionsManpageRenderer)
-        links = self._md.renderer.link_footnotes = []
+        links = self._renderer.link_footnotes = []
         result = super()._render_option(name, option)
-        self._md.renderer.link_footnotes = None
+        self._renderer.link_footnotes = None
         return result._replace(links=links)
 
     def add_options(self, options: dict[str, Any]) -> None:
@@ -339,12 +330,11 @@ class ManpageConverter(BaseConverter):
         if lit := option_is(option, key, 'literalDocBook'):
             raise RuntimeError("can't render manpages in the presence of docbook")
         else:
-            assert isinstance(self._md.renderer, OptionsManpageRenderer)
             try:
-                self._md.renderer.inline_code_is_quoted = False
+                self._renderer.inline_code_is_quoted = False
                 return super()._render_code(option, key)
             finally:
-                self._md.renderer.inline_code_is_quoted = True
+                self._renderer.inline_code_is_quoted = True
 
     def _render_description(self, desc: str | dict[str, Any]) -> list[str]:
         if isinstance(desc, str) and not self._markdown_by_default:
@@ -428,12 +418,15 @@ class ManpageConverter(BaseConverter):
 class OptionsCommonMarkRenderer(OptionDocsRestrictions, CommonMarkRenderer):
     pass
 
-class CommonMarkConverter(BaseConverter):
-    __renderer__ = OptionsCommonMarkRenderer
+class CommonMarkConverter(BaseConverter[OptionsCommonMarkRenderer]):
     __option_block_separator__ = ""
 
+    def __init__(self, manpage_urls: Mapping[str, str], revision: str, markdown_by_default: bool):
+        super().__init__(revision, markdown_by_default)
+        self._renderer = OptionsCommonMarkRenderer(manpage_urls)
+
     def _parallel_render_prepare(self) -> Any:
-        return (self._manpage_urls, self._revision, self._markdown_by_default)
+        return (self._renderer._manpage_urls, self._revision, self._markdown_by_default)
     @classmethod
     def _parallel_render_init_worker(cls, a: Any) -> CommonMarkConverter:
         return cls(*a)
@@ -481,12 +474,15 @@ class CommonMarkConverter(BaseConverter):
 class OptionsAsciiDocRenderer(OptionDocsRestrictions, AsciiDocRenderer):
     pass
 
-class AsciiDocConverter(BaseConverter):
-    __renderer__ = AsciiDocRenderer
+class AsciiDocConverter(BaseConverter[OptionsAsciiDocRenderer]):
     __option_block_separator__ = ""
 
+    def __init__(self, manpage_urls: Mapping[str, str], revision: str, markdown_by_default: bool):
+        super().__init__(revision, markdown_by_default)
+        self._renderer = OptionsAsciiDocRenderer(manpage_urls)
+
     def _parallel_render_prepare(self) -> Any:
-        return (self._manpage_urls, self._revision, self._markdown_by_default)
+        return (self._renderer._manpage_urls, self._revision, self._markdown_by_default)
     @classmethod
     def _parallel_render_init_worker(cls, a: Any) -> AsciiDocConverter:
         return cls(*a)
@@ -531,6 +527,109 @@ class AsciiDocConverter(BaseConverter):
 
         return "\n".join(result)
 
+class OptionsHTMLRenderer(OptionDocsRestrictions, HTMLRenderer):
+    # TODO docbook compat. must be removed together with the matching docbook handlers.
+    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        token.meta['compact'] = False
+        return super().ordered_list_open(token, tokens, i)
+    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        token.meta['compact'] = False
+        return super().bullet_list_open(token, tokens, i)
+    def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+        # TODO use token.info. docbook doesn't so we can't yet.
+        return f'<pre class="programlisting">{html.escape(token.content)}</pre>'
+
+class HTMLConverter(BaseConverter[OptionsHTMLRenderer]):
+    __option_block_separator__ = ""
+
+    def __init__(self, manpage_urls: Mapping[str, str], revision: str, markdown_by_default: bool,
+                 varlist_id: str, id_prefix: str, xref_targets: Mapping[str, XrefTarget]):
+        super().__init__(revision, markdown_by_default)
+        self._xref_targets = xref_targets
+        self._varlist_id = varlist_id
+        self._id_prefix = id_prefix
+        self._renderer = OptionsHTMLRenderer(manpage_urls, self._xref_targets)
+
+    def _parallel_render_prepare(self) -> Any:
+        return (self._renderer._manpage_urls, self._revision, self._markdown_by_default,
+                self._varlist_id, self._id_prefix, self._xref_targets)
+    @classmethod
+    def _parallel_render_init_worker(cls, a: Any) -> HTMLConverter:
+        return cls(*a)
+
+    def _render_code(self, option: dict[str, Any], key: str) -> list[str]:
+        if lit := option_is(option, key, 'literalDocBook'):
+            raise RuntimeError("can't render html in the presence of docbook")
+        else:
+            return super()._render_code(option, key)
+
+    def _render_description(self, desc: str | dict[str, Any]) -> list[str]:
+        if isinstance(desc, str) and not self._markdown_by_default:
+            raise RuntimeError("can't render html in the presence of docbook")
+        else:
+            return super()._render_description(desc)
+
+    def _related_packages_header(self) -> list[str]:
+        return [
+            '<p><span class="emphasis"><em>Related packages:</em></span></p>',
+        ]
+
+    def _decl_def_header(self, header: str) -> list[str]:
+        return [
+            f'<p><span class="emphasis"><em>{header}:</em></span></p>',
+            '<table border="0" summary="Simple list" class="simplelist">'
+        ]
+
+    def _decl_def_entry(self, href: Optional[str], name: str) -> list[str]:
+        if href is not None:
+            href = f' href="{html.escape(href, True)}"'
+        return [
+            "<tr><td>",
+            f'<code class="filename"><a class="filename" {href} target="_top">',
+            f'{html.escape(name)}',
+            '</a></code>',
+            "</td></tr>"
+        ]
+
+    def _decl_def_footer(self) -> list[str]:
+        return [ "</table>" ]
+
+    def finalize(self) -> str:
+        result = []
+
+        result += [
+            '<div class="variablelist">',
+            f'<a id="{html.escape(self._varlist_id, True)}"></a>',
+            ' <dl class="variablelist">',
+        ]
+
+        for (name, opt) in self._sorted_options():
+            id = make_xml_id(self._id_prefix + name)
+            target = self._xref_targets[id]
+            result += [
+                '<dt>',
+                ' <span class="term">',
+                # docbook compat, these could be one tag
+                f' <a id="{html.escape(id, True)}"></a><a class="term" href="{target.href()}">'
+                # no spaces here (and string merging) for docbook output compat
+                f'<code class="option">{html.escape(name)}</code>',
+                '  </a>',
+                ' </span>',
+                '</dt>',
+                '<dd>',
+            ]
+            result += opt.lines
+            result += [
+                "</dd>",
+            ]
+
+        result += [
+            " </dl>",
+            "</div>"
+        ]
+
+        return "\n".join(result)
+
 def _build_cli_db(p: argparse.ArgumentParser) -> None:
     p.add_argument('--manpage-urls', required=True)
     p.add_argument('--revision', required=True)
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/types.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/types.py
index d20e056aacdc7..c6146429ea02d 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/types.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/types.py
@@ -1,8 +1,7 @@
-from collections.abc import Sequence, MutableMapping
+from collections.abc import Sequence
 from typing import Any, Callable, Optional, Tuple, NamedTuple
 
 from markdown_it.token import Token
-from markdown_it.utils import OptionsDict
 
 OptionLoc = str | dict[str, str]
 Option = dict[str, str | dict[str, str] | list[OptionLoc]]
@@ -12,4 +11,4 @@ class RenderedOption(NamedTuple):
     lines: list[str]
     links: Optional[list[str]] = None
 
-RenderFn = Callable[[Token, Sequence[Token], int, OptionsDict, MutableMapping[str, Any]], str]
+RenderFn = Callable[[Token, Sequence[Token], int], str]
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/utils.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/utils.py
new file mode 100644
index 0000000000000..3377d1fa4fe18
--- /dev/null
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/utils.py
@@ -0,0 +1,21 @@
+from typing import Any
+
+_frozen_classes: dict[type, type] = {}
+
+# make a derived class freezable (i.e., disallow modifications).
+# we do this by changing the class of an instance at runtime when freeze()
+# is called, providing a derived class that is exactly the same except
+# for a __setattr__ that raises an error when called. this beats having
+# a field for frozenness and an unconditional __setattr__ that checks this
+# field because it does not insert anything into the class dict.
+class Freezeable:
+    def freeze(self) -> None:
+        cls = type(self)
+        if not (frozen := _frozen_classes.get(cls)):
+            def __setattr__(instance: Any, n: str, v: Any) -> None:
+                raise TypeError(f'{cls.__name__} is frozen')
+            frozen = type(cls.__name__, (cls,), {
+                '__setattr__': __setattr__,
+            })
+            _frozen_classes[cls] = frozen
+        self.__class__ = frozen
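
The runtime class swap above is easy to misread, so here is a minimal usage sketch. It is not part of the patch; it assumes Freezeable is importable from the new nixos_render_docs.utils module and uses an illustrative Point class:

    from nixos_render_docs.utils import Freezeable

    class Point(Freezeable):
        def __init__(self, x: int, y: int) -> None:
            self.x = x
            self.y = y

    p = Point(1, 2)
    p.x = 3            # still mutable: the default __setattr__ is in effect
    p.freeze()         # the instance's class is swapped for a frozen subclass
    try:
        p.x = 4        # the swapped-in __setattr__ rejects all assignments
    except TypeError as err:
        print(err)     # prints "Point is frozen"

Only the instance whose freeze() was called is affected; other instances of the same class stay mutable, and the frozen subclass is cached so repeated freezes reuse it.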
diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_asciidoc.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_asciidoc.py
index 487506469954b..3cf5b208f3923 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_asciidoc.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_asciidoc.py
@@ -1,9 +1,11 @@
-import nixos_render_docs
+import nixos_render_docs as nrd
 
 from sample_md import sample1
 
-class Converter(nixos_render_docs.md.Converter):
-    __renderer__ = nixos_render_docs.asciidoc.AsciiDocRenderer
+class Converter(nrd.md.Converter[nrd.asciidoc.AsciiDocRenderer]):
+    def __init__(self, manpage_urls: dict[str, str]):
+        super().__init__()
+        self._renderer = nrd.asciidoc.AsciiDocRenderer(manpage_urls)
 
 def test_lists() -> None:
     c = Converter({})
diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py
index 5e0d63eb6723d..72700d3dbab38 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py
@@ -1,4 +1,4 @@
-import nixos_render_docs
+import nixos_render_docs as nrd
 
 from sample_md import sample1
 
@@ -6,8 +6,10 @@ from typing import Mapping, Optional
 
 import markdown_it
 
-class Converter(nixos_render_docs.md.Converter):
-    __renderer__ = nixos_render_docs.commonmark.CommonMarkRenderer
+class Converter(nrd.md.Converter[nrd.commonmark.CommonMarkRenderer]):
+    def __init__(self, manpage_urls: Mapping[str, str]):
+        super().__init__()
+        self._renderer = nrd.commonmark.CommonMarkRenderer(manpage_urls)
 
 # NOTE: in these tests we represent trailing spaces by ` ` and replace them with real space later,
 # since a number of editors will strip trailing whitespace on save and that would break the tests.
diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_headings.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_headings.py
index 0b73cdc8e7c72..8cbf3dabcea24 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_headings.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_headings.py
@@ -1,10 +1,12 @@
-import nixos_render_docs
+import nixos_render_docs as nrd
 
 from markdown_it.token import Token
 
-class Converter(nixos_render_docs.md.Converter):
+class Converter(nrd.md.Converter[nrd.docbook.DocBookRenderer]):
     # actual renderer doesn't matter, we're just parsing.
-    __renderer__ = nixos_render_docs.docbook.DocBookRenderer
+    def __init__(self, manpage_urls: dict[str, str]) -> None:
+        super().__init__()
+        self._renderer = nrd.docbook.DocBookRenderer(manpage_urls)
 
 def test_heading_id_absent() -> None:
     c = Converter({})
diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_html.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_html.py
new file mode 100644
index 0000000000000..df366a8babd7e
--- /dev/null
+++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_html.py
@@ -0,0 +1,179 @@
+import nixos_render_docs as nrd
+import pytest
+
+from sample_md import sample1
+
+class Converter(nrd.md.Converter[nrd.html.HTMLRenderer]):
+    def __init__(self, manpage_urls: dict[str, str], xrefs: dict[str, nrd.manual_structure.XrefTarget]):
+        super().__init__()
+        self._renderer = nrd.html.HTMLRenderer(manpage_urls, xrefs)
+
+def unpretty(s: str) -> str:
+    return "".join(map(str.strip, s.splitlines())).replace('␣', ' ').replace('↵', '\n')
+
+def test_lists_styles() -> None:
+    # nested lists rotate through a number of list styles
+    c = Converter({}, {})
+    assert c._render("- - - - foo") == unpretty("""
+      <div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: disc;">
+       <li class="listitem">
+        <div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: circle;">
+         <li class="listitem">
+          <div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: square;">
+           <li class="listitem">
+            <div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: disc;">
+             <li class="listitem"><p>foo</p></li>
+            </ul></div>
+           </li>
+          </ul></div>
+         </li>
+        </ul></div>
+       </li>
+      </ul></div>
+    """)
+    assert c._render("1. 1. 1. 1. 1. 1. foo") == unpretty("""
+      <div class="orderedlist"><ol class="orderedlist compact"  type="1">
+       <li class="listitem">
+        <div class="orderedlist"><ol class="orderedlist compact"  type="a">
+         <li class="listitem">
+          <div class="orderedlist"><ol class="orderedlist compact"  type="i">
+           <li class="listitem">
+            <div class="orderedlist"><ol class="orderedlist compact"  type="A">
+             <li class="listitem">
+              <div class="orderedlist"><ol class="orderedlist compact"  type="I">
+               <li class="listitem">
+                <div class="orderedlist"><ol class="orderedlist compact"  type="1">
+                 <li class="listitem"><p>foo</p></li>
+                </ol></div>
+               </li>
+              </ol></div>
+             </li>
+            </ol></div>
+           </li>
+          </ol></div>
+         </li>
+        </ol></div>
+       </li>
+      </ol></div>
+    """)
+
+def test_xrefs() -> None:
+    # known xref targets render as links to their hrefs; unknown ids raise an error
+    c = Converter({}, {
+        'foo': nrd.manual_structure.XrefTarget('foo', '<hr/>', 'toc1', 'title1', 'index.html'),
+        'bar': nrd.manual_structure.XrefTarget('bar', '<br/>', 'toc2', 'title2', 'index.html', True),
+    })
+    assert c._render("[](#foo)") == '<p><a class="xref" href="index.html#foo" title="title1" ><hr/></a></p>'
+    assert c._render("[](#bar)") == '<p><a class="xref" href="index.html" title="title2" ><br/></a></p>'
+    with pytest.raises(nrd.html.UnresolvedXrefError) as exc:
+        c._render("[](#baz)")
+    assert exc.value.args[0] == 'bad local reference, id #baz not known'
+
+def test_full() -> None:
+    c = Converter({ 'man(1)': 'http://example.org' }, {})
+    assert c._render(sample1) == unpretty("""
+        <div class="warning">
+         <h3 class="title">Warning</h3>
+         <p>foo</p>
+         <div class="note">
+          <h3 class="title">Note</h3>
+          <p>nested</p>
+         </div>
+        </div>
+        <p>
+         <a class="link" href="link"  target="_top">↵
+          multiline↵
+         </a>
+        </p>
+        <p>
+         <a class="link" href="http://example.org" target="_top">
+          <span class="citerefentry"><span class="refentrytitle">man</span>(1)</span>
+         </a> reference
+        </p>
+        <p><a id="b" />some <a id="a" />nested anchors</p>
+        <p>
+         <span class="emphasis"><em>emph</em></span>␣
+         <span class="strong"><strong>strong</strong></span>␣
+         <span class="emphasis"><em>nesting emph <span class="strong"><strong>and strong</strong></span>␣
+         and <code class="literal">code</code></em></span>
+        </p>
+        <div class="itemizedlist">
+         <ul class="itemizedlist " style="list-style-type: disc;">
+          <li class="listitem"><p>wide bullet</p></li>
+          <li class="listitem"><p>list</p></li>
+         </ul>
+        </div>
+        <div class="orderedlist">
+         <ol class="orderedlist "  type="1">
+          <li class="listitem"><p>wide ordered</p></li>
+          <li class="listitem"><p>list</p></li>
+         </ol>
+        </div>
+        <div class="itemizedlist">
+         <ul class="itemizedlist compact" style="list-style-type: disc;">
+          <li class="listitem"><p>narrow bullet</p></li>
+          <li class="listitem"><p>list</p></li>
+         </ul>
+        </div>
+        <div class="orderedlist">
+         <ol class="orderedlist compact"  type="1">
+          <li class="listitem"><p>narrow ordered</p></li>
+          <li class="listitem"><p>list</p></li>
+         </ol>
+        </div>
+        <div class="blockquote">
+         <blockquote class="blockquote">
+          <p>quotes</p>
+          <div class="blockquote">
+           <blockquote class="blockquote">
+            <p>with <span class="emphasis"><em>nesting</em></span></p>
+            <pre class="programlisting">↵
+             nested code block↵
+            </pre>
+           </blockquote>
+          </div>
+          <div class="itemizedlist">
+           <ul class="itemizedlist compact" style="list-style-type: disc;">
+            <li class="listitem"><p>and lists</p></li>
+            <li class="listitem">
+             <pre class="programlisting">↵
+              containing code↵
+             </pre>
+            </li>
+           </ul>
+          </div>
+          <p>and more quote</p>
+         </blockquote>
+        </div>
+        <div class="orderedlist">
+         <ol class="orderedlist compact" start="100" type="1">
+          <li class="listitem"><p>list starting at 100</p></li>
+          <li class="listitem"><p>goes on</p></li>
+         </ol>
+        </div>
+        <div class="variablelist">
+         <dl class="variablelist">
+          <dt><span class="term">deflist</span></dt>
+          <dd>
+           <div class="blockquote">
+            <blockquote class="blockquote">
+             <p>
+              with a quote↵
+              and stuff
+             </p>
+            </blockquote>
+           </div>
+           <pre class="programlisting">↵
+            code block↵
+           </pre>
+           <pre class="programlisting">↵
+            fenced block↵
+           </pre>
+           <p>text</p>
+          </dd>
+          <dt><span class="term">more stuff in same deflist</span></dt>
+          <dd>
+           <p>foo</p>
+          </dd>
+         </dl>
+        </div>""")
diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_lists.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_lists.py
index 660c410a85cca..f53442a96d4cf 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_lists.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_lists.py
@@ -1,11 +1,13 @@
-import nixos_render_docs
+import nixos_render_docs as nrd
 import pytest
 
 from markdown_it.token import Token
 
-class Converter(nixos_render_docs.md.Converter):
+class Converter(nrd.md.Converter[nrd.docbook.DocBookRenderer]):
     # actual renderer doesn't matter, we're just parsing.
-    __renderer__ = nixos_render_docs.docbook.DocBookRenderer
+    def __init__(self, manpage_urls: dict[str, str]) -> None:
+        super().__init__()
+        self._renderer = nrd.docbook.DocBookRenderer(manpage_urls)
 
 @pytest.mark.parametrize("ordered", [True, False])
 def test_list_wide(ordered: bool) -> None:
diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_manpage.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_manpage.py
index fbfd21358a857..9b7e1652f0f66 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_manpage.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_manpage.py
@@ -1,4 +1,4 @@
-import nixos_render_docs
+import nixos_render_docs as nrd
 
 from sample_md import sample1
 
@@ -6,15 +6,10 @@ from typing import Mapping, Optional
 
 import markdown_it
 
-class Converter(nixos_render_docs.md.Converter):
-    def __renderer__(self, manpage_urls: Mapping[str, str],
-                     parser: Optional[markdown_it.MarkdownIt] = None
-                     ) -> nixos_render_docs.manpage.ManpageRenderer:
-        return nixos_render_docs.manpage.ManpageRenderer(manpage_urls, self.options_by_id, parser)
-
+class Converter(nrd.md.Converter[nrd.manpage.ManpageRenderer]):
     def __init__(self, manpage_urls: Mapping[str, str], options_by_id: dict[str, str] = {}):
-        self.options_by_id = options_by_id
-        super().__init__(manpage_urls)
+        super().__init__()
+        self._renderer = nrd.manpage.ManpageRenderer(manpage_urls, options_by_id)
 
 def test_inline_code() -> None:
     c = Converter({})
@@ -32,17 +27,15 @@ def test_expand_link_targets() -> None:
 
 def test_collect_links() -> None:
     c = Converter({}, { '#foo': "bar" })
-    assert isinstance(c._md.renderer, nixos_render_docs.manpage.ManpageRenderer)
-    c._md.renderer.link_footnotes = []
+    c._renderer.link_footnotes = []
     assert c._render("[a](link1) [b](link2)") == "\\fBa\\fR[1]\\fR \\fBb\\fR[2]\\fR"
-    assert c._md.renderer.link_footnotes == ['link1', 'link2']
+    assert c._renderer.link_footnotes == ['link1', 'link2']
 
 def test_dedup_links() -> None:
     c = Converter({}, { '#foo': "bar" })
-    assert isinstance(c._md.renderer, nixos_render_docs.manpage.ManpageRenderer)
-    c._md.renderer.link_footnotes = []
+    c._renderer.link_footnotes = []
     assert c._render("[a](link) [b](link)") == "\\fBa\\fR[1]\\fR \\fBb\\fR[1]\\fR"
-    assert c._md.renderer.link_footnotes == ['link']
+    assert c._renderer.link_footnotes == ['link']
 
 def test_full() -> None:
     c = Converter({ 'man(1)': 'http://example.org' })
diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py
index 1d836a916d964..f94ede6382bf0 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py
@@ -1,10 +1,12 @@
-import nixos_render_docs
+import nixos_render_docs as nrd
 
 from markdown_it.token import Token
 
-class Converter(nixos_render_docs.md.Converter):
+class Converter(nrd.md.Converter[nrd.docbook.DocBookRenderer]):
     # actual renderer doesn't matter, we're just parsing.
-    __renderer__ = nixos_render_docs.docbook.DocBookRenderer
+    def __init__(self, manpage_urls: dict[str, str]) -> None:
+        super().__init__()
+        self._renderer = nrd.docbook.DocBookRenderer(manpage_urls)
 
 def test_attr_span_parsing() -> None:
     c = Converter({})
diff --git a/pkgs/tools/package-management/libdnf/default.nix b/pkgs/tools/package-management/libdnf/default.nix
index 6fee9ea187ca3..f006bb60190e7 100644
--- a/pkgs/tools/package-management/libdnf/default.nix
+++ b/pkgs/tools/package-management/libdnf/default.nix
@@ -3,13 +3,13 @@
 
 stdenv.mkDerivation rec {
   pname = "libdnf";
-  version = "0.69.0";
+  version = "0.70.0";
 
   src = fetchFromGitHub {
     owner = "rpm-software-management";
     repo = pname;
     rev = version;
-    sha256 = "sha256-Mc9yI18D4OYv8l4axQ8W0XZ8HfmEZ5IhHC6/uKkv0Ec=";
+    sha256 = "sha256-tuHrkL3tL+sCLPxNElVgnb4zQ6OTu65X9pb/cX6vD/w=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/tools/package-management/nix-update/default.nix b/pkgs/tools/package-management/nix-update/default.nix
index a24fd43bc6fa0..8b8e6d811d53b 100644
--- a/pkgs/tools/package-management/nix-update/default.nix
+++ b/pkgs/tools/package-management/nix-update/default.nix
@@ -8,14 +8,14 @@
 
 buildPythonApplication rec {
   pname = "nix-update";
-  version = "0.15.0";
+  version = "0.15.1";
   format = "setuptools";
 
   src = fetchFromGitHub {
     owner = "Mic92";
     repo = pname;
     rev = version;
-    sha256 = "sha256-Q3yExefODBrrziRnCYETrJgSn42BOR7ZsL8pu3q5D/w=";
+    sha256 = "sha256-AYw2czg8HwA/ATQZO0snfb5GRsz77J6cPGDQ8b4W6AI=";
   };
 
   makeWrapperArgs = [
diff --git a/pkgs/tools/security/aiodnsbrute/default.nix b/pkgs/tools/security/aiodnsbrute/default.nix
index f1d170e7d5992..c11255e6ab802 100644
--- a/pkgs/tools/security/aiodnsbrute/default.nix
+++ b/pkgs/tools/security/aiodnsbrute/default.nix
@@ -1,44 +1,38 @@
 { lib
-, buildPythonApplication
 , fetchFromGitHub
-, aiodns
-, click
-, tqdm
-, uvloop
+, python3
 }:
 
-buildPythonApplication rec {
+python3.pkgs.buildPythonApplication rec {
   pname = "aiodnsbrute";
   version = "0.3.3";
+  format = "setuptools";
 
   src = fetchFromGitHub {
     owner = "blark";
     repo = pname;
-    rev = "v${version}";
-    sha256 = "sha256-cEpk71VoQJZfKeAZummkk7yjtXKSMndgo0VleYiMlWE=";
+    rev = "refs/tags/v${version}";
+    hash = "sha256-cEpk71VoQJZfKeAZummkk7yjtXKSMndgo0VleYiMlWE=";
   };
 
-  # https://github.com/blark/aiodnsbrute/pull/8
-  prePatch = ''
-    substituteInPlace setup.py --replace " 'asyncio', " ""
-  '';
-
-  propagatedBuildInputs = [
-     aiodns
-     click
-     tqdm
-     uvloop
+  propagatedBuildInputs = with python3.pkgs; [
+    aiodns
+    click
+    tqdm
+    uvloop
   ];
 
-  # no tests present
+  # Project has no tests
   doCheck = false;
 
-  pythonImportsCheck = [ "aiodnsbrute.cli" ];
+  pythonImportsCheck = [
+    "aiodnsbrute.cli"
+  ];
 
   meta = with lib; {
     description = "DNS brute force utility";
     homepage = "https://github.com/blark/aiodnsbrute";
-    # https://github.com/blark/aiodnsbrute/issues/5
+    changelog = "https://github.com/blark/aiodnsbrute/releases/tag/v${version}";
     license = with licenses; [ gpl3Only ];
     maintainers = with maintainers; [ fab ];
   };
diff --git a/pkgs/tools/security/cloudfox/default.nix b/pkgs/tools/security/cloudfox/default.nix
index f03a1a17171a5..dc58effa2ac2c 100644
--- a/pkgs/tools/security/cloudfox/default.nix
+++ b/pkgs/tools/security/cloudfox/default.nix
@@ -5,16 +5,16 @@
 
 buildGoModule rec {
   pname = "cloudfox";
-  version = "1.9.1";
+  version = "1.10.0";
 
   src = fetchFromGitHub {
     owner = "BishopFox";
     repo = pname;
     rev = "refs/tags/v${version}";
-    hash = "sha256-TV2knPG5n5l8APeAmpDfu6vQLtEhjqH21JXAZLk0DDI=";
+    hash = "sha256-kB6nH/5/76r9SGyaFPXjwgZ+b5ha85Z7v1GFNgqluDY=";
   };
 
-  vendorHash = "sha256-xMHlooXuLECQi7co2/WvY0TIoV0S5OgcBklICCFk3ls=";
+  vendorHash = "sha256-v8rEsp2mDgfjCO2VvWNIxex8F350MDnZ40bR4szv+3o=";
 
   # Some tests are failing because of wrong filename/path
   doCheck = false;
diff --git a/pkgs/tools/security/grype/default.nix b/pkgs/tools/security/grype/default.nix
index 926a48bfb0cb2..8334b034841df 100644
--- a/pkgs/tools/security/grype/default.nix
+++ b/pkgs/tools/security/grype/default.nix
@@ -2,19 +2,18 @@
 , buildGoModule
 , fetchFromGitHub
 , installShellFiles
-
 , openssl
 }:
 
 buildGoModule rec {
   pname = "grype";
-  version = "0.58.0";
+  version = "0.59.0";
 
   src = fetchFromGitHub {
     owner = "anchore";
     repo = pname;
-    rev = "v${version}";
-    hash = "sha256-7yb6ufRoAB70hnoBv6ZwEtKeTJxxmWEknksCmM55eYE=";
+    rev = "refs/tags/v${version}";
+    hash = "sha256-TAoF67Fxl0OUiQd48h786+lIsdEuk4C/zdeEO/DRX/k=";
     # populate values that require us to use git. By doing this in postFetch we
     # can delete .git afterwards and maintain better reproducibility of the src.
     leaveDotGit = true;
@@ -26,14 +25,19 @@ buildGoModule rec {
       find "$out" -name .git -print0 | xargs -0 rm -rf
     '';
   };
+
   proxyVendor = true;
 
-  vendorHash = "sha256-7i9/tufEUGVqNHP61pQuIK2tMdiBcs3vfFz1bzlHFKk=";
+  vendorHash = "sha256-kRxKa3HUO2yvMai03voVvsprg/Kd01OtJQHJn3ECk58=";
 
   nativeBuildInputs = [
     installShellFiles
   ];
 
+  nativeCheckInputs = [
+    openssl
+  ];
+
   subPackages = [ "." ];
 
   excludedPackages = "test/integration";
@@ -55,7 +59,6 @@ buildGoModule rec {
     ldflags+=" -X github.com/anchore/grype/internal/version.buildDate=$(cat SOURCE_DATE_EPOCH)"
   '';
 
-  nativeCheckInputs = [ openssl ];
   preCheck = ''
     # test all dirs (except excluded)
     unset subPackages
diff --git a/pkgs/tools/security/otpauth/default.nix b/pkgs/tools/security/otpauth/default.nix
index b5678dff7d843..032372d87e92c 100644
--- a/pkgs/tools/security/otpauth/default.nix
+++ b/pkgs/tools/security/otpauth/default.nix
@@ -5,16 +5,16 @@
 
 buildGoModule rec {
   pname = "otpauth";
-  version = "0.5.0";
+  version = "0.5.1";
 
   src = fetchFromGitHub {
     owner = "dim13";
     repo = "otpauth";
     rev = "v${version}";
-    sha256 = "sha256-toFBkUssU10ejoZzWnrm5o2P0p5Oq8kKP4vb2ASDC0s=";
+    sha256 = "sha256-jeKxCuE3cA/oTEKwdrCGPchsrtaMyirTzv8oLl9gxtA=";
   };
 
-  vendorSha256 = "sha256-jnIq7Zc2MauJReJ9a8TeqXXsvHixsBB+znmXAxcpqUQ=";
+  vendorHash = "sha256-jnIq7Zc2MauJReJ9a8TeqXXsvHixsBB+znmXAxcpqUQ=";
   doCheck = true;
 
   meta = with lib; {
diff --git a/pkgs/tools/security/rekor/default.nix b/pkgs/tools/security/rekor/default.nix
index e367314ba19be..6fadf0c8f3619 100644
--- a/pkgs/tools/security/rekor/default.nix
+++ b/pkgs/tools/security/rekor/default.nix
@@ -54,7 +54,7 @@ let
         homepage = "https://github.com/sigstore/rekor";
         changelog = "https://github.com/sigstore/rekor/releases/tag/v${version}";
         license = licenses.asl20;
-        maintainers = with maintainers; [ lesuisse jk ];
+        maintainers = with maintainers; [ lesuisse jk developer-guy ];
       };
     };
 in {
diff --git a/pkgs/tools/security/trufflehog/default.nix b/pkgs/tools/security/trufflehog/default.nix
index fb8cde8943587..7ae37cf286ca6 100644
--- a/pkgs/tools/security/trufflehog/default.nix
+++ b/pkgs/tools/security/trufflehog/default.nix
@@ -5,13 +5,13 @@
 
 buildGoModule rec {
   pname = "trufflehog";
-  version = "3.28.6";
+  version = "3.28.7";
 
   src = fetchFromGitHub {
     owner = "trufflesecurity";
     repo = "trufflehog";
     rev = "refs/tags/v${version}";
-    hash = "sha256-CzZRkEQ3FANzg7QZi5r/AT4DurKo3M4V/ghxGvJas7s=";
+    hash = "sha256-6oOIKvUocImC3HMeG5aJXLlruymechcsDhMxEyYiNzU=";
   };
 
   vendorHash = "sha256-/4xZjqstrjfIlD15x2INSunb57WGR7NzKaQxUABxQV0=";
diff --git a/pkgs/tools/security/vault/default.nix b/pkgs/tools/security/vault/default.nix
index 5c2a2e9b0d898..266ed3f5c8837 100644
--- a/pkgs/tools/security/vault/default.nix
+++ b/pkgs/tools/security/vault/default.nix
@@ -6,16 +6,16 @@
 
 buildGoModule rec {
   pname = "vault";
-  version = "1.12.3";
+  version = "1.13.0";
 
   src = fetchFromGitHub {
     owner = "hashicorp";
     repo = "vault";
     rev = "v${version}";
-    sha256 = "sha256-ZNk9bmZwD1aUY3fYT5Qngoq+9qXgvH/nWSWc30st7nE=";
+    sha256 = "sha256-F9Ki+3jMkJ+CI2yQmrnqT98xJqSSKQTtYHxQTYdfNbQ=";
   };
 
-  vendorHash = "sha256-sPpTB3N1w0JppHcwdyLYwSxjzzUAJcBJ5zJ2u4rXXkQ=";
+  vendorHash = "sha256-Ny4TTa67x/mwTclZrtPoWU6nHu5q4KafP1s4rvk21Hs=";
 
   subPackages = [ "." ];
 
diff --git a/pkgs/tools/security/waf-tester/default.nix b/pkgs/tools/security/waf-tester/default.nix
index 427bb1db2c310..b8dd01ed19b81 100644
--- a/pkgs/tools/security/waf-tester/default.nix
+++ b/pkgs/tools/security/waf-tester/default.nix
@@ -7,16 +7,16 @@
 
 buildGoModule rec {
   pname = "waf-tester";
-  version = "0.6.12";
+  version = "0.6.13";
 
   src = fetchFromGitHub {
     owner = "jreisinger";
     repo = pname;
-    rev = "v${version}";
-    hash = "sha256-baj9JuC4PF5c50K2aY+xwdE9t4aTzOu+isqJ6r1pWuc=";
+    rev = "refs/tags/v${version}";
+    hash = "sha256-UPviooQNGRVwf/bTz9ApedJDAGeCvh9iD1HXFOQXPcw=";
   };
 
-  vendorSha256 = "sha256-qVzgZX4HVXZ3qgYAu3a46vcGl4Pk2D1Zx/giEmPEG88=";
+  vendorHash = "sha256-HOYHrR1LtVcXMKFHPaA7PYH4Fp9nhqal2oxYTq/i4/8=";
 
   ldflags = [
     "-s"
@@ -33,6 +33,7 @@ buildGoModule rec {
   meta = with lib; {
     description = "Tool to test Web Application Firewalls (WAFs)";
     homepage = "https://github.com/jreisinger/waf-tester";
+    changelog = "https://github.com/jreisinger/waf-tester/releases/tag/v${version}";
     license = licenses.gpl3Only;
     maintainers = with maintainers; [ fab ];
   };
diff --git a/pkgs/tools/system/automatic-timezoned/default.nix b/pkgs/tools/system/automatic-timezoned/default.nix
index ceee8c9efa6c0..b2a7bb8a8b5aa 100644
--- a/pkgs/tools/system/automatic-timezoned/default.nix
+++ b/pkgs/tools/system/automatic-timezoned/default.nix
@@ -5,16 +5,16 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "automatic-timezoned";
-  version = "1.0.62";
+  version = "1.0.68";
 
   src = fetchFromGitHub {
     owner = "maxbrunet";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-3T9/VAr/ZrGTZZK3rsIpnOeKdp9WxPO0JkGamDi3hyM=";
+    sha256 = "sha256-wtmyUlkruFE3dQmsb9x2683gwEVjsBCQJ8VW4b0IdkU=";
   };
 
-  cargoHash = "sha256-rNMEXvAGpKxn2t6uvgTx3sc3tpGCXmzOM/iPWwWq2JM=";
+  cargoHash = "sha256-nQx70KtWzvg6w8UNJqTrqzBc5SZKwCiHx2jhoBbmNP4=";
 
   meta = with lib; {
     description = "Automatically update system timezone based on location";
diff --git a/pkgs/tools/system/nsc/default.nix b/pkgs/tools/system/nsc/default.nix
index ef56a9ea1d6b4..b8898fcad6553 100644
--- a/pkgs/tools/system/nsc/default.nix
+++ b/pkgs/tools/system/nsc/default.nix
@@ -7,13 +7,13 @@
 
 buildGoModule rec {
   pname = "nsc";
-  version = "2.7.6";
+  version = "2.7.8";
 
   src = fetchFromGitHub {
     owner = "nats-io";
     repo = pname;
     rev = "v${version}";
-    hash = "sha256-aieUCQ5JVJQs4RoTGaXwfTv3xC1ozSsQyfCLsD245go=";
+    hash = "sha256-cgp/kkHgH5JIWMgrUHHHyuKedbJ3n6L9xBglXCcMYms=";
   };
 
   ldflags = [
@@ -23,7 +23,7 @@ buildGoModule rec {
     "-X main.builtBy=nixpkgs"
   ];
 
-  vendorHash = "sha256-gDwppx0ORG+pXzTdGtUVbiFyTD/P7avt+/V89Gl0QYY=";
+  vendorHash = "sha256-l9Fl0j8Fa/hiV/2ebmIlnFtekYLwDg3eMpY7lLBreGg=";
 
   nativeBuildInputs = [ installShellFiles ];
 
diff --git a/pkgs/tools/system/zenith/default.nix b/pkgs/tools/system/zenith/default.nix
index 282bd31571d90..25552ad8b8601 100644
--- a/pkgs/tools/system/zenith/default.nix
+++ b/pkgs/tools/system/zenith/default.nix
@@ -12,13 +12,13 @@ assert nvidiaSupport -> stdenv.isLinux;
 
 rustPlatform.buildRustPackage rec {
   pname = "zenith";
-  version = "0.13.1";
+  version = "0.14.0";
 
   src = fetchFromGitHub {
     owner = "bvaisvil";
     repo = pname;
     rev = version;
-    sha256 = "sha256-N/DvPVYGM/DjTvKvOlR60q6rvNyfAQlnvFnFG5nbUmQ=";
+    sha256 = "sha256-GrrdE9Ih8x8N2HN+1NfxfthfHbufLAT/Ac+ZZWW5Zg8=";
   };
 
   # remove cargo config so it can find the linker on aarch64-linux
@@ -26,7 +26,7 @@ rustPlatform.buildRustPackage rec {
     rm .cargo/config
   '';
 
-  cargoSha256 = "sha256-Y/vvRJpv82Uc+Bu3lbZxRsu4TL6sAjz5AWHAHkwh98Y=";
+  cargoHash = "sha256-2VgyUVBcmSlmPSqAWrzWjH5J6Co/rAC9EQCckYzfW2o=";
 
   nativeBuildInputs = [ llvmPackages.clang ] ++ lib.optional nvidiaSupport makeWrapper;
   buildInputs = [ llvmPackages.libclang ] ++ lib.optionals stdenv.isDarwin [ IOKit ];
diff --git a/pkgs/tools/text/miller/default.nix b/pkgs/tools/text/miller/default.nix
index 9d00ab028a9aa..250c094d30168 100644
--- a/pkgs/tools/text/miller/default.nix
+++ b/pkgs/tools/text/miller/default.nix
@@ -2,16 +2,16 @@
 
 buildGoModule rec {
   pname = "miller";
-  version = "6.6.0";
+  version = "6.7.0";
 
   src = fetchFromGitHub {
     owner = "johnkerl";
     repo = "miller";
     rev = "v${version}";
-    sha256 = "sha256-Uvf2kkWD6ir8XicEX+FNYmd2A9c/jd6GgwjYomNfjfc=";
+    sha256 = "sha256-fKgw4ii/riPTklEB+Q8/sOx2dCMS/kevyvXgpyFlkVs=";
   };
 
-  vendorSha256 = "sha256-VW0mTq0oc95wVkFa+0rpsiOlS/9LT2Xy6u0RtSTsEoA=";
+  vendorHash = "sha256-uZa9H7Tj2ynwl3fFY9U+WZ0FcNuvHRQf7RCW6rebm5g=";
 
   subPackages = [ "cmd/mlr" ];
 
diff --git a/pkgs/tools/text/ripgrep-all/default.nix b/pkgs/tools/text/ripgrep-all/default.nix
index 0fc39b8da2803..d25d068b7e075 100644
--- a/pkgs/tools/text/ripgrep-all/default.nix
+++ b/pkgs/tools/text/ripgrep-all/default.nix
@@ -1,5 +1,5 @@
 { stdenv, lib, fetchFromGitHub, rustPlatform, makeWrapper, ffmpeg
-, pandoc, poppler_utils, ripgrep, Security, imagemagick, tesseract
+, pandoc, poppler_utils, ripgrep, Security, imagemagick, tesseract3
 }:
 
 rustPlatform.buildRustPackage rec {
@@ -19,7 +19,7 @@ rustPlatform.buildRustPackage rec {
 
   postInstall = ''
     wrapProgram $out/bin/rga \
-      --prefix PATH ":" "${lib.makeBinPath [ ffmpeg pandoc poppler_utils ripgrep imagemagick tesseract ]}"
+      --prefix PATH ":" "${lib.makeBinPath [ ffmpeg pandoc poppler_utils ripgrep imagemagick tesseract3 ]}"
   '';
 
   # Use upstream's example data to run a couple of queries to ensure the dependencies
diff --git a/pkgs/tools/text/tuc/default.nix b/pkgs/tools/text/tuc/default.nix
index 5401515d0fbc8..3598a317633f7 100644
--- a/pkgs/tools/text/tuc/default.nix
+++ b/pkgs/tools/text/tuc/default.nix
@@ -1,16 +1,16 @@
 { lib, fetchFromGitHub, rustPlatform }:
 rustPlatform.buildRustPackage rec {
   pname = "tuc";
-  version = "0.11.0";
+  version = "1.0.0";
 
   src = fetchFromGitHub {
     owner = "riquito";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-M2SK6KF8R0WcyFf8eTyYNK5oXj/DfCrAkUZJ3J2LF6U=";
+    sha256 = "sha256-zEWQ1wGpEowVPdlezC/LZhoPGS546nuqREfavo3fbTs=";
   };
 
-  cargoSha256 = "sha256-MhEIDRC40zQ8mBXxONavtPr87SrueV57HhmIRLIagGA=";
+  cargoHash = "sha256-YRw1HxVy1/SOWfareR6rh6M78xFm+Im//klhXGGt95g=";
 
   meta = with lib; {
     description = "When cut doesn't cut it";
diff --git a/pkgs/tools/wayland/wl-mirror/default.nix b/pkgs/tools/wayland/wl-mirror/default.nix
index 729a8a915aa7a..1ab7071a46cb8 100644
--- a/pkgs/tools/wayland/wl-mirror/default.nix
+++ b/pkgs/tools/wayland/wl-mirror/default.nix
@@ -28,13 +28,13 @@ in
 
 stdenv.mkDerivation rec {
   pname = "wl-mirror";
-  version = "0.12.2";
+  version = "0.13.0";
 
   src = fetchFromGitHub {
     owner = "Ferdi265";
     repo = "wl-mirror";
     rev = "v${version}";
-    hash = "sha256-NUujjcDRVpC5LbJNy2I5cVCOSoqS14XxjsYiZNOBs+s=";
+    hash = "sha256-jjOcEr/E7l3ykdLAfiDlRSI0u76byDmBwfispTbopk8=";
   };
 
   strictDeps = true;
diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix
index 9b97a3d5345f8..090f5a4123cba 100644
--- a/pkgs/top-level/all-packages.nix
+++ b/pkgs/top-level/all-packages.nix
@@ -1362,7 +1362,7 @@ with pkgs;
 
   aioblescan = with python3Packages; toPythonApplication aioblescan;
 
-  aiodnsbrute = python3Packages.callPackage ../tools/security/aiodnsbrute { };
+  aiodnsbrute = callPackage ../tools/security/aiodnsbrute { };
 
   aircrack-ng = callPackage ../tools/networking/aircrack-ng { };
 
@@ -6761,7 +6761,7 @@ with pkgs;
 
   dtc = callPackage ../development/compilers/dtc { };
 
-  dt-schema = python3Packages.callPackage ../development/tools/dt-schema { };
+  dt-schema = with python3Packages; toPythonApplication dtschema;
 
   dub = callPackage ../development/tools/build-managers/dub { };
 
@@ -33553,7 +33553,7 @@ with pkgs;
     tesseract3
     tesseract4
     tesseract5;
-  tesseract = tesseract3;
+  tesseract = tesseract5;
 
   tetraproc = callPackage ../applications/audio/tetraproc { };
 
diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix
index 48b9d11131e95..1f62eb218e230 100644
--- a/pkgs/top-level/python-packages.nix
+++ b/pkgs/top-level/python-packages.nix
@@ -2233,6 +2233,8 @@ self: super: with self; {
 
   dask = callPackage ../development/python-modules/dask { };
 
+  dask-awkward = callPackage ../development/python-modules/dask-awkward { };
+
   dask-gateway = callPackage ../development/python-modules/dask-gateway { };
 
   dask-gateway-server = callPackage ../development/python-modules/dask-gateway-server { };
@@ -2898,6 +2900,8 @@ self: super: with self; {
 
   dtlssocket = callPackage ../development/python-modules/dtlssocket { };
 
+  dtschema = callPackage ../development/python-modules/dtschema { };
+
   ducc0 = callPackage ../development/python-modules/ducc0 { };
 
   duckdb = callPackage ../development/python-modules/duckdb {
@@ -3192,6 +3196,8 @@ self: super: with self; {
 
   exchangelib = callPackage ../development/python-modules/exchangelib { };
 
+  execnb = callPackage ../development/python-modules/execnb { };
+
   execnet = callPackage ../development/python-modules/execnet { };
 
   executing = callPackage ../development/python-modules/executing { };
@@ -3277,6 +3283,8 @@ self: super: with self; {
 
   faraday-plugins = callPackage ../development/python-modules/faraday-plugins { };
 
+  fastai = callPackage ../development/python-modules/fastai { };
+
   fastapi = callPackage ../development/python-modules/fastapi { };
 
   fastapi-mail = callPackage ../development/python-modules/fastapi-mail { };
@@ -3291,6 +3299,8 @@ self: super: with self; {
 
   fastdiff = callPackage ../development/python-modules/fastdiff { };
 
+  fastdownload = callPackage ../development/python-modules/fastdownload { };
+
   fastdtw = callPackage ../development/python-modules/fastdtw { };
 
   fastecdsa = callPackage ../development/python-modules/fastecdsa { };
@@ -6298,6 +6308,8 @@ self: super: with self; {
 
   nbconvert = callPackage ../development/python-modules/nbconvert { };
 
+  nbdev = callPackage ../development/python-modules/nbdev { };
+
   nbdime = callPackage ../development/python-modules/nbdime { };
 
   nbformat = callPackage ../development/python-modules/nbformat { };
@@ -9156,7 +9168,9 @@ self: super: with self; {
 
   pytest-localserver = callPackage ../development/python-modules/pytest-localserver { };
 
-  pytest-logdog = callPackage ../development/python-modules/pytest-logdog{ };
+  pytest-logdog = callPackage ../development/python-modules/pytest-logdog { };
+
+  pytest-md-report = callPackage ../development/python-modules/pytest-md-report { };
 
   pytest-metadata = callPackage ../development/python-modules/pytest-metadata { };
 
@@ -11494,6 +11508,10 @@ self: super: with self; {
 
   textacy = callPackage ../development/python-modules/textacy { };
 
+  textnets = callPackage ../development/python-modules/textnets {
+    en_core_web_sm = spacy_models.en_core_web_sm;
+  };
+
   texttable = callPackage ../development/python-modules/texttable { };
 
   text-unidecode = callPackage ../development/python-modules/text-unidecode { };
@@ -11991,6 +12009,8 @@ self: super: with self; {
 
   ukrainealarm = callPackage ../development/python-modules/ukrainealarm { };
 
+  ulid-transform = callPackage ../development/python-modules/ulid-transform { };
+
   ultraheat-api = callPackage ../development/python-modules/ultraheat-api { };
 
   umalqurra = callPackage ../development/python-modules/umalqurra { };
diff --git a/pkgs/top-level/stage.nix b/pkgs/top-level/stage.nix
index e5412c409ed57..2b503bce7c04e 100644
--- a/pkgs/top-level/stage.nix
+++ b/pkgs/top-level/stage.nix
@@ -199,8 +199,8 @@ let
 
     # All packages built with the Musl libc. This will override the
     # default GNU libc on Linux systems. Non-Linux systems are not
-    # supported.
-    pkgsMusl = if stdenv.hostPlatform.isLinux then nixpkgsFun {
+    # supported. 32-bit is also not supported.
+    pkgsMusl = if stdenv.hostPlatform.isLinux && stdenv.buildPlatform.is64bit then nixpkgsFun {
       overlays = [ (self': super': {
         pkgsMusl = super';
       })] ++ overlays;
@@ -208,7 +208,7 @@ let
         then "localSystem" else "crossSystem"} = {
         parsed = makeMuslParsedPlatform stdenv.hostPlatform.parsed;
       };
-    } else throw "Musl libc only supports Linux systems.";
+    } else throw "Musl libc only supports 64-bit Linux systems.";
 
     # All packages built for i686 Linux.
     # Used by wine, firefox with debugging version of Flash, ...