Merge branch 'master' into drbd-kernel-module
This commit is contained in:
commit
6ed0323361
|
@ -17,6 +17,10 @@ end_of_line = unset
|
|||
insert_final_newline = unset
|
||||
trim_trailing_whitespace = unset
|
||||
|
||||
# We want readFile .version to return the version without a newline.
|
||||
[.version]
|
||||
insert_final_newline = false
|
||||
|
||||
# see https://nixos.org/nixpkgs/manual/#chap-conventions
|
||||
|
||||
# Match json/lockfiles/markdown/nix/perl/python/ruby/shell/docbook files, set indent to spaces
|
||||
|
@ -108,3 +112,7 @@ charset = unset
|
|||
[lib/tests/*.plist]
|
||||
indent_style = tab
|
||||
insert_final_newline = unset
|
||||
|
||||
[pkgs/kde/generated/**]
|
||||
insert_final_newline = unset
|
||||
end_of_line = unset
|
||||
|
|
|
@ -131,13 +131,13 @@ nixos/modules/installer/tools/nix-fallback-paths.nix @raitobezarius @ma27
|
|||
/pkgs/development/interpreters/python/hooks @FRidh @jonringer
|
||||
|
||||
# Haskell
|
||||
/doc/languages-frameworks/haskell.section.md @cdepillabout @sternenseemann @maralorn @ncfavier
|
||||
/maintainers/scripts/haskell @cdepillabout @sternenseemann @maralorn @ncfavier
|
||||
/pkgs/development/compilers/ghc @cdepillabout @sternenseemann @maralorn @ncfavier
|
||||
/pkgs/development/haskell-modules @cdepillabout @sternenseemann @maralorn @ncfavier
|
||||
/pkgs/test/haskell @cdepillabout @sternenseemann @maralorn @ncfavier
|
||||
/pkgs/top-level/release-haskell.nix @cdepillabout @sternenseemann @maralorn @ncfavier
|
||||
/pkgs/top-level/haskell-packages.nix @cdepillabout @sternenseemann @maralorn @ncfavier
|
||||
/doc/languages-frameworks/haskell.section.md @sternenseemann @maralorn @ncfavier
|
||||
/maintainers/scripts/haskell @sternenseemann @maralorn @ncfavier
|
||||
/pkgs/development/compilers/ghc @sternenseemann @maralorn @ncfavier
|
||||
/pkgs/development/haskell-modules @sternenseemann @maralorn @ncfavier
|
||||
/pkgs/test/haskell @sternenseemann @maralorn @ncfavier
|
||||
/pkgs/top-level/release-haskell.nix @sternenseemann @maralorn @ncfavier
|
||||
/pkgs/top-level/haskell-packages.nix @sternenseemann @maralorn @ncfavier
|
||||
|
||||
# Perl
|
||||
/pkgs/development/interpreters/perl @stigtsp @zakame @dasJ
|
||||
|
@ -185,11 +185,18 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
|
|||
# Licenses
|
||||
/lib/licenses.nix @alyssais
|
||||
|
||||
# Qt / KDE
|
||||
/pkgs/applications/kde @ttuegel
|
||||
/pkgs/desktops/plasma-5 @ttuegel
|
||||
/pkgs/development/libraries/kde-frameworks @ttuegel
|
||||
/pkgs/development/libraries/qt-5 @ttuegel
|
||||
# Qt
|
||||
/pkgs/development/libraries/qt-5 @NixOS/qt-kde
|
||||
/pkgs/development/libraries/qt-6 @NixOS/qt-kde
|
||||
|
||||
# KDE / Plasma 5
|
||||
/pkgs/applications/kde @NixOS/qt-kde
|
||||
/pkgs/desktops/plasma-5 @NixOS/qt-kde
|
||||
/pkgs/development/libraries/kde-frameworks @NixOS/qt-kde
|
||||
|
||||
# KDE / Plasma 6
|
||||
/pkgs/kde @NixOS/qt-kde
|
||||
/maintainers/scripts/kde @NixOS/qt-kde
|
||||
|
||||
# PostgreSQL and related stuff
|
||||
/pkgs/servers/sql/postgresql @thoughtpolice @marsam
|
||||
|
@ -271,13 +278,13 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
|
|||
/pkgs/applications/editors/vscode/extensions @jonringer
|
||||
|
||||
# PHP interpreter, packages, extensions, tests and documentation
|
||||
/doc/languages-frameworks/php.section.md @aanderse @drupol @etu @globin @ma27 @talyz
|
||||
/nixos/tests/php @aanderse @drupol @etu @globin @ma27 @talyz
|
||||
/pkgs/build-support/php/build-pecl.nix @aanderse @drupol @etu @globin @ma27 @talyz
|
||||
/pkgs/build-support/php @drupol @etu
|
||||
/pkgs/development/interpreters/php @jtojnar @aanderse @drupol @etu @globin @ma27 @talyz
|
||||
/pkgs/development/php-packages @aanderse @drupol @etu @globin @ma27 @talyz
|
||||
/pkgs/top-level/php-packages.nix @jtojnar @aanderse @drupol @etu @globin @ma27 @talyz
|
||||
/doc/languages-frameworks/php.section.md @aanderse @drupol @globin @ma27 @talyz
|
||||
/nixos/tests/php @aanderse @drupol @globin @ma27 @talyz
|
||||
/pkgs/build-support/php/build-pecl.nix @aanderse @drupol @globin @ma27 @talyz
|
||||
/pkgs/build-support/php @drupol
|
||||
/pkgs/development/interpreters/php @jtojnar @aanderse @drupol @globin @ma27 @talyz
|
||||
/pkgs/development/php-packages @aanderse @drupol @globin @ma27 @talyz
|
||||
/pkgs/top-level/php-packages.nix @jtojnar @aanderse @drupol @globin @ma27 @talyz
|
||||
|
||||
# Docker tools
|
||||
/pkgs/build-support/docker @roberth
|
||||
|
@ -352,3 +359,8 @@ nixos/tests/zfs.nix @raitobezarius
|
|||
nixos/modules/services/continuous-integration/buildbot @Mic92 @zowoq
|
||||
nixos/tests/buildbot.nix @Mic92 @zowoq
|
||||
pkgs/development/tools/continuous-integration/buildbot @Mic92 @zowoq
|
||||
|
||||
# Pretix
|
||||
pkgs/by-name/pr/pretix/ @mweinelt
|
||||
nixos/modules/services/web-apps/pretix.nix @mweinelt
|
||||
nixos/tests/web-apps/pretix.nix @mweinelt
|
||||
|
|
|
@ -38,7 +38,7 @@ Reviewing helps to reduce the average time-to-merge for everyone.
|
|||
Thanks a lot if you do!
|
||||
|
||||
List of open PRs: https://github.com/NixOS/nixpkgs/pulls
|
||||
Reviewing guidelines: https://nixos.org/manual/nixpkgs/unstable/#chap-reviewing-contributions
|
||||
Reviewing guidelines: https://github.com/NixOS/nixpkgs/blob/master/pkgs/README.md#reviewing-contributions
|
||||
-->
|
||||
|
||||
---
|
||||
|
|
|
@ -1,216 +1,362 @@
|
|||
"6.topic: agda":
|
||||
- doc/languages-frameworks/agda.section.md
|
||||
- nixos/tests/agda.nix
|
||||
- pkgs/build-support/agda/**/*
|
||||
- pkgs/development/libraries/agda/**/*
|
||||
- pkgs/top-level/agda-packages.nix
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/agda.section.md
|
||||
- nixos/tests/agda.nix
|
||||
- pkgs/build-support/agda/**/*
|
||||
- pkgs/development/libraries/agda/**/*
|
||||
- pkgs/top-level/agda-packages.nix
|
||||
|
||||
"6.topic: cinnamon":
|
||||
- pkgs/desktops/cinnamon/**/*
|
||||
- nixos/modules/services/x11/desktop-managers/cinnamon.nix
|
||||
- nixos/tests/cinnamon.nix
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/desktops/cinnamon/**/*
|
||||
- nixos/modules/services/x11/desktop-managers/cinnamon.nix
|
||||
- nixos/tests/cinnamon.nix
|
||||
|
||||
"6.topic: emacs":
|
||||
- nixos/modules/services/editors/emacs.nix
|
||||
- nixos/modules/services/editors/emacs.xml
|
||||
- nixos/tests/emacs-daemon.nix
|
||||
- pkgs/applications/editors/emacs/elisp-packages/**/*
|
||||
- pkgs/applications/editors/emacs/**/*
|
||||
- pkgs/build-support/emacs/**/*
|
||||
- pkgs/top-level/emacs-packages.nix
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/editors/emacs.nix
|
||||
- nixos/modules/services/editors/emacs.xml
|
||||
- nixos/tests/emacs-daemon.nix
|
||||
- pkgs/applications/editors/emacs/elisp-packages/**/*
|
||||
- pkgs/applications/editors/emacs/**/*
|
||||
- pkgs/build-support/emacs/**/*
|
||||
- pkgs/top-level/emacs-packages.nix
|
||||
|
||||
"6.topic: Enlightenment DE":
|
||||
- nixos/modules/services/x11/desktop-managers/enlightenment.nix
|
||||
- pkgs/desktops/enlightenment/**/*
|
||||
- pkgs/development/python-modules/python-efl/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/enlightenment.nix
|
||||
- pkgs/desktops/enlightenment/**/*
|
||||
- pkgs/development/python-modules/python-efl/*
|
||||
|
||||
"6.topic: erlang":
|
||||
- doc/languages-frameworks/beam.section.md
|
||||
- pkgs/development/beam-modules/**/*
|
||||
- pkgs/development/interpreters/elixir/**/*
|
||||
- pkgs/development/interpreters/erlang/**/*
|
||||
- pkgs/development/tools/build-managers/rebar/**/*
|
||||
- pkgs/development/tools/build-managers/rebar3/**/*
|
||||
- pkgs/development/tools/erlang/**/*
|
||||
- pkgs/top-level/beam-packages.nix
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/beam.section.md
|
||||
- pkgs/development/beam-modules/**/*
|
||||
- pkgs/development/interpreters/elixir/**/*
|
||||
- pkgs/development/interpreters/erlang/**/*
|
||||
- pkgs/development/tools/build-managers/rebar/**/*
|
||||
- pkgs/development/tools/build-managers/rebar3/**/*
|
||||
- pkgs/development/tools/erlang/**/*
|
||||
- pkgs/top-level/beam-packages.nix
|
||||
|
||||
"6.topic: fetch":
|
||||
- pkgs/build-support/fetch*/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/build-support/fetch*/**/*
|
||||
|
||||
"6.topic: flakes":
|
||||
- '**/flake.nix'
|
||||
- lib/systems/flake-systems.nix
|
||||
- nixos/modules/config/nix-flakes.nix
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- '**/flake.nix'
|
||||
- lib/systems/flake-systems.nix
|
||||
- nixos/modules/config/nix-flakes.nix
|
||||
|
||||
"6.topic: GNOME":
|
||||
- doc/languages-frameworks/gnome.section.md
|
||||
- nixos/modules/services/desktops/gnome/**/*
|
||||
- nixos/modules/services/x11/desktop-managers/gnome.nix
|
||||
- nixos/tests/gnome-xorg.nix
|
||||
- nixos/tests/gnome.nix
|
||||
- pkgs/desktops/gnome/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/gnome.section.md
|
||||
- nixos/modules/services/desktops/gnome/**/*
|
||||
- nixos/modules/services/x11/desktop-managers/gnome.nix
|
||||
- nixos/tests/gnome-xorg.nix
|
||||
- nixos/tests/gnome.nix
|
||||
- pkgs/desktops/gnome/**/*
|
||||
|
||||
"6.topic: golang":
|
||||
- doc/languages-frameworks/go.section.md
|
||||
- pkgs/build-support/go/**/*
|
||||
- pkgs/development/compilers/go/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/go.section.md
|
||||
- pkgs/build-support/go/**/*
|
||||
- pkgs/development/compilers/go/**/*
|
||||
|
||||
"6.topic: haskell":
|
||||
- doc/languages-frameworks/haskell.section.md
|
||||
- maintainers/scripts/haskell/**/*
|
||||
- pkgs/development/compilers/ghc/**/*
|
||||
- pkgs/development/haskell-modules/**/*
|
||||
- pkgs/development/tools/haskell/**/*
|
||||
- pkgs/test/haskell/**/*
|
||||
- pkgs/top-level/haskell-packages.nix
|
||||
- pkgs/top-level/release-haskell.nix
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/haskell.section.md
|
||||
- maintainers/scripts/haskell/**/*
|
||||
- pkgs/development/compilers/ghc/**/*
|
||||
- pkgs/development/haskell-modules/**/*
|
||||
- pkgs/development/tools/haskell/**/*
|
||||
- pkgs/test/haskell/**/*
|
||||
- pkgs/top-level/haskell-packages.nix
|
||||
- pkgs/top-level/release-haskell.nix
|
||||
|
||||
"6.topic: jupyter":
|
||||
- pkgs/development/python-modules/jupyter*/**/*
|
||||
- pkgs/development/python-modules/mkdocs-jupyter/*
|
||||
- nixos/modules/services/development/jupyter/**/*
|
||||
- pkgs/applications/editors/jupyter-kernels/**/*
|
||||
- pkgs/applications/editors/jupyter/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/python-modules/jupyter*/**/*
|
||||
- pkgs/development/python-modules/mkdocs-jupyter/*
|
||||
- nixos/modules/services/development/jupyter/**/*
|
||||
- pkgs/applications/editors/jupyter-kernels/**/*
|
||||
- pkgs/applications/editors/jupyter/**/*
|
||||
|
||||
"6.topic: kernel":
|
||||
- pkgs/build-support/kernel/**/*
|
||||
- pkgs/os-specific/linux/kernel/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/build-support/kernel/**/*
|
||||
- pkgs/os-specific/linux/kernel/**/*
|
||||
|
||||
"6.topic: lib":
|
||||
- lib/**
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- lib/**
|
||||
|
||||
"6.topic: lua":
|
||||
- pkgs/development/interpreters/lua-5/**/*
|
||||
- pkgs/development/interpreters/luajit/**/*
|
||||
- pkgs/development/lua-modules/**/*
|
||||
- pkgs/top-level/lua-packages.nix
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/interpreters/lua-5/**/*
|
||||
- pkgs/development/interpreters/luajit/**/*
|
||||
- pkgs/development/lua-modules/**/*
|
||||
- pkgs/top-level/lua-packages.nix
|
||||
|
||||
"6.topic: Lumina DE":
|
||||
- nixos/modules/services/x11/desktop-managers/lumina.nix
|
||||
- pkgs/desktops/lumina/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/lumina.nix
|
||||
- pkgs/desktops/lumina/**/*
|
||||
|
||||
"6.topic: LXQt":
|
||||
- nixos/modules/services/x11/desktop-managers/lxqt.nix
|
||||
- pkgs/desktops/lxqt/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/lxqt.nix
|
||||
- pkgs/desktops/lxqt/**/*
|
||||
|
||||
"6.topic: mate":
|
||||
- nixos/modules/services/x11/desktop-managers/mate.nix
|
||||
- nixos/tests/mate.nix
|
||||
- pkgs/desktops/mate/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/mate.nix
|
||||
- nixos/tests/mate.nix
|
||||
- pkgs/desktops/mate/**/*
|
||||
|
||||
"6.topic: module system":
|
||||
- lib/modules.nix
|
||||
- lib/types.nix
|
||||
- lib/options.nix
|
||||
- lib/tests/modules.sh
|
||||
- lib/tests/modules/**
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- lib/modules.nix
|
||||
- lib/types.nix
|
||||
- lib/options.nix
|
||||
- lib/tests/modules.sh
|
||||
- lib/tests/modules/**
|
||||
|
||||
"6.topic: nixos":
|
||||
- nixos/**/*
|
||||
- pkgs/os-specific/linux/nixos-rebuild/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/**/*
|
||||
- pkgs/os-specific/linux/nixos-rebuild/**/*
|
||||
|
||||
"6.topic: nim":
|
||||
- doc/languages-frameworks/nim.section.md
|
||||
- pkgs/development/compilers/nim/*
|
||||
- pkgs/development/nim-packages/**/*
|
||||
- pkgs/top-level/nim-packages.nix
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/nim.section.md
|
||||
- pkgs/development/compilers/nim/*
|
||||
- pkgs/development/nim-packages/**/*
|
||||
- pkgs/top-level/nim-packages.nix
|
||||
|
||||
"6.topic: nodejs":
|
||||
- doc/languages-frameworks/javascript.section.md
|
||||
- pkgs/build-support/node/**/*
|
||||
- pkgs/development/node-packages/**/*
|
||||
- pkgs/development/tools/yarn/*
|
||||
- pkgs/development/tools/yarn2nix-moretea/**/*
|
||||
- pkgs/development/web/nodejs/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/javascript.section.md
|
||||
- pkgs/build-support/node/**/*
|
||||
- pkgs/development/node-packages/**/*
|
||||
- pkgs/development/tools/yarn/*
|
||||
- pkgs/development/tools/yarn2nix-moretea/**/*
|
||||
- pkgs/development/web/nodejs/*
|
||||
|
||||
"6.topic: ocaml":
|
||||
- doc/languages-frameworks/ocaml.section.md
|
||||
- pkgs/development/compilers/ocaml/**/*
|
||||
- pkgs/development/compilers/reason/**/*
|
||||
- pkgs/development/ocaml-modules/**/*
|
||||
- pkgs/development/tools/ocaml/**/*
|
||||
- pkgs/top-level/ocaml-packages.nix
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/ocaml.section.md
|
||||
- pkgs/development/compilers/ocaml/**/*
|
||||
- pkgs/development/compilers/reason/**/*
|
||||
- pkgs/development/ocaml-modules/**/*
|
||||
- pkgs/development/tools/ocaml/**/*
|
||||
- pkgs/top-level/ocaml-packages.nix
|
||||
|
||||
"6.topic: pantheon":
|
||||
- nixos/modules/services/desktops/pantheon/**/*
|
||||
- nixos/modules/services/x11/desktop-managers/pantheon.nix
|
||||
- nixos/modules/services/x11/display-managers/lightdm-greeters/pantheon.nix
|
||||
- nixos/tests/pantheon.nix
|
||||
- pkgs/desktops/pantheon/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/desktops/pantheon/**/*
|
||||
- nixos/modules/services/x11/desktop-managers/pantheon.nix
|
||||
- nixos/modules/services/x11/display-managers/lightdm-greeters/pantheon.nix
|
||||
- nixos/tests/pantheon.nix
|
||||
- pkgs/desktops/pantheon/**/*
|
||||
|
||||
"6.topic: php":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/php.section.md
|
||||
- pkgs/build-support/php/**/*
|
||||
- pkgs/development/interpreters/php/*
|
||||
- pkgs/development/php-packages/**/*
|
||||
- pkgs/test/php/default.nix
|
||||
- pkgs/top-level/php-packages.nix
|
||||
|
||||
"6.topic: policy discussion":
|
||||
- .github/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- .github/**/*
|
||||
|
||||
"6.topic: printing":
|
||||
- nixos/modules/services/printing/cupsd.nix
|
||||
- pkgs/misc/cups/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/printing/cupsd.nix
|
||||
- pkgs/misc/cups/**/*
|
||||
|
||||
"6.topic: python":
|
||||
- doc/languages-frameworks/python.section.md
|
||||
- pkgs/development/interpreters/python/**/*
|
||||
- pkgs/development/python-modules/**/*
|
||||
- pkgs/top-level/python-packages.nix
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/python.section.md
|
||||
- pkgs/development/interpreters/python/**/*
|
||||
- pkgs/development/python-modules/**/*
|
||||
- pkgs/top-level/python-packages.nix
|
||||
|
||||
"6.topic: qt/kde":
|
||||
- doc/languages-frameworks/qt.section.md
|
||||
- nixos/modules/services/x11/desktop-managers/plasma5.nix
|
||||
- nixos/tests/plasma5.nix
|
||||
- pkgs/applications/kde/**/*
|
||||
- pkgs/desktops/plasma-5/**/*
|
||||
- pkgs/development/libraries/kde-frameworks/**/*
|
||||
- pkgs/development/libraries/qt-5/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/qt.section.md
|
||||
- nixos/modules/services/x11/desktop-managers/plasma5.nix
|
||||
- nixos/tests/plasma5.nix
|
||||
- pkgs/applications/kde/**/*
|
||||
- pkgs/desktops/plasma-5/**/*
|
||||
- pkgs/development/libraries/kde-frameworks/**/*
|
||||
- pkgs/development/libraries/qt-5/**/*
|
||||
|
||||
"6.topic: ruby":
|
||||
- doc/languages-frameworks/ruby.section.md
|
||||
- pkgs/development/interpreters/ruby/**/*
|
||||
- pkgs/development/ruby-modules/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/ruby.section.md
|
||||
- pkgs/development/interpreters/ruby/**/*
|
||||
- pkgs/development/ruby-modules/**/*
|
||||
|
||||
"6.topic: rust":
|
||||
- doc/languages-frameworks/rust.section.md
|
||||
- pkgs/build-support/rust/**/*
|
||||
- pkgs/development/compilers/rust/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/rust.section.md
|
||||
- pkgs/build-support/rust/**/*
|
||||
- pkgs/development/compilers/rust/**/*
|
||||
|
||||
"6.topic: stdenv":
|
||||
- pkgs/stdenv/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/stdenv/**/*
|
||||
|
||||
"6.topic: steam":
|
||||
- pkgs/games/steam/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/games/steam/**/*
|
||||
|
||||
"6.topic: systemd":
|
||||
- pkgs/os-specific/linux/systemd/**/*
|
||||
- nixos/modules/system/boot/systemd*/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/os-specific/linux/systemd/**/*
|
||||
- nixos/modules/system/boot/systemd*/**/*
|
||||
|
||||
"6.topic: TeX":
|
||||
- doc/languages-frameworks/texlive.section.md
|
||||
- pkgs/test/texlive/**
|
||||
- pkgs/tools/typesetting/tex/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/texlive.section.md
|
||||
- pkgs/test/texlive/**
|
||||
- pkgs/tools/typesetting/tex/**/*
|
||||
|
||||
"6.topic: testing":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
# NOTE: Let's keep the scope limited to test frameworks that are
|
||||
# *developed in this repo*;
|
||||
# - not individual tests
|
||||
# - not packages for test frameworks
|
||||
- nixos/lib/testing/**
|
||||
- nixos/lib/test-driver/**
|
||||
- nixos/tests/nixos-test-driver/**
|
||||
- nixos/lib/testing-python.nix # legacy
|
||||
- nixos/tests/make-test-python.nix # legacy
|
||||
# lib/debug.nix has a test framework (runTests) but it's not the main focus
|
||||
|
||||
"6.topic: vim":
|
||||
- doc/languages-frameworks/vim.section.md
|
||||
- pkgs/applications/editors/vim/**/*
|
||||
- pkgs/applications/editors/vim/plugins/**/*
|
||||
- nixos/modules/programs/neovim.nix
|
||||
- pkgs/applications/editors/neovim/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/vim.section.md
|
||||
- pkgs/applications/editors/vim/**/*
|
||||
- pkgs/applications/editors/vim/plugins/**/*
|
||||
- nixos/modules/programs/neovim.nix
|
||||
- pkgs/applications/editors/neovim/**/*
|
||||
|
||||
"6.topic: vscode":
|
||||
- pkgs/applications/editors/vscode/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/applications/editors/vscode/**/*
|
||||
|
||||
"6.topic: xfce":
|
||||
- nixos/doc/manual/configuration/xfce.xml
|
||||
- nixos/modules/services/x11/desktop-managers/xfce.nix
|
||||
- nixos/tests/xfce.nix
|
||||
- pkgs/desktops/xfce/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/doc/manual/configuration/xfce.xml
|
||||
- nixos/modules/services/x11/desktop-managers/xfce.nix
|
||||
- nixos/tests/xfce.nix
|
||||
- pkgs/desktops/xfce/**/*
|
||||
|
||||
"6.topic: zig":
|
||||
- pkgs/development/compilers/zig/**/*
|
||||
- doc/hooks/zig.section.md
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/compilers/zig/**/*
|
||||
- doc/hooks/zig.section.md
|
||||
|
||||
"8.has: changelog":
|
||||
- nixos/doc/manual/release-notes/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/doc/manual/release-notes/**/*
|
||||
|
||||
"8.has: documentation":
|
||||
- doc/**/*
|
||||
- nixos/doc/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/**/*
|
||||
- nixos/doc/**/*
|
||||
|
||||
"8.has: module (update)":
|
||||
- nixos/modules/**/*
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/**/*
|
||||
|
|
|
@ -20,11 +20,11 @@ jobs:
|
|||
if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name))
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
- name: Create backport PRs
|
||||
uses: korthout/backport-action@08bafb375e6e9a9a2b53a744b987e5d81a133191 # v2.1.1
|
||||
uses: korthout/backport-action@e8161d6a0dbfa2651b7daa76cbb75bc7c925bbf3 # v2.4.1
|
||||
with:
|
||||
# Config README: https://github.com/korthout/backport-action#backport-action
|
||||
copy_labels_pattern: 'severity:\ssecurity'
|
||||
|
|
|
@ -18,8 +18,8 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
# we don't limit this action to only NixOS repo since the checks are cheap and useful developer feedback
|
||||
steps:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||
- uses: cachix/install-nix-action@6004951b182f8860210c8d6f0d808ec5b1a33d28 # v25
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
|
||||
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
|
||||
- uses: cachix/cachix-action@18cf96c7c98e048e10a83abd92116114cd8504be # v14
|
||||
with:
|
||||
# This cache is for the nixpkgs repo checks and should not be trusted or used elsewhere.
|
||||
|
|
|
@ -87,7 +87,7 @@ jobs:
|
|||
exit 1
|
||||
fi
|
||||
echo "mergedSha=$mergedSha" >> "$GITHUB_ENV"
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
|
||||
with:
|
||||
# pull_request_target checks out the base branch by default
|
||||
ref: ${{ env.mergedSha }}
|
||||
|
@ -98,7 +98,7 @@ jobs:
|
|||
base=$(mktemp -d)
|
||||
git worktree add "$base" "$(git rev-parse HEAD^1)"
|
||||
echo "base=$base" >> "$GITHUB_ENV"
|
||||
- uses: cachix/install-nix-action@6004951b182f8860210c8d6f0d808ec5b1a33d28 # v25
|
||||
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
|
||||
- name: Fetching the pinned tool
|
||||
# Update the pinned version using pkgs/test/nixpkgs-check-by-name/scripts/update-pinned-tool.sh
|
||||
run: |
|
||||
|
|
|
@ -12,11 +12,11 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
if: github.repository_owner == 'NixOS'
|
||||
steps:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
|
||||
with:
|
||||
# pull_request_target checks out the base branch by default
|
||||
ref: refs/pull/${{ github.event.pull_request.number }}/merge
|
||||
- uses: cachix/install-nix-action@6004951b182f8860210c8d6f0d808ec5b1a33d28 # v25
|
||||
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
|
||||
with:
|
||||
# explicitly enable sandbox
|
||||
extra_nix_config: sandbox = true
|
||||
|
|
|
@ -24,11 +24,11 @@ jobs:
|
|||
- name: print list of changed files
|
||||
run: |
|
||||
cat "$HOME/changed_files"
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
|
||||
with:
|
||||
# pull_request_target checks out the base branch by default
|
||||
ref: refs/pull/${{ github.event.pull_request.number }}/merge
|
||||
- uses: cachix/install-nix-action@6004951b182f8860210c8d6f0d808ec5b1a33d28 # v25
|
||||
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
|
||||
with:
|
||||
# nixpkgs commit is pinned so that it doesn't break
|
||||
# editorconfig-checker 2.4.0
|
||||
|
|
|
@ -18,7 +18,7 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
if: "github.repository_owner == 'NixOS' && !contains(github.event.pull_request.title, '[skip treewide]')"
|
||||
steps:
|
||||
- uses: actions/labeler@ac9175f8a1f3625fd0d4fb234536d26811351594 # v4.3.0
|
||||
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
sync-labels: true
|
||||
|
|
|
@ -14,11 +14,11 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
if: github.repository_owner == 'NixOS'
|
||||
steps:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
|
||||
with:
|
||||
# pull_request_target checks out the base branch by default
|
||||
ref: refs/pull/${{ github.event.pull_request.number }}/merge
|
||||
- uses: cachix/install-nix-action@6004951b182f8860210c8d6f0d808ec5b1a33d28 # v25
|
||||
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
|
||||
with:
|
||||
# explicitly enable sandbox
|
||||
extra_nix_config: sandbox = true
|
||||
|
|
|
@ -15,11 +15,11 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
if: github.repository_owner == 'NixOS'
|
||||
steps:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
|
||||
with:
|
||||
# pull_request_target checks out the base branch by default
|
||||
ref: refs/pull/${{ github.event.pull_request.number }}/merge
|
||||
- uses: cachix/install-nix-action@6004951b182f8860210c8d6f0d808ec5b1a33d28 # v25
|
||||
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
|
||||
with:
|
||||
# explicitly enable sandbox
|
||||
extra_nix_config: sandbox = true
|
||||
|
|
|
@ -24,12 +24,12 @@ jobs:
|
|||
if [[ -s "$HOME/changed_files" ]]; then
|
||||
echo "CHANGED_FILES=$HOME/changed_files" > "$GITHUB_ENV"
|
||||
fi
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
|
||||
with:
|
||||
# pull_request_target checks out the base branch by default
|
||||
ref: refs/pull/${{ github.event.pull_request.number }}/merge
|
||||
if: ${{ env.CHANGED_FILES && env.CHANGED_FILES != '' }}
|
||||
- uses: cachix/install-nix-action@6004951b182f8860210c8d6f0d808ec5b1a33d28 # v25
|
||||
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
|
||||
with:
|
||||
nix_path: nixpkgs=channel:nixpkgs-unstable
|
||||
- name: Parse all changed or added nix files
|
||||
|
|
|
@ -41,7 +41,7 @@ jobs:
|
|||
into: staging-23.11
|
||||
name: ${{ matrix.pairs.from }} → ${{ matrix.pairs.into }}
|
||||
steps:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
|
||||
|
||||
- name: ${{ matrix.pairs.from }} → ${{ matrix.pairs.into }}
|
||||
uses: devmasx/merge-branch@854d3ac71ed1e9deb668e0074781b81fdd6e771f # 1.4.0
|
||||
|
|
|
@ -39,7 +39,7 @@ jobs:
|
|||
into: staging
|
||||
name: ${{ matrix.pairs.from }} → ${{ matrix.pairs.into }}
|
||||
steps:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
|
||||
|
||||
- name: ${{ matrix.pairs.from }} → ${{ matrix.pairs.into }}
|
||||
uses: devmasx/merge-branch@854d3ac71ed1e9deb668e0074781b81fdd6e771f # 1.4.0
|
||||
|
|
|
@ -16,8 +16,8 @@ jobs:
|
|||
if: github.repository_owner == 'NixOS' && github.ref == 'refs/heads/master' # ensure workflow_dispatch only runs on master
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||
- uses: cachix/install-nix-action@6004951b182f8860210c8d6f0d808ec5b1a33d28 # v25
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
|
||||
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
|
||||
with:
|
||||
nix_path: nixpkgs=channel:nixpkgs-unstable
|
||||
- name: setup
|
||||
|
@ -46,7 +46,7 @@ jobs:
|
|||
run: |
|
||||
git clean -f
|
||||
- name: create PR
|
||||
uses: peter-evans/create-pull-request@153407881ec5c347639a548ade7d8ad1d6740e38 # v5.0.2
|
||||
uses: peter-evans/create-pull-request@70a41aba780001da0a30141984ae2a0c95d8704e # v6.0.2
|
||||
with:
|
||||
body: |
|
||||
Automatic update by [update-terraform-providers](https://github.com/NixOS/nixpkgs/blob/master/.github/workflows/update-terraform-providers.yml) action.
|
||||
|
|
|
@ -11,12 +11,12 @@ outputs/
|
|||
result-*
|
||||
result
|
||||
repl-result-*
|
||||
tags
|
||||
!pkgs/development/python-modules/result
|
||||
/doc/NEWS.html
|
||||
/doc/NEWS.txt
|
||||
/doc/manual.html
|
||||
/doc/manual.pdf
|
||||
/result
|
||||
/source/
|
||||
.version-suffix
|
||||
|
||||
|
|
|
@ -129,19 +129,17 @@ When a PR is created, it will be pre-populated with some checkboxes detailed bel
|
|||
|
||||
#### Tested using sandboxing
|
||||
|
||||
When sandbox builds are enabled, Nix will setup an isolated environment for each build process. It is used to remove further hidden dependencies set by the build environment to improve reproducibility. This includes access to the network during the build outside of `fetch*` functions and files outside the Nix store. Depending on the operating system access to other resources are blocked as well (ex. inter process communication is isolated on Linux); see [sandbox](https://nixos.org/manual/nix/stable/command-ref/conf-file#conf-sandbox) in the Nix manual for details.
|
||||
When sandbox builds are enabled, Nix will set up an isolated environment for each build process.
|
||||
It is used to remove further hidden dependencies set by the build environment to improve reproducibility.
|
||||
This includes access to the network during the build outside of `fetch*` functions and files outside the Nix store.
|
||||
Depending on the operating system, access to other resources is blocked as well (e.g., inter-process communication is isolated on Linux); see [sandbox](https://nixos.org/manual/nix/stable/command-ref/conf-file#conf-sandbox) in the Nix manual for details.
|
||||
|
||||
Sandboxing is not enabled by default in Nix due to a small performance hit on each build. In pull requests for [nixpkgs](https://github.com/NixOS/nixpkgs/) people are asked to test builds with sandboxing enabled (see `Tested using sandboxing` in the pull request template) because in [Hydra](https://nixos.org/hydra/) sandboxing is also used.
|
||||
In pull requests for [nixpkgs](https://github.com/NixOS/nixpkgs/) people are asked to test builds with sandboxing enabled (see `Tested using sandboxing` in the pull request template) because in [Hydra](https://nixos.org/hydra/) sandboxing is also used.
|
||||
|
||||
Depending if you use NixOS or other platforms you can use one of the following methods to enable sandboxing **before** building the package:
|
||||
If you are on Linux, sandboxing is enabled by default.
|
||||
On other platforms, sandboxing is disabled by default due to a small performance hit on each build.
|
||||
|
||||
- **Globally enable sandboxing on NixOS**: add the following to `configuration.nix`
|
||||
|
||||
```nix
|
||||
nix.settings.sandbox = true;
|
||||
```
|
||||
|
||||
- **Globally enable sandboxing on non-NixOS platforms**: add the following to: `/etc/nix/nix.conf`
|
||||
Please enable sandboxing **before** building the package by adding the following to: `/etc/nix/nix.conf`:
|
||||
|
||||
```ini
|
||||
sandbox = true
|
||||
|
@ -441,14 +439,14 @@ gitGraph
|
|||
|
||||
Here's an overview of the different branches:
|
||||
|
||||
| branch | `master` | `staging` | `staging-next` |
|
||||
| branch | `master` | `staging-next` | `staging` |
|
||||
| --- | --- | --- | --- |
|
||||
| Used for development | ✔️ | ✔️ | ❌ |
|
||||
| Built by Hydra | ✔️ | ❌ | ✔️ |
|
||||
| [Mass rebuilds][mass-rebuild] | ❌ | ✔️ | ⚠️ Only to fix Hydra builds |
|
||||
| Critical security fixes | ✔️ for non-mass-rebuilds | ❌ | ✔️ for mass-rebuilds |
|
||||
| Automatically merged into | `staging-next` | - | `staging` |
|
||||
| Manually merged into | - | `staging-next` | `master` |
|
||||
| Used for development | ✔️ | ❌ | ✔️ |
|
||||
| Built by Hydra | ✔️ | ✔️ | ❌ |
|
||||
| [Mass rebuilds][mass-rebuild] | ❌ | ⚠️ Only to fix Hydra builds | ✔️ |
|
||||
| Critical security fixes | ✔️ for non-mass-rebuilds | ✔️ for mass-rebuilds | ❌ |
|
||||
| Automatically merged into | `staging-next` | `staging` | - |
|
||||
| Manually merged into | - | `master` | `staging-next` |
|
||||
|
||||
The staging workflow is used for all main branches, `master` and `release-YY.MM`, with corresponding names:
|
||||
- `master`/`release-YY.MM`
|
||||
|
|
11
README.md
11
README.md
|
@ -1,9 +1,10 @@
|
|||
<p align="center">
|
||||
<a href="https://nixos.org#gh-light-mode-only">
|
||||
<img src="https://raw.githubusercontent.com/NixOS/nixos-homepage/master/logo/nixos-hires.png" width="500px" alt="NixOS logo"/>
|
||||
</a>
|
||||
<a href="https://nixos.org#gh-dark-mode-only">
|
||||
<img src="https://raw.githubusercontent.com/NixOS/nixos-artwork/master/logo/nixos-white.png" width="500px" alt="NixOS logo"/>
|
||||
<a href="https://nixos.org">
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: light)" srcset="https://raw.githubusercontent.com/NixOS/nixos-homepage/master/logo/nixos-hires.png">
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://raw.githubusercontent.com/NixOS/nixos-artwork/master/logo/nixos-white.png">
|
||||
<img src="https://raw.githubusercontent.com/NixOS/nixos-homepage/master/logo/nixos-hires.png" width="500px" alt="NixOS logo">
|
||||
</picture>
|
||||
</a>
|
||||
</p>
|
||||
|
||||
|
|
|
@ -0,0 +1,3 @@
|
|||
document.addEventListener('DOMContentLoaded', function(event) {
|
||||
anchors.add('h1[id]:not(div.note h1, div.warning h1, div.tip h1, div.caution h1, div.important h1), h2[id]:not(div.note h2, div.warning h2, div.tip h2, div.caution h2, div.important h2), h3[id]:not(div.note h3, div.warning h3, div.tip h3, div.caution h3, div.important h3), h4[id]:not(div.note h4, div.warning h4, div.tip h4, div.caution h4, div.important h4), h5[id]:not(div.note h5, div.warning h5, div.tip h5, div.caution h5, div.important h5), h6[id]:not(div.note h6, div.warning h6, div.tip h6, div.caution h6, div.important h6)');
|
||||
});
|
File diff suppressed because one or more lines are too long
|
@ -80,6 +80,10 @@ stdenv.mkDerivation {
|
|||
|
||||
The main difference between `fetchurl` and `fetchzip` is in how they store the contents. `fetchurl` will store the unaltered contents of the URL within the Nix store. `fetchzip` on the other hand, will decompress the archive for you, making files and directories directly accessible in the future. `fetchzip` can only be used with archives. Despite the name, `fetchzip` is not limited to .zip files and can also be used with any tarball.
|
||||
|
||||
Additional parameters to `fetchurl`:
|
||||
- `downloadToTemp`: Defaults to `false`. If `true`, saves the source to `$downloadedFile`, to be used in conjunction with `postFetch`
|
||||
- `postFetch`: Shell code executed after the file has been fetched successfully. Use it for postprocessing, to check or transform the file.
|
||||
|
||||
## `fetchpatch` {#fetchpatch}
|
||||
|
||||
`fetchpatch` works very similarly to `fetchurl` with the same arguments expected. It expects patch files as a source and performs normalization on them before computing the checksum. For example, it will remove comments or other unstable parts that are sometimes added by version control systems and can change over time.
|
||||
|
@ -258,6 +262,10 @@ or
|
|||
|
||||
***
|
||||
```
|
||||
|
||||
This function should only be used by non-redistributable software with an unfree license that we need to require the user to download manually.
|
||||
It produces packages that cannot be built automatically.
|
||||
|
||||
## `fetchtorrent` {#fetchtorrent}
|
||||
|
||||
`fetchtorrent` expects two arguments. `url` which can either be a Magnet URI (Magnet Link) such as `magnet:?xt=urn:btih:dd8255ecdc7ca55fb0bbf81323d87062db1f6d1c` or an HTTP URL pointing to a `.torrent` file. It can also take a `config` argument which will craft a `settings.json` configuration file and give it to `transmission`, the underlying program that is performing the fetch. The available config options for `transmission` can be found [here](https://github.com/transmission/transmission/blob/main/docs/Editing-Configuration-Files.md#options)
|
||||
|
|
|
@ -6,7 +6,6 @@ This chapter describes tools for creating various types of images.
|
|||
images/appimagetools.section.md
|
||||
images/dockertools.section.md
|
||||
images/ocitools.section.md
|
||||
images/snaptools.section.md
|
||||
images/portableservice.section.md
|
||||
images/makediskimage.section.md
|
||||
images/binarycache.section.md
|
||||
|
|
|
@ -6,7 +6,7 @@ Docker itself is not used to perform any of the operations done by these functio
|
|||
## buildImage {#ssec-pkgs-dockerTools-buildImage}
|
||||
|
||||
This function builds a Docker-compatible repository tarball containing a single image.
|
||||
As such, the result is suitable for being loaded in Docker with `docker load` (see [](#ex-dockerTools-buildImage) for how to do this).
|
||||
As such, the result is suitable for being loaded in Docker with `docker image load` (see [](#ex-dockerTools-buildImage) for how to do this).
|
||||
|
||||
This function will create a single layer for all files (and dependencies) that are specified in its argument.
|
||||
Only new dependencies that are not already in the existing layers will be copied.
|
||||
|
@ -43,7 +43,7 @@ Similarly, if you encounter errors similar to `Error_Protocol ("certificate has
|
|||
`fromImage` (Path or Null; _optional_)
|
||||
|
||||
: The repository tarball of an image to be used as the base for the generated image.
|
||||
It must be a valid Docker image, such as one exported by `docker save`, or another image built with the `dockerTools` utility functions.
|
||||
It must be a valid Docker image, such as one exported by `docker image save`, or another image built with the `dockerTools` utility functions.
|
||||
This can be seen as an equivalent of `FROM fromImage` in a `Dockerfile`.
|
||||
A value of `null` can be seen as an equivalent of `FROM scratch`.
|
||||
|
||||
|
@ -123,7 +123,7 @@ Similarly, if you encounter errors similar to `Error_Protocol ("certificate has
|
|||
|
||||
_Default value:_ `""`.
|
||||
|
||||
`config` (Attribute Set; _optional_)
|
||||
`config` (Attribute Set or Null; _optional_)
|
||||
|
||||
: Used to specify the configuration of the containers that will be started off the generated image.
|
||||
Must be an attribute set, with each attribute as listed in the [Docker Image Specification v1.3.0](https://github.com/moby/moby/blob/46f7ab808b9504d735d600e259ca0723f76fb164/image/spec/spec.md#image-json-field-descriptions).
|
||||
|
@ -178,6 +178,13 @@ Similarly, if you encounter errors similar to `Error_Protocol ("certificate has
|
|||
|
||||
_Default value:_ 0.
|
||||
|
||||
`compressor` (String; _optional_)
|
||||
|
||||
: Selects the algorithm used to compress the image.
|
||||
|
||||
_Default value:_ `"gz"`.\
|
||||
_Possible values:_ `"none"`, `"gz"`, `"zstd"`.
|
||||
|
||||
`contents` **DEPRECATED**
|
||||
|
||||
: This attribute is deprecated, and users are encouraged to use `copyToRoot` instead.
|
||||
|
@ -247,7 +254,7 @@ Cooking the image...
|
|||
Finished.
|
||||
/nix/store/p4dsg62inh9d2ksy3c7bv58xa851dasr-docker-image-redis.tar.gz
|
||||
|
||||
$ docker load -i /nix/store/p4dsg62inh9d2ksy3c7bv58xa851dasr-docker-image-redis.tar.gz
|
||||
$ docker image load -i /nix/store/p4dsg62inh9d2ksy3c7bv58xa851dasr-docker-image-redis.tar.gz
|
||||
(some output removed for clarity)
|
||||
Loaded image: redis:latest
|
||||
```
|
||||
|
@ -345,8 +352,8 @@ dockerTools.buildImage {
|
|||
|
||||
After importing the generated repository tarball with Docker, its CLI will display a reasonable date and sort the images as expected:
|
||||
|
||||
```ShellSession
|
||||
$ docker images
|
||||
```shell
|
||||
$ docker image ls
|
||||
REPOSITORY TAG IMAGE ID CREATED SIZE
|
||||
hello latest de2bf4786de6 About a minute ago 25.2MB
|
||||
```
|
||||
|
@ -364,7 +371,7 @@ Despite the similar name, [`buildImage`](#ssec-pkgs-dockerTools-buildImage) work
|
|||
Even though some of the arguments may seem related, they cannot be interchanged.
|
||||
:::
|
||||
|
||||
You can use this function to load an image in Docker with `docker load`.
|
||||
You can load the result of this function in Docker with `docker image load`.
|
||||
See [](#ex-dockerTools-buildLayeredImage-hello) to see how to do that.
|
||||
|
||||
### Examples {#ssec-pkgs-dockerTools-buildLayeredImage-examples}
|
||||
|
@ -404,7 +411,7 @@ Adding manifests...
|
|||
Done.
|
||||
/nix/store/hxcz7snvw7f8rzhbh6mv8jq39d992905-hello.tar.gz
|
||||
|
||||
$ docker load -i /nix/store/hxcz7snvw7f8rzhbh6mv8jq39d992905-hello.tar.gz
|
||||
$ docker image load -i /nix/store/hxcz7snvw7f8rzhbh6mv8jq39d992905-hello.tar.gz
|
||||
(some output removed for clarity)
|
||||
Loaded image: hello:latest
|
||||
```
|
||||
|
@ -415,7 +422,7 @@ Loaded image: hello:latest
|
|||
`streamLayeredImage` builds a **script** which, when run, will stream to stdout a Docker-compatible repository tarball containing a single image, using multiple layers to improve sharing between images.
|
||||
This means that `streamLayeredImage` does not output an image into the Nix store, but only a script that builds the image, saving on IO and disk/cache space, particularly with large images.
|
||||
|
||||
You can use this function to load an image in Docker with `docker load`.
|
||||
You can load the result of this function in Docker with `docker image load`.
|
||||
See [](#ex-dockerTools-streamLayeredImage-hello) to see how to do that.
|
||||
|
||||
For this function, you specify a [store path](https://nixos.org/manual/nix/stable/store/store-path) or a list of store paths to be added to the image, and the functions will automatically include any dependencies of those paths in the image.
|
||||
|
@ -440,7 +447,7 @@ This allows the function to produce reproducible images.
|
|||
|
||||
: The name of the generated image.
|
||||
|
||||
`tag` (String; _optional_)
|
||||
`tag` (String or Null; _optional_)
|
||||
|
||||
: Tag of the generated image.
|
||||
If `null`, the hash of the nix derivation will be used as the tag.
|
||||
|
@ -450,7 +457,7 @@ This allows the function to produce reproducible images.
|
|||
`fromImage`(Path or Null; _optional_)
|
||||
|
||||
: The repository tarball of an image to be used as the base for the generated image.
|
||||
It must be a valid Docker image, such as one exported by `docker save`, or another image built with the `dockerTools` utility functions.
|
||||
It must be a valid Docker image, such as one exported by `docker image save`, or another image built with the `dockerTools` utility functions.
|
||||
This can be seen as an equivalent of `FROM fromImage` in a `Dockerfile`.
|
||||
A value of `null` can be seen as an equivalent of `FROM scratch`.
|
||||
|
||||
|
@ -470,7 +477,7 @@ This allows the function to produce reproducible images.
|
|||
|
||||
_Default value:_ `[]`
|
||||
|
||||
`config` (Attribute Set; _optional_) []{#dockerTools-buildLayeredImage-arg-config}
|
||||
`config` (Attribute Set or Null; _optional_) []{#dockerTools-buildLayeredImage-arg-config}
|
||||
|
||||
: Used to specify the configuration of the containers that will be started off the generated image.
|
||||
Must be an attribute set, with each attribute as listed in the [Docker Image Specification v1.3.0](https://github.com/moby/moby/blob/46f7ab808b9504d735d600e259ca0723f76fb164/image/spec/spec.md#image-json-field-descriptions).
|
||||
|
@ -500,6 +507,16 @@ This allows the function to produce reproducible images.
|
|||
|
||||
_Default value:_ `"1970-01-01T00:00:01Z"`.
|
||||
|
||||
`uid` (Number; _optional_) []{#dockerTools-buildLayeredImage-arg-uid}
|
||||
`gid` (Number; _optional_) []{#dockerTools-buildLayeredImage-arg-gid}
|
||||
`uname` (String; _optional_) []{#dockerTools-buildLayeredImage-arg-uname}
|
||||
`gname` (String; _optional_) []{#dockerTools-buildLayeredImage-arg-gname}
|
||||
|
||||
: Credentials for Nix store ownership.
|
||||
Can be overridden to e.g. `1000` / `1000` / `"user"` / `"user"` to enable building a container where Nix can be used as an unprivileged user in single-user mode.
|
||||
|
||||
_Default value:_ `0` / `0` / `"root"` / `"root"`
|
||||
|
||||
`maxLayers` (Number; _optional_) []{#dockerTools-buildLayeredImage-arg-maxLayers}
|
||||
|
||||
: The maximum number of layers that will be used by the generated image.
|
||||
|
@ -594,7 +611,7 @@ dockerTools.streamLayeredImage {
|
|||
```
|
||||
|
||||
The result of building this package is a script.
|
||||
Running this script and piping it into `docker load` gives you the same image that was built in [](#ex-dockerTools-buildLayeredImage-hello).
|
||||
Running this script and piping it into `docker image load` gives you the same image that was built in [](#ex-dockerTools-buildLayeredImage-hello).
|
||||
Note that in this case, the image is never added to the Nix store, but instead streamed directly into Docker.
|
||||
|
||||
```shell
|
||||
|
@ -602,7 +619,7 @@ $ nix-build
|
|||
(output removed for clarity)
|
||||
/nix/store/wsz2xl8ckxnlb769irvq6jv1280dfvxd-stream-hello
|
||||
|
||||
$ /nix/store/wsz2xl8ckxnlb769irvq6jv1280dfvxd-stream-hello | docker load
|
||||
$ /nix/store/wsz2xl8ckxnlb769irvq6jv1280dfvxd-stream-hello | docker image load
|
||||
No 'fromImage' provided
|
||||
Creating layer 1 from paths: ['/nix/store/i93s7xxblavsacpy82zdbn4kplsyq48l-libunistring-1.1']
|
||||
Creating layer 2 from paths: ['/nix/store/ji01n9vinnj22nbrb86nx8a1ssgpilx8-libidn2-2.3.4']
|
||||
|
@ -718,7 +735,7 @@ dockerTools.streamLayeredImage {
|
|||
[]{#ssec-pkgs-dockerTools-fetchFromRegistry}
|
||||
## pullImage {#ssec-pkgs-dockerTools-pullImage}
|
||||
|
||||
This function is similar to the `docker pull` command, which means it can be used to pull a Docker image from a registry that implements the [Docker Registry HTTP API V2](https://distribution.github.io/distribution/spec/api/).
|
||||
This function is similar to the `docker image pull` command, which means it can be used to pull a Docker image from a registry that implements the [Docker Registry HTTP API V2](https://distribution.github.io/distribution/spec/api/).
|
||||
By default, the `docker.io` registry is used.
|
||||
|
||||
The image will be downloaded as an uncompressed Docker-compatible repository tarball, which is suitable for use with other `dockerTools` functions such as [`buildImage`](#ssec-pkgs-dockerTools-buildImage), [`buildLayeredImage`](#ssec-pkgs-dockerTools-buildLayeredImage), and [`streamLayeredImage`](#ssec-pkgs-dockerTools-streamLayeredImage).
|
||||
|
@ -1105,7 +1122,7 @@ This is currently implemented by linking to the `env` binary from the `coreutils
|
|||
### binSh {#sssec-pkgs-dockerTools-helpers-binSh}
|
||||
|
||||
This provides a `/bin/sh` link to the `bash` binary from the `bashInteractive` package.
|
||||
Because of this, it supports cases such as running a command interactively in a container (for example by running `docker run -it <image_name>`).
|
||||
Because of this, it supports cases such as running a command interactively in a container (for example by running `docker container run -it <image_name>`).
|
||||
|
||||
### caCertificates {#sssec-pkgs-dockerTools-helpers-caCertificates}
|
||||
|
||||
|
@ -1169,9 +1186,9 @@ This is made possible by `binSh`.
|
|||
$ nix-build
|
||||
(some output removed for clarity)
|
||||
/nix/store/2p0i3i04cgjlk71hsn7ll4kxaxxiv4qg-docker-image-env-helpers.tar.gz
|
||||
$ docker load -i /nix/store/2p0i3i04cgjlk71hsn7ll4kxaxxiv4qg-docker-image-env-helpers.tar.gz
|
||||
$ docker image load -i /nix/store/2p0i3i04cgjlk71hsn7ll4kxaxxiv4qg-docker-image-env-helpers.tar.gz
|
||||
(output removed for clarity)
|
||||
$ docker run --rm -it env-helpers:latest /bin/sh
|
||||
$ docker container run --rm -it env-helpers:latest /bin/sh
|
||||
sh-5.2# help
|
||||
GNU bash, version 5.2.21(1)-release (x86_64-pc-linux-gnu)
|
||||
(rest of output removed for clarity)
|
||||
|
@ -1209,9 +1226,9 @@ This is made possible by `binSh`.
|
|||
$ nix-build
|
||||
(some output removed for clarity)
|
||||
/nix/store/rpf47f4z5b9qr4db4ach9yr4b85hjhxq-env-helpers.tar.gz
|
||||
$ docker load -i /nix/store/rpf47f4z5b9qr4db4ach9yr4b85hjhxq-env-helpers.tar.gz
|
||||
$ docker image load -i /nix/store/rpf47f4z5b9qr4db4ach9yr4b85hjhxq-env-helpers.tar.gz
|
||||
(output removed for clarity)
|
||||
$ docker run --rm -it env-helpers:latest /bin/sh
|
||||
$ docker container run --rm -it env-helpers:latest /bin/sh
|
||||
sh-5.2# help
|
||||
GNU bash, version 5.2.21(1)-release (x86_64-pc-linux-gnu)
|
||||
(rest of output removed for clarity)
|
||||
|
@ -1315,7 +1332,7 @@ $ nix-build
|
|||
(some output removed for clarity)
|
||||
/nix/store/pkj1sgzaz31wl0pbvbg3yp5b3kxndqms-hello-2.12.1-env.tar.gz
|
||||
|
||||
$ docker load -i /nix/store/pkj1sgzaz31wl0pbvbg3yp5b3kxndqms-hello-2.12.1-env.tar.gz
|
||||
$ docker image load -i /nix/store/pkj1sgzaz31wl0pbvbg3yp5b3kxndqms-hello-2.12.1-env.tar.gz
|
||||
(some output removed for clarity)
|
||||
Loaded image: hello-2.12.1-env:latest
|
||||
```
|
||||
|
@ -1323,7 +1340,7 @@ Loaded image: hello-2.12.1-env:latest
|
|||
After starting an interactive container, the derivation can be built by running `buildDerivation`, and the output can be executed as expected:
|
||||
|
||||
```shell
|
||||
$ docker run -it hello-2.12.1-env:latest
|
||||
$ docker container run -it hello-2.12.1-env:latest
|
||||
[nix-shell:~]$ buildDerivation
|
||||
Running phase: unpackPhase
|
||||
unpacking source archive /nix/store/pa10z4ngm0g83kx9mssrqzz30s84vq7k-hello-2.12.1.tar.gz
|
||||
|
@ -1443,14 +1460,14 @@ dockerTools.streamNixShellImage {
|
|||
```
|
||||
|
||||
The result of building this package is a script.
|
||||
Running this script and piping it into `docker load` gives you the same image that was built in [](#ex-dockerTools-buildNixShellImage-hello).
|
||||
Running this script and piping it into `docker image load` gives you the same image that was built in [](#ex-dockerTools-buildNixShellImage-hello).
|
||||
|
||||
```shell
|
||||
$ nix-build
|
||||
(some output removed for clarity)
|
||||
/nix/store/8vhznpz2frqazxnd8pgdvf38jscdypax-stream-hello-2.12.1-env
|
||||
|
||||
$ /nix/store/8vhznpz2frqazxnd8pgdvf38jscdypax-stream-hello-2.12.1-env | docker load
|
||||
$ /nix/store/8vhznpz2frqazxnd8pgdvf38jscdypax-stream-hello-2.12.1-env | docker image load
|
||||
(some output removed for clarity)
|
||||
Loaded image: hello-2.12.1-env:latest
|
||||
```
|
||||
|
@ -1458,7 +1475,7 @@ Loaded image: hello-2.12.1-env:latest
|
|||
After starting an interactive container, the derivation can be built by running `buildDerivation`, and the output can be executed as expected:
|
||||
|
||||
```shell
|
||||
$ docker run -it hello-2.12.1-env:latest
|
||||
$ docker container run -it hello-2.12.1-env:latest
|
||||
[nix-shell:~]$ buildDerivation
|
||||
Running phase: unpackPhase
|
||||
unpacking source archive /nix/store/pa10z4ngm0g83kx9mssrqzz30s84vq7k-hello-2.12.1.tar.gz
|
||||
|
@ -1497,14 +1514,14 @@ dockerTools.streamNixShellImage {
|
|||
}
|
||||
```
|
||||
|
||||
The result of building this package is a script which can be run and piped into `docker load` to load the generated image.
|
||||
The result of building this package is a script which can be run and piped into `docker image load` to load the generated image.
|
||||
|
||||
```shell
|
||||
$ nix-build
|
||||
(some output removed for clarity)
|
||||
/nix/store/h5abh0vljgzg381lna922gqknx6yc0v7-stream-hello-2.12.1-env
|
||||
|
||||
$ /nix/store/h5abh0vljgzg381lna922gqknx6yc0v7-stream-hello-2.12.1-env | docker load
|
||||
$ /nix/store/h5abh0vljgzg381lna922gqknx6yc0v7-stream-hello-2.12.1-env | docker image load
|
||||
(some output removed for clarity)
|
||||
Loaded image: hello-2.12.1-env:latest
|
||||
```
|
||||
|
@ -1512,7 +1529,7 @@ Loaded image: hello-2.12.1-env:latest
|
|||
After starting an interactive container, we can verify the extra package is available by running `cowsay`:
|
||||
|
||||
```shell
|
||||
$ docker run -it hello-2.12.1-env:latest
|
||||
$ docker container run -it hello-2.12.1-env:latest
|
||||
[nix-shell:~]$ cowsay "Hello, world!"
|
||||
_______________
|
||||
< Hello, world! >
|
||||
|
@ -1546,14 +1563,14 @@ dockerTools.streamNixShellImage {
|
|||
}
|
||||
```
|
||||
|
||||
The result of building this package is a script which can be run and piped into `docker load` to load the generated image.
|
||||
The result of building this package is a script which can be run and piped into `docker image load` to load the generated image.
|
||||
|
||||
```shell
|
||||
$ nix-build
|
||||
(some output removed for clarity)
|
||||
/nix/store/iz4dhdvgzazl5vrgyz719iwjzjy6xlx1-stream-hello-2.12.1-env
|
||||
|
||||
$ /nix/store/iz4dhdvgzazl5vrgyz719iwjzjy6xlx1-stream-hello-2.12.1-env | docker load
|
||||
$ /nix/store/iz4dhdvgzazl5vrgyz719iwjzjy6xlx1-stream-hello-2.12.1-env | docker image load
|
||||
(some output removed for clarity)
|
||||
Loaded image: hello-2.12.1-env:latest
|
||||
```
|
||||
|
@ -1561,7 +1578,7 @@ Loaded image: hello-2.12.1-env:latest
|
|||
After starting an interactive container, we can see the result of the `shellHook`:
|
||||
|
||||
```shell
|
||||
$ docker run -it hello-2.12.1-env:latest
|
||||
$ docker container run -it hello-2.12.1-env:latest
|
||||
Hello, world!
|
||||
|
||||
[nix-shell:~]$
|
||||
|
|
|
@ -1,37 +1,104 @@
|
|||
# pkgs.ociTools {#sec-pkgs-ociTools}
|
||||
|
||||
`pkgs.ociTools` is a set of functions for creating containers according to the [OCI container specification v1.0.0](https://github.com/opencontainers/runtime-spec). Beyond that, it makes no assumptions about the container runner you choose to use to run the created container.
|
||||
`pkgs.ociTools` is a set of functions for creating runtime container bundles according to the [OCI runtime specification v1.0.0](https://github.com/opencontainers/runtime-spec/blob/v1.0.0/spec.md).
|
||||
It makes no assumptions about the container runner you choose to use to run the created container.
|
||||
|
||||
The set of functions in `pkgs.ociTools` currently does not handle the [OCI image specification](https://github.com/opencontainers/image-spec).
|
||||
|
||||
At a high-level an OCI implementation would download an OCI Image then unpack that image into an OCI Runtime filesystem bundle.
|
||||
At this point the OCI Runtime Bundle would be run by an OCI Runtime.
|
||||
`pkgs.ociTools` provides utilities to create OCI Runtime bundles.
|
||||
|
||||
## buildContainer {#ssec-pkgs-ociTools-buildContainer}
|
||||
|
||||
This function creates a simple OCI container that runs a single command inside of it. An OCI container consists of a `config.json` and a rootfs directory. The nix store of the container will contain all referenced dependencies of the given command.
|
||||
This function creates an OCI runtime container (consisting of a `config.json` and a root filesystem directory) that runs a single command inside of it.
|
||||
The nix store of the container will contain all referenced dependencies of the given command.
|
||||
|
||||
The parameters of `buildContainer` with an example value are described below:
|
||||
This function has an assumption that the container will run on POSIX platforms, and sets configurations (such as the user running the process or certain mounts) according to this assumption.
|
||||
Because of this, a container built with `buildContainer` will not work on Windows or other non-POSIX platforms without modifications to the container configuration.
|
||||
These modifications aren't supported by `buildContainer`.
|
||||
|
||||
For `linux` platforms, `buildContainer` also configures the following namespaces (see {manpage}`unshare(1)`) to isolate the OCI container from the global namespace:
|
||||
PID, network, mount, IPC, and UTS.
|
||||
|
||||
Note that no user namespace is created, which means that you won't be able to run the container unless you are the `root` user.
|
||||
|
||||
### Inputs {#ssec-pkgs-ociTools-buildContainer-inputs}
|
||||
|
||||
`buildContainer` expects an argument with the following attributes:
|
||||
|
||||
`args` (List of String)
|
||||
|
||||
: Specifies a set of arguments to run inside the container.
|
||||
Any packages referenced by `args` will be made available inside the container.
|
||||
|
||||
`mounts` (Attribute Set; _optional_)
|
||||
|
||||
: Would specify additional mounts that the runtime must make available to the container.
|
||||
|
||||
:::{.warning}
|
||||
As explained in [issue #290879](https://github.com/NixOS/nixpkgs/issues/290879), this attribute is currently ignored.
|
||||
:::
|
||||
|
||||
:::{.note}
|
||||
`buildContainer` includes a minimal set of necessary filesystems to be mounted into the container, and this set can't be changed with the `mounts` attribute.
|
||||
:::
|
||||
|
||||
_Default value:_ `{}`.
|
||||
|
||||
`readonly` (Boolean; _optional_)
|
||||
|
||||
: If `true`, sets the container's root filesystem as read-only.
|
||||
|
||||
_Default value:_ `false`.
|
||||
|
||||
`os` **DEPRECATED**
|
||||
|
||||
: Specifies the operating system on which the container filesystem is based on.
|
||||
If specified, its value should follow the [OCI Image Configuration Specification](https://github.com/opencontainers/image-spec/blob/main/config.md#properties).
|
||||
According to the linked specification, all possible values for `$GOOS` in [the Go docs](https://go.dev/doc/install/source#environment) should be valid, but will commonly be one of `darwin` or `linux`.
|
||||
|
||||
_Default value:_ `"linux"`.
|
||||
|
||||
`arch` **DEPRECATED**
|
||||
|
||||
: Used to specify the architecture for which the binaries in the container filesystem have been compiled.
|
||||
If specified, its value should follow the [OCI Image Configuration Specification](https://github.com/opencontainers/image-spec/blob/main/config.md#properties).
|
||||
According to the linked specification, all possible values for `$GOARCH` in [the Go docs](https://go.dev/doc/install/source#environment) should be valid, but will commonly be one of `386`, `amd64`, `arm`, or `arm64`.
|
||||
|
||||
_Default value:_ `x86_64`.
|
||||
|
||||
### Examples {#ssec-pkgs-ociTools-buildContainer-examples}
|
||||
|
||||
::: {.example #ex-ociTools-buildContainer-bash}
|
||||
# Creating an OCI runtime container that runs `bash`
|
||||
|
||||
This example uses `ociTools.buildContainer` to create a simple container that runs `bash`.
|
||||
|
||||
```nix
|
||||
buildContainer {
|
||||
{ ociTools, lib, bash }:
|
||||
ociTools.buildContainer {
|
||||
args = [
|
||||
(with pkgs;
|
||||
writeScript "run.sh" ''
|
||||
#!${bash}/bin/bash
|
||||
exec ${bash}/bin/bash
|
||||
'').outPath
|
||||
(lib.getExe bash)
|
||||
];
|
||||
|
||||
mounts = {
|
||||
"/data" = {
|
||||
type = "none";
|
||||
source = "/var/lib/mydata";
|
||||
options = [ "bind" ];
|
||||
};
|
||||
};
|
||||
|
||||
readonly = false;
|
||||
}
|
||||
```
|
||||
|
||||
- `args` specifies a set of arguments to run inside the container. This is the only required argument for `buildContainer`. All referenced packages inside the derivation will be made available inside the container.
|
||||
As an example of how to run the container generated by this package, we'll use `runc` to start the container.
|
||||
Any other tool that supports OCI containers could be used instead.
|
||||
|
||||
- `mounts` specifies additional mount points chosen by the user. By default only a minimal set of necessary filesystems are mounted into the container (e.g procfs, cgroupfs)
|
||||
```shell
|
||||
$ nix-build
|
||||
(some output removed for clarity)
|
||||
/nix/store/7f9hgx0arvhzp2a3qphp28rxbn748l25-join
|
||||
|
||||
- `readonly` makes the container's rootfs read-only if it is set to true. The default value is false `false`.
|
||||
$ cd /nix/store/7f9hgx0arvhzp2a3qphp28rxbn748l25-join
|
||||
$ nix-shell -p runc
|
||||
[nix-shell:/nix/store/7f9hgx0arvhzp2a3qphp28rxbn748l25-join]$ sudo runc run ocitools-example
|
||||
help
|
||||
GNU bash, version 5.2.26(1)-release (x86_64-pc-linux-gnu)
|
||||
(some output removed for clarity)
|
||||
```
|
||||
:::
|
||||
|
|
|
@ -1,81 +1,174 @@
|
|||
# pkgs.portableService {#sec-pkgs-portableService}
|
||||
|
||||
`pkgs.portableService` is a function to create _portable service images_,
|
||||
as read-only, immutable, `squashfs` archives.
|
||||
|
||||
systemd supports a concept of [Portable Services](https://systemd.io/PORTABLE_SERVICES/).
|
||||
Portable Services are a delivery method for system services that uses two specific features of container management:
|
||||
|
||||
* Applications are bundled. I.e. multiple services, their binaries and
|
||||
all their dependencies are packaged in an image, and are run directly from it.
|
||||
* Stricter default security policies, i.e. sandboxing of applications.
|
||||
|
||||
This allows using Nix to build images which can be run on many recent Linux distributions.
|
||||
|
||||
The primary tool for interacting with Portable Services is `portablectl`,
|
||||
and they are managed by the `systemd-portabled` system service.
|
||||
`pkgs.portableService` is a function to create [Portable Services](https://systemd.io/PORTABLE_SERVICES/) in a read-only, immutable, `squashfs` raw disk image.
|
||||
This lets you use Nix to build images which can be run on many recent Linux distributions.
|
||||
|
||||
::: {.note}
|
||||
Portable services are supported starting with systemd 239 (released on 2018-06-22).
|
||||
:::
|
||||
|
||||
A very simple example of using `portableService` is described below:
|
||||
The generated image will contain the file system structure as required by the Portable Services specification, along with the packages given to `portableService` and all of their dependencies.
|
||||
When generated, the image will exist in the Nix store with the `.raw` file extension, as required by the specification.
|
||||
See [](#ex-portableService-hello) to understand how to use the output of `portableService`.
|
||||
|
||||
## Inputs {#ssec-pkgs-portableService-inputs}
|
||||
|
||||
`portableService` expects one argument with the following attributes:
|
||||
|
||||
`pname` (String)
|
||||
|
||||
: The name of the portable service.
|
||||
The generated image will be named according to the template `$pname_$version.raw`, which is supported by the Portable Services specification.
|
||||
|
||||
`version` (String)
|
||||
|
||||
: The version of the portable service.
|
||||
The generated image will be named according to the template `$pname_$version.raw`, which is supported by the Portable Services specification.
|
||||
|
||||
`units` (List of Attribute Set)
|
||||
|
||||
: A list of derivations for systemd unit files.
|
||||
Each derivation must produce a single file, and must have a name that starts with the value of `pname` and ends with the suffix of the unit type (e.g. ".service", ".socket", ".timer", and so on).
|
||||
See [](#ex-portableService-hello) to better understand this naming constraint.
|
||||
|
||||
`description` (String or Null; _optional_)
|
||||
|
||||
: If specified, the value is added as `PORTABLE_PRETTY_NAME` to the `/etc/os-release` file in the generated image.
|
||||
This could be used to provide more information to anyone inspecting the image.
|
||||
|
||||
_Default value:_ `null`.
|
||||
|
||||
`homepage` (String or Null; _optional_)
|
||||
|
||||
: If specified, the value is added as `HOME_URL` to the `/etc/os-release` file in the generated image.
|
||||
This could be used to provide more information to anyone inspecting the image.
|
||||
|
||||
_Default value:_ `null`.
|
||||
|
||||
`symlinks` (List of Attribute Set; _optional_)
|
||||
|
||||
: A list of attribute sets in the format `{object, symlink}`.
|
||||
For each item in the list, `portableService` will create a symlink in the path specified by `symlink` (relative to the root of the image) that points to `object`.
|
||||
|
||||
All packages that `object` depends on and their dependencies are automatically copied into the image.
|
||||
|
||||
This can be used to create symlinks for applications that assume some files to exist globally (`/etc/ssl` or `/bin/bash`, for example).
|
||||
See [](#ex-portableService-symlinks) to understand how to do that.
|
||||
|
||||
_Default value:_ `[]`.
|
||||
|
||||
`contents` (List of Attribute Set; _optional_)
|
||||
|
||||
: A list of additional derivations to be included as-is in the image.
|
||||
These derivations will be included directly in a `/nix/store` directory inside the image.
|
||||
|
||||
_Default value:_ `[]`.
|
||||
|
||||
`squashfsTools` (Attribute Set; _optional_)
|
||||
|
||||
: Allows you to override the package that provides {manpage}`mksquashfs(1)`, which is used internally by `portableService`.
|
||||
|
||||
_Default value:_ `pkgs.squashfsTools`.
|
||||
|
||||
`squash-compression` (String; _optional_)
|
||||
|
||||
: Passed as the compression option to {manpage}`mksquashfs(1)`, which is used internally by `portableService`.
|
||||
|
||||
_Default value:_ `"xz -Xdict-size 100%"`.
|
||||
|
||||
`squash-block-size` (String; _optional_)
|
||||
|
||||
: Passed as the block size option to {manpage}`mksquashfs(1)`, which is used internally by `portableService`.
|
||||
|
||||
_Default value:_ `"1M"`.
|
||||
|
||||
## Examples {#ssec-pkgs-portableService-examples}
|
||||
|
||||
[]{#ex-pkgs-portableService}
|
||||
:::{.example #ex-portableService-hello}
|
||||
# Building a Portable Service image
|
||||
|
||||
The following example builds a Portable Service image with the `hello` package, along with a service unit that runs it.
|
||||
|
||||
```nix
|
||||
pkgs.portableService {
|
||||
pname = "demo";
|
||||
version = "1.0";
|
||||
units = [ demo-service demo-socket ];
|
||||
{ lib, writeText, portableService, hello }:
|
||||
let
|
||||
hello-service = writeText "hello.service" ''
|
||||
[Unit]
|
||||
Description=Hello world service
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
ExecStart=${lib.getExe hello}
|
||||
'';
|
||||
in
|
||||
portableService {
|
||||
pname = "hello";
|
||||
inherit (hello) version;
|
||||
units = [ hello-service ];
|
||||
}
|
||||
```
|
||||
|
||||
The above example will build an squashfs archive image in `result/$pname_$version.raw`. The image will contain the
|
||||
file system structure as required by the portable service specification, and a subset of the Nix store with all the
|
||||
dependencies of the two derivations in the `units` list.
|
||||
`units` must be a list of derivations, and their names must be prefixed with the service name (`"demo"` in this case).
|
||||
Otherwise `systemd-portabled` will ignore them.
|
||||
After building the package, the generated image can be loaded into a system through {manpage}`portablectl(1)`:
|
||||
|
||||
::: {.note}
|
||||
The `.raw` file extension of the image is required by the portable services specification.
|
||||
```shell
|
||||
$ nix-build
|
||||
(some output removed for clarity)
|
||||
/nix/store/8c20z1vh7z8w8dwagl8w87b45dn5k6iq-hello-img-2.12.1
|
||||
|
||||
$ portablectl attach /nix/store/8c20z1vh7z8w8dwagl8w87b45dn5k6iq-hello-img-2.12.1/hello_2.12.1.raw
|
||||
Created directory /etc/systemd/system.attached.
|
||||
Created directory /etc/systemd/system.attached/hello.service.d.
|
||||
Written /etc/systemd/system.attached/hello.service.d/20-portable.conf.
|
||||
Created symlink /etc/systemd/system.attached/hello.service.d/10-profile.conf → /usr/lib/systemd/portable/profile/default/service.conf.
|
||||
Copied /etc/systemd/system.attached/hello.service.
|
||||
Created symlink /etc/portables/hello_2.12.1.raw → /nix/store/8c20z1vh7z8w8dwagl8w87b45dn5k6iq-hello-img-2.12.1/hello_2.12.1.raw.
|
||||
|
||||
$ systemctl start hello
|
||||
$ journalctl -u hello
|
||||
Feb 28 22:39:16 hostname systemd[1]: Starting Hello world service...
|
||||
Feb 28 22:39:16 hostname hello[102887]: Hello, world!
|
||||
Feb 28 22:39:16 hostname systemd[1]: hello.service: Deactivated successfully.
|
||||
Feb 28 22:39:16 hostname systemd[1]: Finished Hello world service.
|
||||
|
||||
$ portablectl detach hello_2.12.1
|
||||
Removed /etc/systemd/system.attached/hello.service.
|
||||
Removed /etc/systemd/system.attached/hello.service.d/10-profile.conf.
|
||||
Removed /etc/systemd/system.attached/hello.service.d/20-portable.conf.
|
||||
Removed /etc/systemd/system.attached/hello.service.d.
|
||||
Removed /etc/portables/hello_2.12.1.raw.
|
||||
Removed /etc/systemd/system.attached.
|
||||
```
|
||||
:::
|
||||
|
||||
Some other options available are:
|
||||
- `description`, `homepage`
|
||||
:::{.example #ex-portableService-symlinks}
|
||||
# Specifying symlinks when building a Portable Service image
|
||||
|
||||
Are added to the `/etc/os-release` in the image and are shown by the portable services tooling.
|
||||
Default to empty values, not added to os-release.
|
||||
- `symlinks`
|
||||
Some services may expect files or directories to be available globally.
|
||||
An example is a service which expects all trusted SSL certificates to exist in a specific location by default.
|
||||
|
||||
A list of attribute sets {object, symlink}. Symlinks will be created in the root filesystem of the image to
|
||||
objects in the Nix store. Defaults to an empty list.
|
||||
- `contents`
|
||||
To make things available globally, you must specify the `symlinks` attribute when using `portableService`.
|
||||
The following package builds on the package from [](#ex-portableService-hello) to make `/etc/ssl` available globally (this is only for illustrative purposes, because `hello` doesn't use `/etc/ssl`).
|
||||
|
||||
A list of additional derivations to be included in the image Nix store, as-is. Defaults to an empty list.
|
||||
- `squashfsTools`
|
||||
|
||||
Defaults to `pkgs.squashfsTools`, allows you to override the package that provides `mksquashfs`.
|
||||
- `squash-compression`, `squash-block-size`
|
||||
|
||||
Options to `mksquashfs`. Default to `"xz -Xdict-size 100%"` and `"1M"` respectively.
|
||||
|
||||
A typical usage of `symlinks` would be:
|
||||
```nix
|
||||
symlinks = [
|
||||
{ object = "${pkgs.cacert}/etc/ssl"; symlink = "/etc/ssl"; }
|
||||
{ object = "${pkgs.bash}/bin/bash"; symlink = "/bin/sh"; }
|
||||
{ object = "${pkgs.php}/bin/php"; symlink = "/usr/bin/php"; }
|
||||
];
|
||||
```
|
||||
to create these symlinks for legacy applications that assume them existing globally.
|
||||
{ lib, writeText, portableService, hello, cacert }:
|
||||
let
|
||||
hello-service = writeText "hello.service" ''
|
||||
[Unit]
|
||||
Description=Hello world service
|
||||
|
||||
Once the image is created, and deployed on a host in `/var/lib/portables/`, you can attach the image and run the service. As root run:
|
||||
```console
|
||||
portablectl attach demo_1.0.raw
|
||||
systemctl enable --now demo.socket
|
||||
systemctl enable --now demo.service
|
||||
[Service]
|
||||
Type=oneshot
|
||||
ExecStart=${lib.getExe hello}
|
||||
'';
|
||||
in
|
||||
portableService {
|
||||
pname = "hello";
|
||||
inherit (hello) version;
|
||||
units = [ hello-service ];
|
||||
symlinks = [
|
||||
{ object = "${cacert}/etc/ssl"; symlink = "/etc/ssl"; }
|
||||
];
|
||||
}
|
||||
```
|
||||
::: {.note}
|
||||
See the [man page](https://www.freedesktop.org/software/systemd/man/portablectl.html) of `portablectl` for more info on its usage.
|
||||
:::
|
||||
|
|
|
@ -1,71 +0,0 @@
|
|||
# pkgs.snapTools {#sec-pkgs-snapTools}
|
||||
|
||||
`pkgs.snapTools` is a set of functions for creating Snapcraft images. Snap and Snapcraft is not used to perform these operations.
|
||||
|
||||
## The makeSnap Function {#ssec-pkgs-snapTools-makeSnap-signature}
|
||||
|
||||
`makeSnap` takes a single named argument, `meta`. This argument mirrors [the upstream `snap.yaml` format](https://docs.snapcraft.io/snap-format) exactly.
|
||||
|
||||
The `base` should not be specified, as `makeSnap` will force set it.
|
||||
|
||||
Currently, `makeSnap` does not support creating GUI stubs.
|
||||
|
||||
## Build a Hello World Snap {#ssec-pkgs-snapTools-build-a-snap-hello}
|
||||
|
||||
The following expression packages GNU Hello as a Snapcraft snap.
|
||||
|
||||
``` {#ex-snapTools-buildSnap-hello .nix}
|
||||
let
|
||||
inherit (import <nixpkgs> { }) snapTools hello;
|
||||
in snapTools.makeSnap {
|
||||
meta = {
|
||||
name = "hello";
|
||||
summary = hello.meta.description;
|
||||
description = hello.meta.longDescription;
|
||||
architectures = [ "amd64" ];
|
||||
confinement = "strict";
|
||||
apps.hello.command = "${hello}/bin/hello";
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
`nix-build` this expression and install it with `snap install ./result --dangerous`. `hello` will now be the Snapcraft version of the package.
|
||||
|
||||
## Build a Graphical Snap {#ssec-pkgs-snapTools-build-a-snap-firefox}
|
||||
|
||||
Graphical programs require many more integrations with the host. This example uses Firefox as an example because it is one of the most complicated programs we could package.
|
||||
|
||||
``` {#ex-snapTools-buildSnap-firefox .nix}
|
||||
let
|
||||
inherit (import <nixpkgs> { }) snapTools firefox;
|
||||
in snapTools.makeSnap {
|
||||
meta = {
|
||||
name = "nix-example-firefox";
|
||||
summary = firefox.meta.description;
|
||||
architectures = [ "amd64" ];
|
||||
apps.nix-example-firefox = {
|
||||
command = "${firefox}/bin/firefox";
|
||||
plugs = [
|
||||
"pulseaudio"
|
||||
"camera"
|
||||
"browser-support"
|
||||
"avahi-observe"
|
||||
"cups-control"
|
||||
"desktop"
|
||||
"desktop-legacy"
|
||||
"gsettings"
|
||||
"home"
|
||||
"network"
|
||||
"mount-observe"
|
||||
"removable-media"
|
||||
"x11"
|
||||
];
|
||||
};
|
||||
confinement = "strict";
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
`nix-build` this expression and install it with `snap install ./result --dangerous`. `nix-example-firefox` will now be the Snapcraft version of the Firefox package.
|
||||
|
||||
The specific meaning behind plugs can be looked up in the [Snapcraft interface documentation](https://docs.snapcraft.io/supported-interfaces).
|
|
@ -7,7 +7,9 @@ Like [`stdenv.mkDerivation`](#sec-using-stdenv), each of these build helpers cre
|
|||
|
||||
`runCommand :: String -> AttrSet -> String -> Derivation`
|
||||
|
||||
`runCommand name drvAttrs buildCommand` returns a derivation that is built by running the specified shell commands.
|
||||
The result of `runCommand name drvAttrs buildCommand` is a derivation that is built by running the specified shell commands.
|
||||
|
||||
By default `runCommand` runs in a stdenv with no compiler environment, whereas [`runCommandCC`](#trivial-builder-runCommandCC) uses the default stdenv, `pkgs.stdenv`.
|
||||
|
||||
`name :: String`
|
||||
: The name that Nix will append to the store path in the same way that `stdenv.mkDerivation` uses its `name` attribute.
|
||||
|
@ -92,6 +94,107 @@ writeShellScript "evaluate-my-file.sh" ''
|
|||
```
|
||||
::::
|
||||
|
||||
### `makeDesktopItem` {#trivial-builder-makeDesktopItem}
|
||||
|
||||
Write an [XDG desktop file](https://specifications.freedesktop.org/desktop-entry-spec/1.4/) to the Nix store.
|
||||
|
||||
This function is usually used to add desktop items to a package through the `copyDesktopItems` hook.
|
||||
|
||||
`makeDesktopItem` adheres to version 1.4 of the specification.
|
||||
|
||||
#### Inputs {#trivial-builder-makeDesktopItem-inputs}
|
||||
|
||||
`makeDesktopItem` takes an attribute set that accepts most values from the [XDG specification](https://specifications.freedesktop.org/desktop-entry-spec/1.4/ar01s06.html).
|
||||
|
||||
All recognised keys from the specification are supported with the exception of the "Hidden" field. The keys are converted into camelCase format, but correspond 1:1 to their equivalent in the specification: `genericName`, `noDisplay`, `comment`, `icon`, `onlyShowIn`, `notShowIn`, `dbusActivatable`, `tryExec`, `exec`, `path`, `terminal`, `mimeTypes`, `categories`, `implements`, `keywords`, `startupNotify`, `startupWMClass`, `url`, `prefersNonDefaultGPU`.
|
||||
|
||||
The "Version" field is hardcoded to the version `makeDesktopItem` currently adheres to.
|
||||
|
||||
The following fields are either required, are of a different type than in the specification, carry specific default values, or are additional fields supported by `makeDesktopItem`:
|
||||
|
||||
`name` (String)
|
||||
|
||||
: The name of the desktop file in the Nix store.
|
||||
|
||||
`type` (String; _optional_)
|
||||
|
||||
: Default value: `"Application"`
|
||||
|
||||
`desktopName` (String)
|
||||
|
||||
: Corresponds to the "Name" field of the specification.
|
||||
|
||||
`actions` (List of Attribute set; _optional_)
|
||||
|
||||
: A list of attribute sets {name, exec?, icon?}
|
||||
|
||||
`extraConfig` (Attribute set; _optional_)
|
||||
|
||||
: Additional key/value pairs to be added verbatim to the desktop file. Attributes need to be prefixed with 'X-'.
|
||||
|
||||
#### Examples {#trivial-builder-makeDesktopItem-examples}
|
||||
|
||||
::: {.example #ex-makeDesktopItem}
|
||||
# Usage 1 of `makeDesktopItem`
|
||||
|
||||
Write a desktop file `/nix/store/<store path>/my-program.desktop` to the Nix store.
|
||||
|
||||
```nix
|
||||
{makeDesktopItem}:
|
||||
makeDesktopItem {
|
||||
name = "my-program";
|
||||
desktopName = "My Program";
|
||||
genericName = "Video Player";
|
||||
noDisplay = false;
|
||||
comment = "Cool video player";
|
||||
icon = "/path/to/icon";
|
||||
onlyShowIn = [ "KDE" ];
|
||||
dbusActivatable = true;
|
||||
tryExec = "my-program";
|
||||
exec = "my-program --someflag";
|
||||
path = "/some/working/path";
|
||||
terminal = false;
|
||||
actions.example = {
|
||||
name = "New Window";
|
||||
exec = "my-program --new-window";
|
||||
icon = "/some/icon";
|
||||
};
|
||||
mimeTypes = [ "video/mp4" ];
|
||||
categories = [ "Utility" ];
|
||||
implements = [ "org.my-program" ];
|
||||
keywords = [ "Video" "Player" ];
|
||||
startupNotify = false;
|
||||
startupWMClass = "MyProgram";
|
||||
prefersNonDefaultGPU = false;
|
||||
extraConfig.X-SomeExtension = "somevalue";
|
||||
}
|
||||
```
|
||||
|
||||
:::
|
||||
|
||||
::: {.example #ex2-makeDesktopItem}
|
||||
# Usage 2 of `makeDesktopItem`
|
||||
|
||||
Override the `hello` package to add a desktop item.
|
||||
|
||||
```nix
|
||||
{ copyDesktopItems
|
||||
, hello
|
||||
, makeDesktopItem }:
|
||||
|
||||
hello.overrideAttrs {
|
||||
nativeBuildInputs = [ copyDesktopItems ];
|
||||
|
||||
desktopItems = [(makeDesktopItem {
|
||||
name = "hello";
|
||||
desktopName = "Hello";
|
||||
exec = "hello";
|
||||
})];
|
||||
}
|
||||
```
|
||||
|
||||
:::
|
||||
|
||||
### `writeTextFile` {#trivial-builder-writeTextFile}
|
||||
|
||||
Write a text file to the Nix store.
|
||||
|
@ -153,6 +256,12 @@ Write a text file to the Nix store.
|
|||
|
||||
Default: `true`
|
||||
|
||||
`derivationArgs` (Attribute set, _optional_)
|
||||
|
||||
: Extra arguments to pass to the underlying call to `stdenv.mkDerivation`.
|
||||
|
||||
Default: `{}`
|
||||
|
||||
The resulting store path will include some variation of the name, and it will be a file unless `destination` is used, in which case it will be a directory.
|
||||
|
||||
::: {.example #ex-writeTextFile}
|
||||
|
@ -549,14 +658,18 @@ This creates a derivation with a directory structure like the following:
|
|||
|
||||
## `writeReferencesToFile` {#trivial-builder-writeReferencesToFile}
|
||||
|
||||
Writes the closure of transitive dependencies to a file.
|
||||
Deprecated. Use [`writeClosure`](#trivial-builder-writeClosure) instead.
|
||||
|
||||
This produces the equivalent of `nix-store -q --requisites`.
|
||||
## `writeClosure` {#trivial-builder-writeClosure}
|
||||
|
||||
Given a list of [store paths](https://nixos.org/manual/nix/stable/glossary#gloss-store-path) (or string-like expressions coercible to store paths), write their collective [closure](https://nixos.org/manual/nix/stable/glossary#gloss-closure) to a text file.
|
||||
|
||||
The result is equivalent to the output of `nix-store -q --requisites`.
|
||||
|
||||
For example,
|
||||
|
||||
```nix
|
||||
writeReferencesToFile (writeScriptBin "hi" ''${hello}/bin/hello'')
|
||||
writeClosure [ (writeScriptBin "hi" ''${hello}/bin/hello'') ]
|
||||
```
|
||||
|
||||
produces an output path `/nix/store/<hash>-runtime-deps` containing
|
||||
|
|
|
@ -122,16 +122,17 @@ in pkgs.stdenv.mkDerivation {
|
|||
${pkgs.documentation-highlighter}/mono-blue.css \
|
||||
${pkgs.documentation-highlighter}/loader.js
|
||||
|
||||
cp -t out ./overrides.css ./style.css
|
||||
cp -t out ./style.css ./anchor.min.js ./anchor-use.js
|
||||
|
||||
nixos-render-docs manual html \
|
||||
--manpage-urls ./manpage-urls.json \
|
||||
--revision ${pkgs.lib.trivial.revisionWithDefault (pkgs.rev or "master")} \
|
||||
--stylesheet style.css \
|
||||
--stylesheet overrides.css \
|
||||
--stylesheet highlightjs/mono-blue.css \
|
||||
--script ./highlightjs/highlight.pack.js \
|
||||
--script ./highlightjs/loader.js \
|
||||
--script ./anchor.min.js \
|
||||
--script ./anchor-use.js \
|
||||
--toc-depth 1 \
|
||||
--section-toc-depth 1 \
|
||||
manual.md \
|
||||
|
|
|
@ -6,8 +6,9 @@ All generators follow a similar call interface: `generatorName configFunctions d
|
|||
Generators can be fine-tuned to produce exactly the file format required by your application/service. One example is an INI-file format which uses `: ` as separator, the strings `"yes"`/`"no"` as boolean values and requires all string values to be quoted:
|
||||
|
||||
```nix
|
||||
with lib;
|
||||
let
|
||||
inherit (lib) generators isString;
|
||||
|
||||
customToINI = generators.toINI {
|
||||
# specifies how to format a key/value pair
|
||||
mkKeyValue = generators.mkKeyValueDefault {
|
||||
|
|
|
@ -55,7 +55,13 @@ Here is a simple package example. It is a pure Coq library, thus it depends on C
|
|||
```nix
|
||||
{ lib, mkCoqDerivation, version ? null
|
||||
, coq, mathcomp, mathcomp-finmap, mathcomp-bigenough }:
|
||||
with lib; mkCoqDerivation {
|
||||
|
||||
let
|
||||
inherit (lib) licenses maintainers switch;
|
||||
inherit (lib.versions) range;
|
||||
in
|
||||
|
||||
mkCoqDerivation {
|
||||
/* namePrefix leads to e.g. `name = coq8.11-mathcomp1.11-multinomials-1.5.2` */
|
||||
namePrefix = [ "coq" "mathcomp" ];
|
||||
pname = "multinomials";
|
||||
|
|
|
@ -144,4 +144,4 @@ All new projects should use the CUDA redistributables available in [`cudaPackage
|
|||
| Find libraries | `configurePhase` | Missing dependency on a `dev` output | Add the missing dependency | The `dev` output typically contain CMake configuration files |
|
||||
| Find libraries | `buildPhase` or `patchelf` | Missing dependency on a `lib` or `static` output | Add the missing dependency | The `lib` or `static` output typically contain the libraries |
|
||||
|
||||
In the scenario you are unable to run the resulting binary: this is arguably the most complicated as it could be any combination of the previous reasons. This type of failure typically occurs when a library attempts to load or open a library it depends on that it does not declare in its `DT_NEEDED` section. As a first step, ensure that dependencies are patched with [`cudaPackages.autoAddOpenGLRunpath`](https://search.nixos.org/packages?channel=unstable&type=packages&query=cudaPackages.autoAddOpenGLRunpath). Failing that, try running the application with [`nixGL`](https://github.com/guibou/nixGL) or a similar wrapper tool. If that works, it likely means that the application is attempting to load a library that is not in the `RPATH` or `RUNPATH` of the binary.
|
||||
In the scenario you are unable to run the resulting binary: this is arguably the most complicated as it could be any combination of the previous reasons. This type of failure typically occurs when a library attempts to load or open a library it depends on that it does not declare in its `DT_NEEDED` section. As a first step, ensure that dependencies are patched with [`cudaPackages.autoAddDriverRunpath`](https://search.nixos.org/packages?channel=unstable&type=packages&query=cudaPackages.autoAddDriverRunpath). Failing that, try running the application with [`nixGL`](https://github.com/guibou/nixGL) or a similar wrapper tool. If that works, it likely means that the application is attempting to load a library that is not in the `RPATH` or `RUNPATH` of the binary.
|
||||
|
|
|
@ -210,11 +210,11 @@ buildDotnetGlobalTool {
|
|||
|
||||
nugetSha256 = "sha256-ZG2HFyKYhVNVYd2kRlkbAjZJq88OADe3yjxmLuxXDUo=";
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
homepage = "https://cmd.petabridge.com/index.html";
|
||||
changelog = "https://cmd.petabridge.com/articles/RELEASE_NOTES.html";
|
||||
license = licenses.unfree;
|
||||
platforms = platforms.linux;
|
||||
license = lib.licenses.unfree;
|
||||
platforms = lib.platforms.linux;
|
||||
};
|
||||
}
|
||||
```
|
||||
|
|
|
@ -51,11 +51,11 @@ pet = buildGoModule rec {
|
|||
|
||||
vendorHash = "sha256-ciBIR+a1oaYH+H1PcC8cD8ncfJczk1IiJ8iYNM+R6aA=";
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
description = "Simple command-line snippet manager, written in Go";
|
||||
homepage = "https://github.com/knqyf263/pet";
|
||||
license = licenses.mit;
|
||||
maintainers = with maintainers; [ kalbasit ];
|
||||
license = lib.licenses.mit;
|
||||
maintainers = with lib.maintainers; [ kalbasit ];
|
||||
};
|
||||
}
|
||||
```
|
||||
|
|
|
@ -113,7 +113,7 @@ Each of those compiler versions has a corresponding attribute set built using
|
|||
it. However, the non-standard package sets are not tested regularly and, as a
|
||||
result, contain fewer working packages. The corresponding package set for GHC
|
||||
9.4.5 is `haskell.packages.ghc945`. In fact `haskellPackages` is just an alias
|
||||
for `haskell.packages.ghc927`:
|
||||
for `haskell.packages.ghc964`:
|
||||
|
||||
```console
|
||||
$ nix-env -f '<nixpkgs>' -qaP -A haskell.packages.ghc927
|
||||
|
@ -1020,6 +1020,11 @@ failing because of e.g. a syntax error in the Haddock documentation.
|
|||
: Sets `doCheck` to `false` for `drv`. Useful if a package has a broken,
|
||||
flaky or otherwise problematic test suite breaking the build.
|
||||
|
||||
`dontCheckIf condition drv`
|
||||
: Sets `doCheck` to `false` for `drv`, but only if `condition` applies.
|
||||
Otherwise it's a no-op. Useful to conditionally disable tests for a package
|
||||
without interfering with previous overrides or default values.
|
||||
|
||||
<!-- Purposefully omitting the non-list variants here. They are a bit
|
||||
ugly, and we may want to deprecate them at some point. -->
|
||||
|
||||
|
|
|
@ -93,11 +93,11 @@ build-idris-package {
|
|||
hash = "sha256-h28F9EEPuvab6zrfeE+0k1XGQJGwINnsJEG8yjWIl7w=";
|
||||
};
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
description = "Idris YAML lib";
|
||||
homepage = "https://github.com/Heather/Idris.Yaml";
|
||||
license = licenses.mit;
|
||||
maintainers = [ maintainers.brainrape ];
|
||||
license = lib.licenses.mit;
|
||||
maintainers = [ lib.maintainers.brainrape ];
|
||||
};
|
||||
}
|
||||
```
|
||||
|
|
|
@ -4,12 +4,31 @@ Ant-based Java packages are typically built from source as follows:
|
|||
|
||||
```nix
|
||||
stdenv.mkDerivation {
|
||||
name = "...";
|
||||
pname = "...";
|
||||
version = "...";
|
||||
|
||||
src = fetchurl { ... };
|
||||
|
||||
nativeBuildInputs = [ jdk ant ];
|
||||
nativeBuildInputs = [
|
||||
ant
|
||||
jdk
|
||||
stripJavaArchivesHook # removes timestamp metadata from jar files
|
||||
];
|
||||
|
||||
buildPhase = "ant";
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
ant # build the project using ant
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
# copy generated jar file(s) to an appropriate location in $out
|
||||
install -Dm644 build/foo.jar $out/share/java/foo.jar
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
}
|
||||
```
|
||||
|
||||
|
@ -17,6 +36,10 @@ Note that `jdk` is an alias for the OpenJDK (self-built where available,
|
|||
or pre-built via Zulu). Platforms with OpenJDK not (yet) in Nixpkgs
|
||||
(`Aarch32`, `Aarch64`) point to the (unfree) `oraclejdk`.
|
||||
|
||||
Also note that not using `stripJavaArchivesHook` will likely cause the
|
||||
generated `.jar` files to be non-deterministic, which is not optimal.
|
||||
Using it, however, does not always guarantee reproducibility.
|
||||
|
||||
JAR files that are intended to be used by other packages should be
|
||||
installed in `$out/share/java`. JDKs have a stdenv setup hook that add
|
||||
any JARs in the `share/java` directories of the build inputs to the
|
||||
|
|
|
@ -4,11 +4,14 @@
|
|||
|
||||
This contains instructions on how to package javascript applications.
|
||||
|
||||
The various tools available will be listed in the [tools-overview](#javascript-tools-overview). Some general principles for packaging will follow. Finally some tool specific instructions will be given.
|
||||
The various tools available will be listed in the [tools-overview](#javascript-tools-overview).
|
||||
Some general principles for packaging will follow.
|
||||
Finally some tool specific instructions will be given.
|
||||
|
||||
## Getting unstuck / finding code examples {#javascript-finding-examples}
|
||||
|
||||
If you find you are lacking inspiration for packing javascript applications, the links below might prove useful. Searching online for prior art can be helpful if you are running into solved problems.
|
||||
If you find you are lacking inspiration for packaging javascript applications, the links below might prove useful.
|
||||
Searching online for prior art can be helpful if you are running into solved problems.
|
||||
|
||||
### Github {#javascript-finding-examples-github}
|
||||
|
||||
|
@ -30,17 +33,23 @@ The following principles are given in order of importance with potential excepti
|
|||
|
||||
It is often not documented which node version is used upstream, but if it is, try to use the same version when packaging.
|
||||
|
||||
This can be a problem if upstream is using the latest and greatest and you are trying to use an earlier version of node. Some cryptic errors regarding V8 may appear.
|
||||
This can be a problem if upstream is using the latest and greatest and you are trying to use an earlier version of node.
|
||||
Some cryptic errors regarding V8 may appear.
|
||||
|
||||
### Try to respect the package manager originally used by upstream (and use the upstream lock file) {#javascript-upstream-package-manager}
|
||||
|
||||
A lock file (package-lock.json, yarn.lock...) is supposed to make reproducible installations of node_modules for each tool.
|
||||
A lock file (package-lock.json, yarn.lock...) is supposed to make reproducible installations of `node_modules` for each tool.
|
||||
|
||||
Guidelines of package managers, recommend to commit those lock files to the repos. If a particular lock file is present, it is a strong indication of which package manager is used upstream.
|
||||
Guidelines of package managers recommend committing those lock files to the repos.
|
||||
If a particular lock file is present, it is a strong indication of which package manager is used upstream.
|
||||
|
||||
It's better to try to use a Nix tool that understand the lock file. Using a different tool might give you hard to understand error because different packages have been installed. An example of problems that could arise can be found [here](https://github.com/NixOS/nixpkgs/pull/126629). Upstream use NPM, but this is an attempt to package it with `yarn2nix` (that uses yarn.lock).
|
||||
It's better to try to use a Nix tool that understands the lock file.
|
||||
Using a different tool might give you hard-to-understand errors because different packages have been installed.
|
||||
An example of problems that could arise can be found [here](https://github.com/NixOS/nixpkgs/pull/126629).
|
||||
Upstream use NPM, but this is an attempt to package it with `yarn2nix` (that uses yarn.lock).
|
||||
|
||||
Using a different tool forces to commit a lock file to the repository. Those files are fairly large, so when packaging for nixpkgs, this approach does not scale well.
|
||||
Using a different tool forces to commit a lock file to the repository.
|
||||
Those files are fairly large, so when packaging for nixpkgs, this approach does not scale well.
|
||||
|
||||
Exceptions to this rule are:
|
||||
|
||||
|
@ -78,17 +87,23 @@ Exceptions to this rule are:
|
|||
|
||||
### Using node_modules directly {#javascript-using-node_modules}
|
||||
|
||||
Each tool has an abstraction to just build the node_modules (dependencies) directory. You can always use the `stdenv.mkDerivation` with the node_modules to build the package (symlink the node_modules directory and then use the package build command). The node_modules abstraction can be also used to build some web framework frontends. For an example of this see how [plausible](https://github.com/NixOS/nixpkgs/blob/master/pkgs/servers/web-apps/plausible/default.nix) is built. `mkYarnModules` to make the derivation containing node_modules. Then when building the frontend you can just symlink the node_modules directory.
|
||||
Each tool has an abstraction to just build the node_modules (dependencies) directory.
|
||||
You can always use the `stdenv.mkDerivation` with the node_modules to build the package (symlink the node_modules directory and then use the package build command).
|
||||
The node_modules abstraction can be also used to build some web framework frontends.
|
||||
For an example of this see how [plausible](https://github.com/NixOS/nixpkgs/blob/master/pkgs/servers/web-apps/plausible/default.nix) is built. `mkYarnModules` to make the derivation containing node_modules.
|
||||
Then when building the frontend you can just symlink the node_modules directory.
|
||||
|
||||
## Javascript packages inside nixpkgs {#javascript-packages-nixpkgs}
|
||||
|
||||
The [pkgs/development/node-packages](https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/node-packages) folder contains a generated collection of [NPM packages](https://npmjs.com/) that can be installed with the Nix package manager.
|
||||
|
||||
As a rule of thumb, the package set should only provide _end user_ software packages, such as command-line utilities. Libraries should only be added to the package set if there is a non-NPM package that requires it.
|
||||
As a rule of thumb, the package set should only provide _end user_ software packages, such as command-line utilities.
|
||||
Libraries should only be added to the package set if there is a non-NPM package that requires it.
|
||||
|
||||
When it is desired to use NPM libraries in a development project, use the `node2nix` generator directly on the `package.json` configuration file of the project.
|
||||
|
||||
The package set provides support for the official stable Node.js versions. The latest stable LTS release in `nodePackages`, as well as the latest stable current release in `nodePackages_latest`.
|
||||
The package set provides support for the official stable Node.js versions.
|
||||
The latest stable LTS release in `nodePackages`, as well as the latest stable current release in `nodePackages_latest`.
|
||||
|
||||
If your package uses native addons, you need to examine what kind of native build system it uses. Here are some examples:
|
||||
|
||||
|
@ -96,7 +111,8 @@ If your package uses native addons, you need to examine what kind of native buil
|
|||
- `node-gyp-builder`
|
||||
- `node-pre-gyp`
|
||||
|
||||
After you have identified the correct system, you need to override your package expression while adding in build system as a build input. For example, `dat` requires `node-gyp-build`, so we override its expression in [pkgs/development/node-packages/overrides.nix](https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/node-packages/overrides.nix):
|
||||
After you have identified the correct system, you need to override your package expression while adding in build system as a build input.
|
||||
For example, `dat` requires `node-gyp-build`, so we override its expression in [pkgs/development/node-packages/overrides.nix](https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/node-packages/overrides.nix):
|
||||
|
||||
```nix
|
||||
dat = prev.dat.override (oldAttrs: {
|
||||
|
@ -159,7 +175,8 @@ git config --global url."https://github.com/".insteadOf git://github.com/
|
|||
|
||||
### buildNpmPackage {#javascript-buildNpmPackage}
|
||||
|
||||
`buildNpmPackage` allows you to package npm-based projects in Nixpkgs without the use of an auto-generated dependencies file (as used in [node2nix](#javascript-node2nix)). It works by utilizing npm's cache functionality -- creating a reproducible cache that contains the dependencies of a project, and pointing npm to it.
|
||||
`buildNpmPackage` allows you to package npm-based projects in Nixpkgs without the use of an auto-generated dependencies file (as used in [node2nix](#javascript-node2nix)).
|
||||
It works by utilizing npm's cache functionality -- creating a reproducible cache that contains the dependencies of a project, and pointing npm to it.
|
||||
|
||||
Here's an example:
|
||||
|
||||
|
@ -184,16 +201,18 @@ buildNpmPackage rec {
|
|||
|
||||
NODE_OPTIONS = "--openssl-legacy-provider";
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
description = "A modern web UI for various torrent clients with a Node.js backend and React frontend";
|
||||
homepage = "https://flood.js.org";
|
||||
license = licenses.gpl3Only;
|
||||
maintainers = with maintainers; [ winter ];
|
||||
license = lib.licenses.gpl3Only;
|
||||
maintainers = with lib.maintainers; [ winter ];
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
In the default `installPhase` set by `buildNpmPackage`, it uses `npm pack --json --dry-run` to decide what files to install in `$out/lib/node_modules/$name/`, where `$name` is the `name` string defined in the package's `package.json`. Additionally, the `bin` and `man` keys in the source's `package.json` are used to decide what binaries and manpages are supposed to be installed. If these are not defined, `npm pack` may miss some files, and no binaries will be produced.
|
||||
In the default `installPhase` set by `buildNpmPackage`, it uses `npm pack --json --dry-run` to decide what files to install in `$out/lib/node_modules/$name/`, where `$name` is the `name` string defined in the package's `package.json`.
|
||||
Additionally, the `bin` and `man` keys in the source's `package.json` are used to decide what binaries and manpages are supposed to be installed.
|
||||
If these are not defined, `npm pack` may miss some files, and no binaries will be produced.
|
||||
|
||||
#### Arguments {#javascript-buildNpmPackage-arguments}
|
||||
|
||||
|
@ -233,6 +252,37 @@ sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
|
|||
|
||||
It returns a derivation with all `package-lock.json` dependencies downloaded into `$out/`, usable as an npm cache.
|
||||
|
||||
#### importNpmLock {#javascript-buildNpmPackage-importNpmLock}
|
||||
|
||||
`importNpmLock` is a Nix function that requires the following optional arguments:
|
||||
|
||||
- `npmRoot`: Path to package directory containing the source tree
|
||||
- `package`: Parsed contents of `package.json`
|
||||
- `packageLock`: Parsed contents of `package-lock.json`
|
||||
- `pname`: Package name
|
||||
- `version`: Package version
|
||||
|
||||
It returns a derivation with a patched `package.json` & `package-lock.json` with all dependencies resolved to Nix store paths.
|
||||
|
||||
This function is analogous to using `fetchNpmDeps`, but instead of specifying `hash` it uses metadata from `package.json` & `package-lock.json`.
|
||||
|
||||
Note that `npmHooks.npmConfigHook` cannot be used with `importNpmLock`. You will instead need to use `importNpmLock.npmConfigHook`:
|
||||
|
||||
```nix
|
||||
{ buildNpmPackage, importNpmLock }:
|
||||
|
||||
buildNpmPackage {
|
||||
pname = "hello";
|
||||
version = "0.1.0";
|
||||
|
||||
npmDeps = importNpmLock {
|
||||
npmRoot = ./.;
|
||||
};
|
||||
|
||||
npmConfigHook = importNpmLock.npmConfigHook;
|
||||
}
|
||||
```
|
||||
|
||||
### corepack {#javascript-corepack}
|
||||
|
||||
This package puts the corepack wrappers for pnpm and yarn in your PATH, and they will honor the `packageManager` setting in the `package.json`.
|
||||
|
@ -284,10 +334,10 @@ buildPhase = ''
|
|||
'';
|
||||
```
|
||||
|
||||
The dist phase is also trying to build a binary, the only way to override it is with:
|
||||
The `distPhase` is packing the package's dependencies in a tarball using `yarn pack`. You can disable it using:
|
||||
|
||||
```nix
|
||||
distPhase = "true";
|
||||
doDist = false;
|
||||
```
|
||||
|
||||
The configure phase can sometimes fail because it makes many assumptions which may not always apply. One common override is:
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
# User’s Guide to Lua Infrastructure {#users-guide-to-lua-infrastructure}
|
||||
# Lua {#lua}
|
||||
|
||||
## Using Lua {#using-lua}
|
||||
## Using Lua {#lua-userguide}
|
||||
|
||||
### Overview of Lua {#overview-of-lua}
|
||||
### Overview of Lua {#lua-overview}
|
||||
|
||||
Several versions of the Lua interpreter are available: luajit, lua 5.1, 5.2, 5.3.
|
||||
The attribute `lua` refers to the default interpreter, it is also possible to refer to specific versions, e.g. `lua5_2` refers to Lua 5.2.
|
||||
|
@ -118,7 +118,7 @@ Again, it is possible to launch the interpreter from the shell.
|
|||
The Lua interpreter has the attribute `pkgs` which contains all Lua libraries for that specific interpreter.
|
||||
|
||||
|
||||
## Developing with Lua {#developing-with-lua}
|
||||
## Developing with lua {#lua-developing}
|
||||
|
||||
Now that you know how to get a working Lua environment with Nix, it is time
|
||||
to go forward and start actually developing with Lua. There are two ways to
|
||||
|
@ -193,10 +193,10 @@ luaposix = buildLuarocksPackage {
|
|||
disabled = (luaOlder "5.1") || (luaAtLeast "5.4");
|
||||
propagatedBuildInputs = [ bit32 lua std_normalize ];
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
homepage = "https://github.com/luaposix/luaposix/";
|
||||
description = "Lua bindings for POSIX";
|
||||
maintainers = with maintainers; [ vyp lblasc ];
|
||||
maintainers = with lib.maintainers; [ vyp lblasc ];
|
||||
license.fullName = "MIT/X11";
|
||||
};
|
||||
};
|
||||
|
@ -215,6 +215,11 @@ install the package
|
|||
environment variable and add dependent libraries to script's `LUA_PATH` and
|
||||
`LUA_CPATH`.
|
||||
|
||||
It accepts as arguments:
|
||||
|
||||
* 'luarocksConfig': a nix value that directly maps to the luarocks config used during
|
||||
the installation
|
||||
|
||||
By default `meta.platforms` is set to the same value as the interpreter unless overridden otherwise.
|
||||
|
||||
#### `buildLuaApplication` function {#buildluaapplication-function}
|
||||
|
@ -229,30 +234,20 @@ The `lua.withPackages` takes a function as an argument that is passed the set of
|
|||
Using the `withPackages` function, the previous example for the luafilesystem environment can be written like this:
|
||||
|
||||
```nix
|
||||
with import <nixpkgs> {};
|
||||
|
||||
lua.withPackages (ps: [ps.luafilesystem])
|
||||
```
|
||||
|
||||
`withPackages` passes the correct package set for the specific interpreter version as an argument to the function. In the above example, `ps` equals `luaPackages`.
|
||||
But you can also easily switch to using `lua5_2`:
|
||||
But you can also easily switch to using `lua5_1`:
|
||||
|
||||
```nix
|
||||
with import <nixpkgs> {};
|
||||
|
||||
lua5_2.withPackages (ps: [ps.lua])
|
||||
lua5_1.withPackages (ps: [ps.lua])
|
||||
```
|
||||
|
||||
Now, `ps` is set to `lua52Packages`, matching the version of the interpreter.
|
||||
Now, `ps` is set to `lua5_1.pkgs`, matching the version of the interpreter.
|
||||
|
||||
### Possible Todos {#possible-todos}
|
||||
|
||||
* export/use version specific variables such as `LUA_PATH_5_2`/`LUAROCKS_CONFIG_5_2`
|
||||
* let luarocks check for dependencies via exporting the different rocktrees in temporary config
|
||||
|
||||
### Lua Contributing guidelines {#lua-contributing-guidelines}
|
||||
### Lua Contributing guidelines {#lua-contributing}
|
||||
|
||||
Following rules should be respected:
|
||||
|
||||
* Make sure libraries build for all Lua interpreters.
|
||||
* Commit names of Lua libraries should reflect that they are Lua libraries, so write for example `luaPackages.luafilesystem: 1.11 -> 1.12`.
|
||||
|
|
|
@ -34,11 +34,11 @@ maven.buildMavenPackage rec {
|
|||
--add-flags "-jar $out/share/jd-cli/jd-cli.jar"
|
||||
'';
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
description = "Simple command line wrapper around JD Core Java Decompiler project";
|
||||
homepage = "https://github.com/intoolswetrust/jd-cli";
|
||||
license = licenses.gpl3Plus;
|
||||
maintainers = with maintainers; [ majiir ];
|
||||
license = lib.licenses.gpl3Plus;
|
||||
maintainers = with lib.maintainers; [ majiir ];
|
||||
};
|
||||
}:
|
||||
```
|
||||
|
|
|
@ -110,11 +110,11 @@ buildDunePackage rec {
|
|||
hash = "sha256-d5/3KUBAWRj8tntr4RkJ74KWW7wvn/B/m1nx0npnzyc=";
|
||||
};
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
homepage = "https://github.com/flowtype/ocaml-wtf8";
|
||||
description = "WTF-8 is a superset of UTF-8 that allows unpaired surrogates.";
|
||||
license = licenses.mit;
|
||||
maintainers = [ maintainers.eqyiel ];
|
||||
license = lib.licenses.mit;
|
||||
maintainers = [ lib.maintainers.eqyiel ];
|
||||
};
|
||||
}
|
||||
```
|
||||
|
|
|
@ -7,7 +7,6 @@
|
|||
| Package | Aliases | Interpreter |
|
||||
|------------|-----------------|-------------|
|
||||
| python27 | python2, python | CPython 2.7 |
|
||||
| python38 | | CPython 3.8 |
|
||||
| python39 | | CPython 3.9 |
|
||||
| python310 | | CPython 3.10 |
|
||||
| python311 | python3 | CPython 3.11 |
|
||||
|
@ -60,7 +59,6 @@ sets are
|
|||
|
||||
* `pkgs.python27Packages`
|
||||
* `pkgs.python3Packages`
|
||||
* `pkgs.python38Packages`
|
||||
* `pkgs.python39Packages`
|
||||
* `pkgs.python310Packages`
|
||||
* `pkgs.python311Packages`
|
||||
|
@ -76,8 +74,9 @@ and the aliases
|
|||
|
||||
#### `buildPythonPackage` function {#buildpythonpackage-function}
|
||||
|
||||
The `buildPythonPackage` function is implemented in
|
||||
`pkgs/development/interpreters/python/mk-python-derivation.nix`
|
||||
The `buildPythonPackage` function has its name binding in
|
||||
`pkgs/development/interpreters/python/python-packages-base.nix` and is
|
||||
implemented in `pkgs/development/interpreters/python/mk-python-derivation.nix`
|
||||
using setup hooks.
|
||||
|
||||
The following is an example:
|
||||
|
@ -116,11 +115,11 @@ buildPythonPackage rec {
|
|||
rm testing/test_argcomplete.py
|
||||
'';
|
||||
|
||||
nativeBuildInputs = [
|
||||
build-system = [
|
||||
setuptools-scm
|
||||
];
|
||||
|
||||
propagatedBuildInputs = [
|
||||
dependencies = [
|
||||
attrs
|
||||
py
|
||||
setuptools
|
||||
|
@ -132,12 +131,12 @@ buildPythonPackage rec {
|
|||
hypothesis
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
changelog = "https://github.com/pytest-dev/pytest/releases/tag/${version}";
|
||||
description = "Framework for writing tests";
|
||||
homepage = "https://github.com/pytest-dev/pytest";
|
||||
license = licenses.mit;
|
||||
maintainers = with maintainers; [ domenkozar lovek323 madjar lsix ];
|
||||
license = lib.licenses.mit;
|
||||
maintainers = with lib.maintainers; [ domenkozar lovek323 madjar lsix ];
|
||||
};
|
||||
}
|
||||
```
|
||||
|
@ -172,10 +171,10 @@ following are specific to `buildPythonPackage`:
|
|||
variable in wrapped programs.
|
||||
* `pyproject`: Whether the pyproject format should be used. When set to `true`,
|
||||
`pypaBuildHook` will be used, and you can add the required build dependencies
|
||||
from `build-system.requires` to `nativeBuildInputs`. Note that the pyproject
|
||||
from `build-system.requires` to `build-system`. Note that the pyproject
|
||||
format falls back to using `setuptools`, so you can use `pyproject = true`
|
||||
even if the package only has a `setup.py`. When set to `false`, you can
|
||||
use the existing [hooks](#setup-hooks0 or provide your own logic to build the
|
||||
use the existing [hooks](#setup-hooks) or provide your own logic to build the
|
||||
package. This can be useful for packages that don't support the pyproject
|
||||
format. When unset, the legacy `setuptools` hooks are used for backwards
|
||||
compatibility.
|
||||
|
@ -206,17 +205,22 @@ build inputs (see "Specifying dependencies"). The following are of special
|
|||
interest for Python packages, either because these are primarily used, or
|
||||
because their behaviour is different:
|
||||
|
||||
* `nativeBuildInputs ? []`: Build-time only dependencies. Typically executables
|
||||
as well as the items listed in `setup_requires`.
|
||||
* `nativeBuildInputs ? []`: Build-time only dependencies. Typically executables.
|
||||
* `build-system ? []`: Build-time only Python dependencies. Items listed in `build-system.requires`/`setup_requires`.
|
||||
* `buildInputs ? []`: Build and/or run-time dependencies that need to be
|
||||
compiled for the host machine. Typically non-Python libraries which are being
|
||||
linked.
|
||||
* `nativeCheckInputs ? []`: Dependencies needed for running the [`checkPhase`](#ssec-check-phase). These
|
||||
are added to [`nativeBuildInputs`](#var-stdenv-nativeBuildInputs) when [`doCheck = true`](#var-stdenv-doCheck). Items listed in
|
||||
`tests_require` go here.
|
||||
* `propagatedBuildInputs ? []`: Aside from propagating dependencies,
|
||||
* `dependencies ? []`: Aside from propagating dependencies,
|
||||
`buildPythonPackage` also injects code into and wraps executables with the
|
||||
paths included in this list. Items listed in `install_requires` go here.
|
||||
* `optional-dependencies ? { }`: Optional feature flagged dependencies. Items listed in `extras_requires` go here.
|
||||
|
||||
Aside from propagating dependencies,
|
||||
`buildPythonPackage` also injects code into and wraps executables with the
|
||||
paths included in this list. Items listed in `extras_requires` go here.
|
||||
|
||||
##### Overriding Python packages {#overriding-python-packages}
|
||||
|
||||
|
@ -299,16 +303,17 @@ python3Packages.buildPythonApplication rec {
|
|||
hash = "sha256-Pe229rT0aHwA98s+nTHQMEFKZPo/yw6sot8MivFDvAw=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = with python3Packages; [
|
||||
build-system = with python3Packages; [
|
||||
setuptools
|
||||
wheel
|
||||
];
|
||||
|
||||
propagatedBuildInputs = with python3Packages; [
|
||||
dependencies = with python3Packages; [
|
||||
tornado
|
||||
python-daemon
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
# ...
|
||||
};
|
||||
}
|
||||
|
@ -462,14 +467,14 @@ are used in [`buildPythonPackage`](#buildpythonpackage-function).
|
|||
- `eggBuildHook` to skip building for eggs.
|
||||
- `eggInstallHook` to install eggs.
|
||||
- `pipBuildHook` to build a wheel using `pip` and PEP 517. Note a build system
|
||||
(e.g. `setuptools` or `flit`) should still be added as `nativeBuildInput`.
|
||||
(e.g. `setuptools` or `flit`) should still be added as `build-system`.
|
||||
- `pypaBuildHook` to build a wheel using
|
||||
[`pypa/build`](https://pypa-build.readthedocs.io/en/latest/index.html) and
|
||||
PEP 517/518. Note a build system (e.g. `setuptools` or `flit`) should still
|
||||
be added as `nativeBuildInput`.
|
||||
be added as `build-system`.
|
||||
- `pipInstallHook` to install wheels.
|
||||
- `pytestCheckHook` to run tests with `pytest`. See [example usage](#using-pytestcheckhook).
|
||||
- `pythonCatchConflictsHook` to check whether a Python package is not already existing.
|
||||
- `pythonCatchConflictsHook` to fail if the package depends on two different versions of the same dependency.
|
||||
- `pythonImportsCheckHook` to check whether importing the listed modules works.
|
||||
- `pythonRelaxDepsHook` will relax Python dependencies restrictions for the package.
|
||||
See [example usage](#using-pythonrelaxdepshook).
|
||||
|
@ -881,7 +886,7 @@ buildPythonPackage rec {
|
|||
hash = "sha256-CP3V73yWSArRHBLUct4hrNMjWZlvaaUlkpm1QP66RWA=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
build-system = [
|
||||
setuptools
|
||||
wheel
|
||||
];
|
||||
|
@ -895,12 +900,12 @@ buildPythonPackage rec {
|
|||
"toolz.dicttoolz"
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
changelog = "https://github.com/pytoolz/toolz/releases/tag/${version}";
|
||||
homepage = "https://github.com/pytoolz/toolz";
|
||||
description = "List processing tools and functional utilities";
|
||||
license = licenses.bsd3;
|
||||
maintainers = with maintainers; [ fridh ];
|
||||
license = lib.licenses.bsd3;
|
||||
maintainers = with lib.maintainers; [ fridh ];
|
||||
};
|
||||
}
|
||||
```
|
||||
|
@ -941,7 +946,7 @@ with import <nixpkgs> {};
|
|||
hash = "sha256-CP3V73yWSArRHBLUct4hrNMjWZlvaaUlkpm1QP66RWA=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
build-system = [
|
||||
python311.pkgs.setuptools
|
||||
python311.pkgs.wheel
|
||||
];
|
||||
|
@ -977,13 +982,15 @@ that we introduced with the `let` expression.
|
|||
|
||||
#### Handling dependencies {#handling-dependencies}
|
||||
|
||||
Our example, `toolz`, does not have any dependencies on other Python packages or
|
||||
system libraries. According to the manual, [`buildPythonPackage`](#buildpythonpackage-function) uses the
|
||||
arguments [`buildInputs`](#var-stdenv-buildInputs) and [`propagatedBuildInputs`](#var-stdenv-propagatedBuildInputs) to specify dependencies. If
|
||||
something is exclusively a build-time dependency, then the dependency should be
|
||||
included in [`buildInputs`](#var-stdenv-buildInputs), but if it is (also) a runtime dependency, then it
|
||||
should be added to [`propagatedBuildInputs`](#var-stdenv-propagatedBuildInputs). Test dependencies are considered
|
||||
build-time dependencies and passed to [`nativeCheckInputs`](#var-stdenv-nativeCheckInputs).
|
||||
Our example, `toolz`, does not have any dependencies on other Python packages or system libraries.
|
||||
[`buildPythonPackage`](#buildpythonpackage-function) uses the the following arguments in the following circumstances:
|
||||
|
||||
- `dependencies` - For Python runtime dependencies.
|
||||
- `build-system` - For Python build-time requirements.
|
||||
- [`buildInputs`](#var-stdenv-buildInputs) - For non-Python build-time requirements.
|
||||
- [`nativeCheckInputs`](#var-stdenv-nativeCheckInputs) - For test dependencies
|
||||
|
||||
Dependencies can belong to multiple arguments, for example if something is both a build time requirement & a runtime dependency.
|
||||
|
||||
The following example shows which arguments are given to [`buildPythonPackage`](#buildpythonpackage-function) in
|
||||
order to build [`datashape`](https://github.com/blaze/datashape).
|
||||
|
@ -1013,12 +1020,12 @@ buildPythonPackage rec {
|
|||
hash = "sha256-FLLvdm1MllKrgTGC6Gb0k0deZeVYvtCCLji/B7uhong=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
build-system = [
|
||||
setuptools
|
||||
wheel
|
||||
];
|
||||
|
||||
propagatedBuildInputs = [
|
||||
dependencies = [
|
||||
multipledispatch
|
||||
numpy
|
||||
python-dateutil
|
||||
|
@ -1028,12 +1035,12 @@ buildPythonPackage rec {
|
|||
pytest
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
changelog = "https://github.com/blaze/datashape/releases/tag/${version}";
|
||||
homepage = "https://github.com/ContinuumIO/datashape";
|
||||
description = "A data description language";
|
||||
license = licenses.bsd2;
|
||||
maintainers = with maintainers; [ fridh ];
|
||||
license = lib.licenses.bsd2;
|
||||
maintainers = with lib.maintainers; [ fridh ];
|
||||
};
|
||||
}
|
||||
```
|
||||
|
@ -1041,7 +1048,7 @@ buildPythonPackage rec {
|
|||
We can see several runtime dependencies, `numpy`, `multipledispatch`, and
|
||||
`python-dateutil`. Furthermore, we have [`nativeCheckInputs`](#var-stdenv-nativeCheckInputs) with `pytest`.
|
||||
`pytest` is a test runner and is only used during the [`checkPhase`](#ssec-check-phase) and is
|
||||
therefore not added to [`propagatedBuildInputs`](#var-stdenv-propagatedBuildInputs).
|
||||
therefore not added to `dependencies`.
|
||||
|
||||
In the previous case we had only dependencies on other Python packages to consider.
|
||||
Occasionally you have also system libraries to consider. E.g., `lxml` provides
|
||||
|
@ -1068,7 +1075,7 @@ buildPythonPackage rec {
|
|||
hash = "sha256-s9NiusRxFydHzaNRMjjxFcvWxfi45jGb9ql6eJJyQJk=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
build-system = [
|
||||
setuptools
|
||||
wheel
|
||||
];
|
||||
|
@ -1078,12 +1085,12 @@ buildPythonPackage rec {
|
|||
libxslt
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
changelog = "https://github.com/lxml/lxml/releases/tag/lxml-${version}";
|
||||
description = "Pythonic binding for the libxml2 and libxslt libraries";
|
||||
homepage = "https://lxml.de";
|
||||
license = licenses.bsd3;
|
||||
maintainers = with maintainers; [ sjourdois ];
|
||||
license = lib.licenses.bsd3;
|
||||
maintainers = with lib.maintainers; [ sjourdois ];
|
||||
};
|
||||
}
|
||||
```
|
||||
|
@ -1125,7 +1132,7 @@ buildPythonPackage rec {
|
|||
hash = "sha256-9ru2r6kwhUCaskiFoaPNuJCfCVoUL01J40byvRt4kHQ=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
build-system = [
|
||||
setuptools
|
||||
wheel
|
||||
];
|
||||
|
@ -1136,7 +1143,7 @@ buildPythonPackage rec {
|
|||
fftwLongDouble
|
||||
];
|
||||
|
||||
propagatedBuildInputs = [
|
||||
dependencies = [
|
||||
numpy
|
||||
scipy
|
||||
];
|
||||
|
@ -1149,12 +1156,12 @@ buildPythonPackage rec {
|
|||
# Tests cannot import pyfftw. pyfftw works fine though.
|
||||
doCheck = false;
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
changelog = "https://github.com/pyFFTW/pyFFTW/releases/tag/v${version}";
|
||||
description = "A pythonic wrapper around FFTW, the FFT library, presenting a unified interface for all the supported transforms";
|
||||
homepage = "http://hgomersall.github.com/pyFFTW";
|
||||
license = with licenses; [ bsd2 bsd3 ];
|
||||
maintainers = with maintainers; [ fridh ];
|
||||
license = with lib.licenses; [ bsd2 bsd3 ];
|
||||
maintainers = with lib.maintainers; [ fridh ];
|
||||
};
|
||||
}
|
||||
```
|
||||
|
@ -1459,9 +1466,7 @@ mode is activated.
|
|||
|
||||
In the following example, we create a simple environment that has a Python 3.11
|
||||
version of our package in it, as well as its dependencies and other packages we
|
||||
like to have in the environment, all specified with [`propagatedBuildInputs`](#var-stdenv-propagatedBuildInputs).
|
||||
Indeed, we can just add any package we like to have in our environment to
|
||||
[`propagatedBuildInputs`](#var-stdenv-propagatedBuildInputs).
|
||||
like to have in the environment, all specified with `dependencies`.
|
||||
|
||||
```nix
|
||||
with import <nixpkgs> {};
|
||||
|
@ -1470,9 +1475,11 @@ with python311Packages;
|
|||
buildPythonPackage rec {
|
||||
name = "mypackage";
|
||||
src = ./path/to/package/source;
|
||||
propagatedBuildInputs = [
|
||||
dependencies = [
|
||||
pytest
|
||||
numpy
|
||||
];
|
||||
propagatedBuildInputs = [
|
||||
pkgs.libsndfile
|
||||
];
|
||||
}
|
||||
|
@ -1519,17 +1526,17 @@ buildPythonPackage rec {
|
|||
hash = "sha256-CP3V73yWSArRHBLUct4hrNMjWZlvaaUlkpm1QP66RWA=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
build-system = [
|
||||
setuptools
|
||||
wheel
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
changelog = "https://github.com/pytoolz/toolz/releases/tag/${version}";
|
||||
homepage = "https://github.com/pytoolz/toolz/";
|
||||
description = "List processing tools and functional utilities";
|
||||
license = licenses.bsd3;
|
||||
maintainers = with maintainers; [ fridh ];
|
||||
license = lib.licenses.bsd3;
|
||||
maintainers = with lib.maintainers; [ fridh ];
|
||||
};
|
||||
}
|
||||
```
|
||||
|
@ -1903,8 +1910,8 @@ configure alternatives](#sec-overlays-alternatives-blas-lapack)".
|
|||
|
||||
In a `setup.py` or `setup.cfg` it is common to declare dependencies:
|
||||
|
||||
* `setup_requires` corresponds to [`nativeBuildInputs`](#var-stdenv-nativeBuildInputs)
|
||||
* `install_requires` corresponds to [`propagatedBuildInputs`](#var-stdenv-propagatedBuildInputs)
|
||||
* `setup_requires` corresponds to `build-system`
|
||||
* `install_requires` corresponds to `dependencies`
|
||||
* `tests_require` corresponds to [`nativeCheckInputs`](#var-stdenv-nativeCheckInputs)
|
||||
|
||||
### How to enable interpreter optimizations? {#optimizations}
|
||||
|
@ -1928,12 +1935,10 @@ in mypython
|
|||
|
||||
Some packages define optional dependencies for additional features. With
|
||||
`setuptools` this is called `extras_require` and `flit` calls it
|
||||
`extras-require`, while PEP 621 calls these `optional-dependencies`. A
|
||||
method for supporting this is by declaring the extras of a package in its
|
||||
`passthru`, e.g. in case of the package `dask`
|
||||
`extras-require`, while PEP 621 calls these `optional-dependencies`.
|
||||
|
||||
```nix
|
||||
passthru.optional-dependencies = {
|
||||
optional-dependencies = {
|
||||
complete = [ distributed ];
|
||||
};
|
||||
```
|
||||
|
@ -1941,11 +1946,13 @@ passthru.optional-dependencies = {
|
|||
and letting the package requiring the extra add the list to its dependencies
|
||||
|
||||
```nix
|
||||
propagatedBuildInputs = [
|
||||
dependencies = [
|
||||
...
|
||||
] ++ dask.optional-dependencies.complete;
|
||||
```
|
||||
|
||||
This method is using `passthru`, meaning that changing `optional-dependencies` of a package won't cause it to rebuild.
|
||||
|
||||
Note this method is preferred over adding parameters to builders, as that can
|
||||
result in packages depending on different variants and thereby causing
|
||||
collisions.
|
||||
|
@ -2008,6 +2015,10 @@ example of such a situation is when `py.test` is used.
|
|||
|
||||
* Tests that attempt to access `$HOME` can be fixed by using the following
|
||||
work-around before running tests (e.g. `preCheck`): `export HOME=$(mktemp -d)`
|
||||
* Compiling with Cython causes tests to fail with a `ModuleNotLoadedError`.
|
||||
This can be fixed with two changes in the derivation: 1) replacing `pytest` with
|
||||
`pytestCheckHook` and 2) adding a `preCheck` containing `cd $out` to run
|
||||
tests within the built output.
|
||||
|
||||
## Contributing {#contributing}
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@ an extra indirection.
|
|||
## Nix expression for a Qt package (default.nix) {#qt-default-nix}
|
||||
|
||||
```nix
|
||||
{ stdenv, lib, qt6, wrapQtAppsHook }:
|
||||
{ stdenv, qt6 }:
|
||||
|
||||
stdenv.mkDerivation {
|
||||
pname = "myapp";
|
||||
|
@ -23,10 +23,12 @@ stdenv.mkDerivation {
|
|||
}
|
||||
```
|
||||
|
||||
The same goes for Qt 5 where libraries and tools are under `libsForQt5`.
|
||||
|
||||
Any Qt package should include `wrapQtAppsHook` in `nativeBuildInputs`, or explicitly set `dontWrapQtApps` to bypass generating the wrappers.
|
||||
|
||||
::: {.note}
|
||||
Graphical Linux applications should also include `qtwayland` in `buildInputs`, to ensure the Wayland platform plugin is available.
|
||||
Qt 6 graphical applications should also include `qtwayland` in `buildInputs` on Linux (but not on platforms e.g. Darwin, where `qtwayland` is not available), to ensure the Wayland platform plugin is available.
|
||||
|
||||
This may become default in the future, see [NixOS/nixpkgs#269674](https://github.com/NixOS/nixpkgs/pull/269674).
|
||||
:::
|
||||
|
|
|
@ -35,20 +35,20 @@ rustPlatform.buildRustPackage rec {
|
|||
|
||||
cargoHash = "sha256-jtBw4ahSl88L0iuCXxQgZVm1EcboWRJMNtjxLVTtzts=";
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
description = "A fast line-oriented regex search tool, similar to ag and ack";
|
||||
homepage = "https://github.com/BurntSushi/ripgrep";
|
||||
license = licenses.unlicense;
|
||||
license = lib.licenses.unlicense;
|
||||
maintainers = [];
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
`buildRustPackage` requires either the `cargoHash` or the `cargoSha256`
|
||||
attribute which is computed over all crate sources of this package.
|
||||
`cargoSha256` is used for traditional Nix SHA-256 hashes. `cargoHash` should
|
||||
instead be used for [SRI](https://www.w3.org/TR/SRI/) hashes and should be
|
||||
preferred. For example:
|
||||
`buildRustPackage` requires either a `cargoHash` (preferred) or a
|
||||
`cargoSha256` attribute, computed over all crate sources of this package.
|
||||
`cargoHash` supports [SRI](https://www.w3.org/TR/SRI/) hashes and should be
|
||||
preferred over `cargoSha256` which was used for traditional Nix SHA-256 hashes.
|
||||
For example:
|
||||
|
||||
```nix
|
||||
cargoHash = "sha256-l1vL2ZdtDRxSGvP0X/l3nMw8+6WF67KPutJEzUROjg8=";
|
||||
|
@ -64,18 +64,18 @@ Both types of hashes are permitted when contributing to nixpkgs. The
|
|||
Cargo hash is obtained by inserting a fake checksum into the
|
||||
expression and building the package once. The correct checksum can
|
||||
then be taken from the failed build. A fake hash can be used for
|
||||
`cargoSha256` as follows:
|
||||
|
||||
```nix
|
||||
cargoSha256 = lib.fakeSha256;
|
||||
```
|
||||
|
||||
For `cargoHash` you can use:
|
||||
`cargoHash` as follows:
|
||||
|
||||
```nix
|
||||
cargoHash = lib.fakeHash;
|
||||
```
|
||||
|
||||
For `cargoSha256` you can use:
|
||||
|
||||
```nix
|
||||
cargoSha256 = lib.fakeSha256;
|
||||
```
|
||||
|
||||
Per the instructions in the [Cargo Book](https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html)
|
||||
best practices guide, Rust applications should always commit the `Cargo.lock`
|
||||
file in git to ensure a reproducible build. However, a few packages do not, and
|
||||
|
@ -90,7 +90,7 @@ directory into a tar.gz archive.
|
|||
The tarball with vendored dependencies contains a directory with the
|
||||
package's `name`, which is normally composed of `pname` and
|
||||
`version`. This means that the vendored dependencies hash
|
||||
(`cargoSha256`/`cargoHash`) is dependent on the package name and
|
||||
(`cargoHash`/`cargoSha256`) is dependent on the package name and
|
||||
version. The `cargoDepsName` attribute can be used to use another name
|
||||
for the directory of vendored dependencies. For example, the hash can
|
||||
be made invariant to the version by setting `cargoDepsName` to
|
||||
|
@ -115,7 +115,7 @@ rustPlatform.buildRustPackage rec {
|
|||
|
||||
### Importing a `Cargo.lock` file {#importing-a-cargo.lock-file}
|
||||
|
||||
Using `cargoSha256` or `cargoHash` is tedious when using
|
||||
Using a vendored hash (`cargoHash`/`cargoSha256`) is tedious when using
|
||||
`buildRustPackage` within a project, since it requires that the hash
|
||||
is updated after every change to `Cargo.lock`. Therefore,
|
||||
`buildRustPackage` also supports vendoring dependencies directly from
|
||||
|
@ -903,8 +903,8 @@ with import <nixpkgs>
|
|||
};
|
||||
let
|
||||
rustPlatform = makeRustPlatform {
|
||||
cargo = rust-bin.stable.latest.minimal;
|
||||
rustc = rust-bin.stable.latest.minimal;
|
||||
cargo = rust-bin.selectLatestNightlyWith (toolchain: toolchain.default);
|
||||
rustc = rust-bin.selectLatestNightlyWith (toolchain: toolchain.default);
|
||||
};
|
||||
in
|
||||
|
||||
|
@ -923,11 +923,11 @@ rustPlatform.buildRustPackage rec {
|
|||
|
||||
doCheck = false;
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
description = "A fast line-oriented regex search tool, similar to ag and ack";
|
||||
homepage = "https://github.com/BurntSushi/ripgrep";
|
||||
license = with licenses; [ mit unlicense ];
|
||||
maintainers = with maintainers; [];
|
||||
license = with lib.licenses; [ mit unlicense ];
|
||||
maintainers = with lib.maintainers; [];
|
||||
};
|
||||
}
|
||||
```
|
||||
|
|
|
@ -181,11 +181,11 @@ let
|
|||
runHook postInstall
|
||||
'';
|
||||
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
description = "A LaTeX2e class for overhead transparencies";
|
||||
license = licenses.unfreeRedistributable;
|
||||
maintainers = with maintainers; [ veprbl ];
|
||||
platforms = platforms.all;
|
||||
license = lib.licenses.unfreeRedistributable;
|
||||
maintainers = with lib.maintainers; [ veprbl ];
|
||||
platforms = lib.platforms.all;
|
||||
};
|
||||
};
|
||||
|
||||
|
|
|
@ -318,5 +318,7 @@
|
|||
"passwd(5)": "https://man.archlinux.org/man/passwd.5",
|
||||
"group(5)": "https://man.archlinux.org/man/group.5",
|
||||
"login.defs(5)": "https://man.archlinux.org/man/login.defs.5",
|
||||
"nix-shell(1)": "https://nixos.org/manual/nix/stable/command-ref/nix-shell.html"
|
||||
"unshare(1)": "https://man.archlinux.org/man/unshare.1.en",
|
||||
"nix-shell(1)": "https://nixos.org/manual/nix/stable/command-ref/nix-shell.html",
|
||||
"mksquashfs(1)": "https://man.archlinux.org/man/extra/squashfs-tools/mksquashfs.1.en"
|
||||
}
|
||||
|
|
|
@ -1,22 +0,0 @@
|
|||
.docbook .xref img[src^=images\/callouts\/],
|
||||
.screen img,
|
||||
.programlisting img,
|
||||
.literallayout img,
|
||||
.synopsis img {
|
||||
width: 1em;
|
||||
}
|
||||
|
||||
.calloutlist img {
|
||||
width: 1.5em;
|
||||
}
|
||||
|
||||
.prompt,
|
||||
.screen img,
|
||||
.programlisting img,
|
||||
.literallayout img,
|
||||
.synopsis img {
|
||||
-moz-user-select: none;
|
||||
-webkit-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
}
|
|
@ -1,5 +1,12 @@
|
|||
# darwin.linux-builder {#sec-darwin-builder}
|
||||
|
||||
:::{.warning}
|
||||
By default, `darwin.linux-builder` uses a publicly-known private SSH **host key** (this is different from the SSH key used by the user that connects to the builder).
|
||||
|
||||
Given the intended use case for it (a Linux builder that runs **on the same machine**), this shouldn't be an issue.
|
||||
However, if you plan to deviate from this use case in any way (e.g. by exposing this builder to remote machines), you should understand the security implications of doing so and take any appropriate measures.
|
||||
:::
|
||||
|
||||
`darwin.linux-builder` provides a way to bootstrap a Linux remote builder on a macOS machine.
|
||||
|
||||
This requires macOS version 12.4 or later.
|
||||
|
@ -97,8 +104,9 @@ $ sudo launchctl kickstart -k system/org.nixos.nix-daemon
|
|||
{ virtualisation = {
|
||||
host.pkgs = pkgs;
|
||||
darwin-builder.workingDirectory = "/var/lib/darwin-builder";
|
||||
darwin-builder.hostPort = 22;
|
||||
};
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
in {
|
||||
|
@ -110,7 +118,9 @@ $ sudo launchctl kickstart -k system/org.nixos.nix-daemon
|
|||
{
|
||||
nix.distributedBuilds = true;
|
||||
nix.buildMachines = [{
|
||||
hostName = "ssh://builder@localhost";
|
||||
hostName = "localhost";
|
||||
sshUser = "builder";
|
||||
sshKey = "/etc/nix/builder_ed25519";
|
||||
system = linuxSystem;
|
||||
maxJobs = 4;
|
||||
supportedFeatures = [ "kvm" "benchmark" "big-parallel" ];
|
||||
|
|
|
@ -3,16 +3,16 @@
|
|||
Nix packages can declare *meta-attributes* that contain information about a package such as a description, its homepage, its license, and so on. For instance, the GNU Hello package has a `meta` declaration like this:
|
||||
|
||||
```nix
|
||||
meta = with lib; {
|
||||
meta = {
|
||||
description = "A program that produces a familiar, friendly greeting";
|
||||
longDescription = ''
|
||||
GNU Hello is a program that prints "Hello, world!" when you run it.
|
||||
It is fully customizable.
|
||||
'';
|
||||
homepage = "https://www.gnu.org/software/hello/manual/";
|
||||
license = licenses.gpl3Plus;
|
||||
maintainers = with maintainers; [ eelco ];
|
||||
platforms = platforms.all;
|
||||
license = lib.licenses.gpl3Plus;
|
||||
maintainers = with lib.maintainers; [ eelco ];
|
||||
platforms = lib.platforms.all;
|
||||
};
|
||||
```
|
||||
|
||||
|
|
587
doc/style.css
587
doc/style.css
|
@ -1,291 +1,416 @@
|
|||
/* Copied from http://bakefile.sourceforge.net/, which appears
|
||||
licensed under the GNU GPL. */
|
||||
|
||||
|
||||
/***************************************************************************
|
||||
Basic headers and text:
|
||||
***************************************************************************/
|
||||
|
||||
body
|
||||
{
|
||||
font-family: "Nimbus Sans L", sans-serif;
|
||||
font-size: 1em;
|
||||
background: white;
|
||||
margin: 2em 1em 2em 1em;
|
||||
html {
|
||||
line-height: 1.15;
|
||||
-webkit-text-size-adjust: 100%;
|
||||
}
|
||||
|
||||
h1, h2, h3, h4
|
||||
{
|
||||
color: #005aa0;
|
||||
body {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
h1 /* title */
|
||||
{
|
||||
font-size: 200%;
|
||||
}
|
||||
|
||||
h2 /* chapters, appendices, subtitle */
|
||||
{
|
||||
font-size: 180%;
|
||||
}
|
||||
|
||||
div.book
|
||||
{
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
div.book > div
|
||||
{
|
||||
/*
|
||||
* based on https://medium.com/@zkareemz/golden-ratio-62b3b6d4282a
|
||||
* we do 70 characters per line to fit code listings better
|
||||
* 70 * (font-size / 1.618)
|
||||
* expression for emacs:
|
||||
* (* 70 (/ 1 1.618))
|
||||
*/
|
||||
max-width: 43.2em;
|
||||
text-align: left;
|
||||
.book {
|
||||
margin: auto;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
/* Extra space between chapters, appendices. */
|
||||
div.chapter > div.titlepage h2, div.appendix > div.titlepage h2
|
||||
{
|
||||
margin-top: 1.5em;
|
||||
@media screen and (min-width: 768px) {
|
||||
.book {
|
||||
max-width: 46rem;
|
||||
}
|
||||
}
|
||||
|
||||
div.section > div.titlepage h2 /* sections */
|
||||
{
|
||||
font-size: 150%;
|
||||
margin-top: 1.5em;
|
||||
@media screen and (min-width: 992px) {
|
||||
.book {
|
||||
max-width: 60rem;
|
||||
}
|
||||
}
|
||||
|
||||
h3 /* subsections */
|
||||
{
|
||||
font-size: 125%;
|
||||
@media screen and (min-width: 1200px) {
|
||||
.book {
|
||||
max-width: 73rem;
|
||||
}
|
||||
}
|
||||
|
||||
div.simplesect h2
|
||||
{
|
||||
font-size: 110%;
|
||||
.book .list-of-examples {
|
||||
display: none;
|
||||
}
|
||||
|
||||
div.appendix h3
|
||||
{
|
||||
font-size: 150%;
|
||||
margin-top: 1.5em;
|
||||
h1 {
|
||||
font-size: 2em;
|
||||
margin: 0.67em 0;
|
||||
}
|
||||
|
||||
div.refnamediv h2, div.refsynopsisdiv h2, div.refsection h2 /* refentry parts */
|
||||
{
|
||||
margin-top: 1.4em;
|
||||
font-size: 125%;
|
||||
hr {
|
||||
box-sizing: content-box;
|
||||
height: 0;
|
||||
overflow: visible;
|
||||
}
|
||||
|
||||
div.refsection h3
|
||||
{
|
||||
font-size: 110%;
|
||||
pre {
|
||||
font-family: monospace, monospace;
|
||||
font-size: 1em;
|
||||
}
|
||||
|
||||
|
||||
/***************************************************************************
|
||||
Examples:
|
||||
***************************************************************************/
|
||||
|
||||
div.example
|
||||
{
|
||||
border: 1px solid #b0b0b0;
|
||||
padding: 6px 6px;
|
||||
margin-left: 1.5em;
|
||||
margin-right: 1.5em;
|
||||
background: #f4f4f8;
|
||||
border-radius: 0.4em;
|
||||
box-shadow: 0.4em 0.4em 0.5em #e0e0e0;
|
||||
a {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
div.example p.title
|
||||
{
|
||||
margin-top: 0em;
|
||||
strong {
|
||||
font-weight: bolder;
|
||||
}
|
||||
|
||||
div.example pre
|
||||
{
|
||||
box-shadow: none;
|
||||
code {
|
||||
font-family: monospace, monospace;
|
||||
font-size: 1em;
|
||||
}
|
||||
|
||||
|
||||
/***************************************************************************
|
||||
Screen dumps:
|
||||
***************************************************************************/
|
||||
|
||||
pre.screen, pre.programlisting
|
||||
{
|
||||
border: 1px solid #b0b0b0;
|
||||
padding: 3px 3px;
|
||||
margin-left: 0.5em;
|
||||
margin-right: 0.5em;
|
||||
|
||||
background: #f4f4f8;
|
||||
font-family: monospace;
|
||||
border-radius: 0.4em;
|
||||
box-shadow: 0.4em 0.4em 0.5em #e0e0e0;
|
||||
sup {
|
||||
font-size: 75%;
|
||||
line-height: 0;
|
||||
position: relative;
|
||||
vertical-align: baseline;
|
||||
}
|
||||
|
||||
div.example pre.programlisting
|
||||
{
|
||||
border: 0px;
|
||||
padding: 0 0;
|
||||
margin: 0 0 0 0;
|
||||
sup {
|
||||
top: -0.5em;
|
||||
}
|
||||
|
||||
/***************************************************************************
|
||||
Notes, warnings etc:
|
||||
***************************************************************************/
|
||||
|
||||
.note, .warning
|
||||
{
|
||||
border: 1px solid #b0b0b0;
|
||||
padding: 3px 3px;
|
||||
margin-left: 1.5em;
|
||||
margin-right: 1.5em;
|
||||
margin-bottom: 1em;
|
||||
padding: 0.3em 0.3em 0.3em 0.3em;
|
||||
background: #fffff5;
|
||||
border-radius: 0.4em;
|
||||
box-shadow: 0.4em 0.4em 0.5em #e0e0e0;
|
||||
::-webkit-file-upload-button {
|
||||
-webkit-appearance: button;
|
||||
font: inherit;
|
||||
}
|
||||
|
||||
div.note, div.warning
|
||||
{
|
||||
font-style: italic;
|
||||
pre {
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
div.note h3, div.warning h3
|
||||
{
|
||||
color: red;
|
||||
*,
|
||||
*::before,
|
||||
*::after {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
html {
|
||||
font-size: 100%;
|
||||
padding-right: 0.5em;
|
||||
display: inline;
|
||||
line-height: 1.77777778;
|
||||
}
|
||||
|
||||
div.note p, div.warning p
|
||||
{
|
||||
margin-bottom: 0em;
|
||||
@media screen and (min-width: 4000px) {
|
||||
html {
|
||||
background: #000;
|
||||
}
|
||||
|
||||
html body {
|
||||
margin: auto;
|
||||
max-width: 250rem;
|
||||
}
|
||||
}
|
||||
|
||||
div.note h3 + p, div.warning h3 + p
|
||||
{
|
||||
display: inline;
|
||||
@media screen and (max-width: 320px) {
|
||||
html {
|
||||
font-size: calc(16 / 320 * 100vw);
|
||||
}
|
||||
}
|
||||
|
||||
div.note h3
|
||||
{
|
||||
color: blue;
|
||||
font-size: 100%;
|
||||
body {
|
||||
font-size: 1rem;
|
||||
font-family: 'Roboto', sans-serif;
|
||||
font-weight: 300;
|
||||
color: #000000;
|
||||
background-color: #ffffff;
|
||||
min-height: 100vh;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
div.navfooter *
|
||||
{
|
||||
font-size: 90%;
|
||||
@media screen and (max-width: 767.9px) {
|
||||
body {
|
||||
padding-left: 1rem;
|
||||
padding-right: 1rem;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/***************************************************************************
|
||||
Links colors and highlighting:
|
||||
***************************************************************************/
|
||||
|
||||
a { text-decoration: none; }
|
||||
a:hover { text-decoration: underline; }
|
||||
a:link { color: #0048b3; }
|
||||
a:visited { color: #002a6a; }
|
||||
|
||||
|
||||
/***************************************************************************
|
||||
Table of contents:
|
||||
***************************************************************************/
|
||||
|
||||
div.toc
|
||||
{
|
||||
font-size: 90%;
|
||||
a {
|
||||
text-decoration: none;
|
||||
border-bottom: 1px solid;
|
||||
color: #405d99;
|
||||
}
|
||||
|
||||
div.toc dl
|
||||
{
|
||||
margin-top: 0em;
|
||||
margin-bottom: 0em;
|
||||
ul {
|
||||
padding: 0;
|
||||
margin-top: 0;
|
||||
margin-right: 0;
|
||||
margin-bottom: 1rem;
|
||||
margin-left: 1rem;
|
||||
}
|
||||
|
||||
|
||||
/***************************************************************************
|
||||
Special elements:
|
||||
***************************************************************************/
|
||||
|
||||
tt, code
|
||||
{
|
||||
color: #400000;
|
||||
}
|
||||
|
||||
.term
|
||||
{
|
||||
font-weight: bold;
|
||||
|
||||
}
|
||||
|
||||
div.variablelist dd p, div.glosslist dd p
|
||||
{
|
||||
margin-top: 0em;
|
||||
}
|
||||
|
||||
div.variablelist dd, div.glosslist dd
|
||||
{
|
||||
margin-left: 1.5em;
|
||||
}
|
||||
|
||||
div.glosslist dt
|
||||
{
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.varname
|
||||
{
|
||||
color: #400000;
|
||||
}
|
||||
|
||||
span.command strong
|
||||
{
|
||||
font-weight: normal;
|
||||
color: #400000;
|
||||
}
|
||||
|
||||
div.calloutlist table
|
||||
{
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
table
|
||||
{
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
box-shadow: 0.4em 0.4em 0.5em #e0e0e0;
|
||||
width: 100%;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
table.simplelist
|
||||
{
|
||||
thead th {
|
||||
text-align: left;
|
||||
color: #005aa0;
|
||||
}
|
||||
|
||||
hr {
|
||||
margin-top: 1rem;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-weight: 800;
|
||||
line-height: 110%;
|
||||
font-size: 200%;
|
||||
margin-bottom: 1rem;
|
||||
color: #6586c8;
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-weight: 800;
|
||||
line-height: 110%;
|
||||
font-size: 170%;
|
||||
margin-bottom: 0.625rem;
|
||||
color: #6586c8;
|
||||
}
|
||||
|
||||
h2:not(:first-child) {
|
||||
margin-top: 1rem;
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-weight: 800;
|
||||
line-height: 110%;
|
||||
margin-bottom: 1rem;
|
||||
font-size: 150%;
|
||||
color: #6586c8;
|
||||
}
|
||||
|
||||
.note h3,
|
||||
.tip h3,
|
||||
.warning h3,
|
||||
.caution h3,
|
||||
.important h3 {
|
||||
font-size: 120%;
|
||||
}
|
||||
|
||||
h4 {
|
||||
font-weight: 800;
|
||||
line-height: 110%;
|
||||
margin-bottom: 1rem;
|
||||
font-size: 140%;
|
||||
color: #6586c8;
|
||||
}
|
||||
|
||||
h5 {
|
||||
font-weight: 800;
|
||||
line-height: 110%;
|
||||
margin-bottom: 1rem;
|
||||
font-size: 130%;
|
||||
color: #6a6a6a;
|
||||
}
|
||||
|
||||
h6 {
|
||||
font-weight: 800;
|
||||
line-height: 110%;
|
||||
margin-bottom: 1rem;
|
||||
font-size: 120%
|
||||
}
|
||||
|
||||
strong {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
p {
|
||||
margin-top: 0;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
dt>*:first-child,
|
||||
dd>*:first-child {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
dt>*:last-child,
|
||||
dd>*:last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
pre,
|
||||
code {
|
||||
font-family: monospace;
|
||||
}
|
||||
|
||||
code {
|
||||
color: #ff8657;
|
||||
background: #f4f4f4;
|
||||
display: inline-block;
|
||||
padding: 0 0.5rem;
|
||||
border: 1px solid #d8d8d8;
|
||||
border-radius: 0.5rem;
|
||||
line-height: 1.57777778;
|
||||
}
|
||||
|
||||
div.book .programlisting,
|
||||
div.appendix .programlisting {
|
||||
border-radius: 0.5rem;
|
||||
padding: 1rem;
|
||||
overflow: auto;
|
||||
background: #f2f8fd;
|
||||
color: #000000;
|
||||
}
|
||||
|
||||
div.book .note,
|
||||
div.book .tip,
|
||||
div.book .warning,
|
||||
div.book .caution,
|
||||
div.book .important,
|
||||
div.appendix .note,
|
||||
div.appendix .tip,
|
||||
div.appendix .warning,
|
||||
div.appendix .caution,
|
||||
div.appendix .important {
|
||||
margin-bottom: 1rem;
|
||||
border-radius: 0.5rem;
|
||||
padding: 1.5rem;
|
||||
overflow: auto;
|
||||
background: #f4f4f4;
|
||||
}
|
||||
|
||||
div.book .note>.title,
|
||||
div.book .tip>.title,
|
||||
div.book .warning>.title,
|
||||
div.book .caution>.title,
|
||||
div.book .important>.title,
|
||||
div.appendix .note>.title,
|
||||
div.appendix .tip>.title,
|
||||
div.appendix .warning>.title,
|
||||
div.appendix .caution>.title,
|
||||
div.appendix .important>.title {
|
||||
font-weight: 800;
|
||||
/* font-family: 'Overpass', serif; */
|
||||
line-height: 110%;
|
||||
margin-bottom: 1rem;
|
||||
color: inherit;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
div.book .note> :first-child,
|
||||
div.book .tip> :first-child,
|
||||
div.book .warning> :first-child,
|
||||
div.book .caution> :first-child,
|
||||
div.book .important> :first-child,
|
||||
div.appendix .note> :first-child,
|
||||
div.appendix .tip> :first-child,
|
||||
div.appendix .warning> :first-child,
|
||||
div.appendix .caution> :first-child,
|
||||
div.appendix .important> :first-child {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
div.book .note> :last-child,
|
||||
div.book .tip> :last-child,
|
||||
div.book .warning> :last-child,
|
||||
div.book .caution> :last-child,
|
||||
div.book .important> :last-child,
|
||||
div.appendix .note> :last-child,
|
||||
div.appendix .tip> :last-child,
|
||||
div.appendix .warning> :last-child,
|
||||
div.appendix .caution> :last-child,
|
||||
div.appendix .important> :last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
div.book .note,
|
||||
div.book .tip,
|
||||
div.appendix .note,
|
||||
div.appendix .tip {
|
||||
color: #5277c3;
|
||||
background: #f2f8fd;
|
||||
}
|
||||
|
||||
div.book .warning,
|
||||
div.book .caution,
|
||||
div.appendix .warning,
|
||||
div.appendix .caution {
|
||||
color: #cc3900;
|
||||
background-color: #fff5e1;
|
||||
}
|
||||
|
||||
div.book .section,
|
||||
div.appendix .section {
|
||||
margin-top: 2em;
|
||||
}
|
||||
|
||||
div.book div.example,
|
||||
div.appendix div.example {
|
||||
margin-top: 1.5em;
|
||||
}
|
||||
|
||||
div.book br.example-break,
|
||||
div.appendix br.example-break {
|
||||
display: none;
|
||||
}
|
||||
|
||||
div.book div.footnotes>hr,
|
||||
div.appendix div.footnotes>hr {
|
||||
border-color: #d8d8d8;
|
||||
}
|
||||
|
||||
div.book div.footnotes>br,
|
||||
div.appendix div.footnotes>br {
|
||||
display: none;
|
||||
}
|
||||
|
||||
div.book dt,
|
||||
div.appendix dt {
|
||||
margin-top: 1em;
|
||||
}
|
||||
|
||||
div.book .toc dt,
|
||||
div.appendix .toc dt {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
div.book .list-of-examples dt,
|
||||
div.appendix .list-of-examples dt {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
div.book code,
|
||||
div.appendix code {
|
||||
padding: 0;
|
||||
border: 0;
|
||||
padding: 5px;
|
||||
background: #fffff5;
|
||||
font-weight: normal;
|
||||
font-style: italic;
|
||||
box-shadow: none;
|
||||
margin-bottom: 1em;
|
||||
background-color: inherit;
|
||||
color: inherit;
|
||||
font-size: 100%;
|
||||
-webkit-hyphens: none;
|
||||
-moz-hyphens: none;
|
||||
hyphens: none;
|
||||
}
|
||||
|
||||
div.navheader table, div.navfooter table {
|
||||
box-shadow: none;
|
||||
div.book div.toc,
|
||||
div.appendix div.toc {
|
||||
margin-bottom: 3em;
|
||||
border-bottom: 0.0625rem solid #d8d8d8;
|
||||
}
|
||||
|
||||
div.affiliation
|
||||
{
|
||||
font-style: italic;
|
||||
div.book div.toc dd,
|
||||
div.appendix div.toc dd {
|
||||
margin-left: 2em;
|
||||
}
|
||||
|
||||
div.book span.command,
|
||||
div.appendix span.command {
|
||||
font-family: monospace;
|
||||
-webkit-hyphens: none;
|
||||
-moz-hyphens: none;
|
||||
hyphens: none;
|
||||
}
|
||||
|
||||
div.book .informaltable th,
|
||||
div.book .informaltable td,
|
||||
div.appendix .informaltable th,
|
||||
div.appendix .informaltable td {
|
||||
padding: 0.5rem;
|
||||
}
|
||||
|
|
14
flake.nix
14
flake.nix
|
@ -27,7 +27,19 @@
|
|||
# We set it to null, to remove the "legacy" entrypoint's
|
||||
# non-hermetic default.
|
||||
system = null;
|
||||
} // args
|
||||
|
||||
modules = args.modules ++ [
|
||||
# This module is injected here since it exposes the nixpkgs self-path in as
|
||||
# constrained of contexts as possible to avoid more things depending on it and
|
||||
# introducing unnecessary potential fragility to changes in flakes itself.
|
||||
#
|
||||
# See: failed attempt to make pkgs.path not copy when using flakes:
|
||||
# https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1023287913
|
||||
({ config, pkgs, lib, ... }: {
|
||||
config.nixpkgs.flake.source = self.outPath;
|
||||
})
|
||||
];
|
||||
} // builtins.removeAttrs args [ "modules" ]
|
||||
);
|
||||
});
|
||||
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
24.05
|
146
lib/asserts.nix
146
lib/asserts.nix
|
@ -2,47 +2,87 @@
|
|||
|
||||
rec {
|
||||
|
||||
/* Throw if pred is false, else return pred.
|
||||
Intended to be used to augment asserts with helpful error messages.
|
||||
/**
|
||||
Throw if pred is false, else return pred.
|
||||
Intended to be used to augment asserts with helpful error messages.
|
||||
|
||||
Example:
|
||||
assertMsg false "nope"
|
||||
stderr> error: nope
|
||||
# Inputs
|
||||
|
||||
assert assertMsg ("foo" == "bar") "foo is not bar, silly"; ""
|
||||
stderr> error: foo is not bar, silly
|
||||
`pred`
|
||||
|
||||
Type:
|
||||
assertMsg :: Bool -> String -> Bool
|
||||
: Predicate that needs to succeed, otherwise `msg` is thrown
|
||||
|
||||
`msg`
|
||||
|
||||
: Message to throw in case `pred` fails
|
||||
|
||||
# Type
|
||||
|
||||
```
|
||||
assertMsg :: Bool -> String -> Bool
|
||||
```
|
||||
|
||||
# Examples
|
||||
:::{.example}
|
||||
## `lib.asserts.assertMsg` usage example
|
||||
|
||||
```nix
|
||||
assertMsg false "nope"
|
||||
stderr> error: nope
|
||||
assert assertMsg ("foo" == "bar") "foo is not bar, silly"; ""
|
||||
stderr> error: foo is not bar, silly
|
||||
```
|
||||
|
||||
:::
|
||||
*/
|
||||
# TODO(Profpatsch): add tests that check stderr
|
||||
assertMsg =
|
||||
# Predicate that needs to succeed, otherwise `msg` is thrown
|
||||
pred:
|
||||
# Message to throw in case `pred` fails
|
||||
msg:
|
||||
pred || builtins.throw msg;
|
||||
|
||||
/* Specialized `assertMsg` for checking if `val` is one of the elements
|
||||
of the list `xs`. Useful for checking enums.
|
||||
/**
|
||||
Specialized `assertMsg` for checking if `val` is one of the elements
|
||||
of the list `xs`. Useful for checking enums.
|
||||
|
||||
Example:
|
||||
let sslLibrary = "libressl";
|
||||
in assertOneOf "sslLibrary" sslLibrary [ "openssl" "bearssl" ]
|
||||
stderr> error: sslLibrary must be one of [
|
||||
stderr> "openssl"
|
||||
stderr> "bearssl"
|
||||
stderr> ], but is: "libressl"
|
||||
# Inputs
|
||||
|
||||
Type:
|
||||
assertOneOf :: String -> ComparableVal -> List ComparableVal -> Bool
|
||||
`name`
|
||||
|
||||
: The name of the variable the user entered `val` into, for inclusion in the error message
|
||||
|
||||
`val`
|
||||
|
||||
: The value of what the user provided, to be compared against the values in `xs`
|
||||
|
||||
`xs`
|
||||
|
||||
: The list of valid values
|
||||
|
||||
# Type
|
||||
|
||||
```
|
||||
assertOneOf :: String -> ComparableVal -> List ComparableVal -> Bool
|
||||
```
|
||||
|
||||
# Examples
|
||||
:::{.example}
|
||||
## `lib.asserts.assertOneOf` usage example
|
||||
|
||||
```nix
|
||||
let sslLibrary = "libressl";
|
||||
in assertOneOf "sslLibrary" sslLibrary [ "openssl" "bearssl" ]
|
||||
stderr> error: sslLibrary must be one of [
|
||||
stderr> "openssl"
|
||||
stderr> "bearssl"
|
||||
stderr> ], but is: "libressl"
|
||||
```
|
||||
|
||||
:::
|
||||
*/
|
||||
assertOneOf =
|
||||
# The name of the variable the user entered `val` into, for inclusion in the error message
|
||||
name:
|
||||
# The value of what the user provided, to be compared against the values in `xs`
|
||||
val:
|
||||
# The list of valid values
|
||||
xs:
|
||||
assertMsg
|
||||
(lib.elem val xs)
|
||||
|
@ -50,29 +90,51 @@ rec {
|
|||
lib.generators.toPretty {} xs}, but is: ${
|
||||
lib.generators.toPretty {} val}";
|
||||
|
||||
/* Specialized `assertMsg` for checking if every one of `vals` is one of the elements
|
||||
of the list `xs`. Useful for checking lists of supported attributes.
|
||||
/**
|
||||
Specialized `assertMsg` for checking if every one of `vals` is one of the elements
|
||||
of the list `xs`. Useful for checking lists of supported attributes.
|
||||
|
||||
Example:
|
||||
let sslLibraries = [ "libressl" "bearssl" ];
|
||||
in assertEachOneOf "sslLibraries" sslLibraries [ "openssl" "bearssl" ]
|
||||
stderr> error: each element in sslLibraries must be one of [
|
||||
stderr> "openssl"
|
||||
stderr> "bearssl"
|
||||
stderr> ], but is: [
|
||||
stderr> "libressl"
|
||||
stderr> "bearssl"
|
||||
stderr> ]
|
||||
# Inputs
|
||||
|
||||
Type:
|
||||
assertEachOneOf :: String -> List ComparableVal -> List ComparableVal -> Bool
|
||||
`name`
|
||||
|
||||
: The name of the variable the user entered `val` into, for inclusion in the error message
|
||||
|
||||
`vals`
|
||||
|
||||
: The list of values of what the user provided, to be compared against the values in `xs`
|
||||
|
||||
`xs`
|
||||
|
||||
: The list of valid values
|
||||
|
||||
# Type
|
||||
|
||||
```
|
||||
assertEachOneOf :: String -> List ComparableVal -> List ComparableVal -> Bool
|
||||
```
|
||||
|
||||
# Examples
|
||||
:::{.example}
|
||||
## `lib.asserts.assertEachOneOf` usage example
|
||||
|
||||
```nix
|
||||
let sslLibraries = [ "libressl" "bearssl" ];
|
||||
in assertEachOneOf "sslLibraries" sslLibraries [ "openssl" "bearssl" ]
|
||||
stderr> error: each element in sslLibraries must be one of [
|
||||
stderr> "openssl"
|
||||
stderr> "bearssl"
|
||||
stderr> ], but is: [
|
||||
stderr> "libressl"
|
||||
stderr> "bearssl"
|
||||
stderr> ]
|
||||
```
|
||||
|
||||
:::
|
||||
*/
|
||||
assertEachOneOf =
|
||||
# The name of the variable the user entered `val` into, for inclusion in the error message
|
||||
name:
|
||||
# The list of values of what the user provided, to be compared against the values in `xs`
|
||||
vals:
|
||||
# The list of valid values
|
||||
xs:
|
||||
assertMsg
|
||||
(lib.all (val: lib.elem val xs) vals)
|
||||
|
|
1932
lib/attrsets.nix
1932
lib/attrsets.nix
File diff suppressed because it is too large
Load Diff
|
@ -221,9 +221,10 @@ rec {
|
|||
let
|
||||
f = if isFunction fn then fn else import fn;
|
||||
auto = intersectAttrs (functionArgs f) autoArgs;
|
||||
mirrorArgs = mirrorFunctionArgs f;
|
||||
origArgs = auto // args;
|
||||
pkgs = f origArgs;
|
||||
mkAttrOverridable = name: _: makeOverridable (newArgs: (f newArgs).${name}) origArgs;
|
||||
mkAttrOverridable = name: _: makeOverridable (mirrorArgs (newArgs: (f newArgs).${name})) origArgs;
|
||||
in
|
||||
if isDerivation pkgs then throw
|
||||
("function `callPackages` was called on a *single* derivation "
|
||||
|
@ -305,18 +306,129 @@ rec {
|
|||
in if drv == null then null else
|
||||
deepSeq drv' drv';
|
||||
|
||||
/* Make a set of packages with a common scope. All packages called
|
||||
with the provided `callPackage` will be evaluated with the same
|
||||
arguments. Any package in the set may depend on any other. The
|
||||
`overrideScope'` function allows subsequent modification of the package
|
||||
set in a consistent way, i.e. all packages in the set will be
|
||||
called with the overridden packages. The package sets may be
|
||||
hierarchical: the packages in the set are called with the scope
|
||||
provided by `newScope` and the set provides a `newScope` attribute
|
||||
which can form the parent scope for later package sets.
|
||||
/**
|
||||
Make an attribute set (a "scope") from functions that take arguments from that same attribute set.
|
||||
See [](#ex-makeScope) for how to use it.
|
||||
|
||||
Type:
|
||||
makeScope :: (AttrSet -> ((AttrSet -> a) | Path) -> AttrSet -> a) -> (AttrSet -> AttrSet) -> AttrSet
|
||||
# Inputs
|
||||
|
||||
1. `newScope` (`AttrSet -> ((AttrSet -> a) | Path) -> AttrSet -> a`)
|
||||
|
||||
A function that takes an attribute set `attrs` and returns what ends up as `callPackage` in the output.
|
||||
|
||||
Typical values are `callPackageWith` or the output attribute `newScope`.
|
||||
|
||||
2. `f` (`AttrSet -> AttrSet`)
|
||||
|
||||
A function that takes an attribute set as returned by `makeScope newScope f` (a "scope") and returns any attribute set.
|
||||
|
||||
This function is used to compute the fixpoint of the resulting scope using `callPackage`.
|
||||
Its argument is the lazily evaluated reference to the value of that fixpoint, and is typically called `self` or `final`.
|
||||
|
||||
See [](#ex-makeScope) for how to use it.
|
||||
See [](#sec-functions-library-fixedPoints) for details on fixpoint computation.
|
||||
|
||||
# Output
|
||||
|
||||
`makeScope` returns an attribute set of a form called `scope`, which also contains the final attributes produced by `f`:
|
||||
|
||||
```
|
||||
scope :: {
|
||||
callPackage :: ((AttrSet -> a) | Path) -> AttrSet -> a
|
||||
newScope = AttrSet -> scope
|
||||
overrideScope = (scope -> scope -> AttrSet) -> scope
|
||||
packages :: AttrSet -> AttrSet
|
||||
}
|
||||
```
|
||||
|
||||
- `callPackage` (`((AttrSet -> a) | Path) -> AttrSet -> a`)
|
||||
|
||||
A function that
|
||||
|
||||
1. Takes a function `p`, or a path to a Nix file that contains a function `p`, which takes an attribute set and returns value of arbitrary type `a`,
|
||||
2. Takes an attribute set `args` with explicit attributes to pass to `p`,
|
||||
3. Calls `f` with attributes from the original attribute set `attrs` passed to `newScope` updated with `args, i.e. `attrs // args`, if they match the attributes in the argument of `p`.
|
||||
|
||||
All such functions `p` will be called with the same value for `attrs`.
|
||||
|
||||
See [](#ex-makeScope-callPackage) for how to use it.
|
||||
|
||||
- `newScope` (`AttrSet -> scope`)
|
||||
|
||||
Takes an attribute set `attrs` and returns a scope that extends the original scope.
|
||||
|
||||
- `overrideScope` (`(scope -> scope -> AttrSet) -> scope`)
|
||||
|
||||
Takes a function `g` of the form `final: prev: { # attributes }` to act as an overlay on `f`, and returns a new scope with values determined by `extends g f`.
|
||||
See [](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.fixedPoints.extends) for details.
|
||||
|
||||
This allows subsequent modification of the final attribute set in a consistent way, i.e. all functions `p` invoked with `callPackage` will be called with the modified values.
|
||||
|
||||
- `packages` (`AttrSet -> AttrSet`)
|
||||
|
||||
The value of the argument `f` to `makeScope`.
|
||||
|
||||
- final attributes
|
||||
|
||||
The final values returned by `f`.
|
||||
|
||||
# Examples
|
||||
|
||||
:::{#ex-makeScope .example}
|
||||
# Create an interdependent package set on top of `pkgs`
|
||||
|
||||
The functions in `foo.nix` and `bar.nix` can depend on each other, in the sense that `foo.nix` can contain a function that expects `bar` as an attribute in its argument.
|
||||
|
||||
```nix
|
||||
let
|
||||
pkgs = import <nixpkgs> { };
|
||||
in
|
||||
pkgs.lib.makeScope pkgs.newScope (self: {
|
||||
foo = self.callPackage ./foo.nix { };
|
||||
bar = self.callPackage ./bar.nix { };
|
||||
})
|
||||
```
|
||||
|
||||
evaluates to
|
||||
|
||||
```nix
|
||||
{
|
||||
callPackage = «lambda»;
|
||||
newScope = «lambda»;
|
||||
overrideScope = «lambda»;
|
||||
packages = «lambda»;
|
||||
foo = «derivation»;
|
||||
bar = «derivation»;
|
||||
}
|
||||
```
|
||||
:::
|
||||
|
||||
:::{#ex-makeScope-callPackage .example}
|
||||
# Using `callPackage` from a scope
|
||||
|
||||
```nix
|
||||
let
|
||||
pkgs = import <nixpkgs> { };
|
||||
inherit (pkgs) lib;
|
||||
scope = lib.makeScope lib.callPackageWith (self: { a = 1; b = 2; });
|
||||
three = scope.callPackage ({ a, b }: a + b) { };
|
||||
four = scope.callPackage ({ a, b }: a + b) { a = 2; };
|
||||
in
|
||||
[ three four ]
|
||||
```
|
||||
|
||||
evaluates to
|
||||
|
||||
```nix
|
||||
[ 3 4 ]
|
||||
```
|
||||
:::
|
||||
|
||||
# Type
|
||||
|
||||
```
|
||||
makeScope :: (AttrSet -> ((AttrSet -> a) | Path) -> AttrSet -> a) -> (AttrSet -> AttrSet) -> scope
|
||||
```
|
||||
*/
|
||||
makeScope = newScope: f:
|
||||
let self = f self // {
|
||||
|
|
|
@ -84,8 +84,8 @@ let
|
|||
mapAttrs' mapAttrsToList attrsToList concatMapAttrs mapAttrsRecursive
|
||||
mapAttrsRecursiveCond genAttrs isDerivation toDerivation optionalAttrs
|
||||
zipAttrsWithNames zipAttrsWith zipAttrs recursiveUpdateUntil
|
||||
recursiveUpdate matchAttrs overrideExisting showAttrPath getOutput getBin
|
||||
getLib getDev getMan chooseDevOutputs zipWithNames zip
|
||||
recursiveUpdate matchAttrs mergeAttrsList overrideExisting showAttrPath getOutput
|
||||
getBin getLib getDev getMan chooseDevOutputs zipWithNames zip
|
||||
recurseIntoAttrs dontRecurseIntoAttrs cartesianProductOfSets
|
||||
updateManyAttrsByPath;
|
||||
inherit (self.lists) singleton forEach foldr fold foldl foldl' imap0 imap1
|
||||
|
|
|
@ -1,14 +1,37 @@
|
|||
{ lib }:
|
||||
|
||||
let
|
||||
inherit (builtins) head tail isList isAttrs isInt attrNames;
|
||||
inherit (lib)
|
||||
and
|
||||
any
|
||||
attrByPath
|
||||
attrNames
|
||||
compare
|
||||
concat
|
||||
concatMap
|
||||
elem
|
||||
filter
|
||||
foldl
|
||||
foldr
|
||||
genericClosure
|
||||
head
|
||||
imap1
|
||||
init
|
||||
isAttrs
|
||||
isFunction
|
||||
isInt
|
||||
isList
|
||||
lists
|
||||
listToAttrs
|
||||
mapAttrs
|
||||
mergeAttrs
|
||||
meta
|
||||
nameValuePair
|
||||
tail
|
||||
toList
|
||||
;
|
||||
|
||||
in
|
||||
|
||||
with lib.lists;
|
||||
with lib.attrsets;
|
||||
with lib.strings;
|
||||
|
||||
rec {
|
||||
inherit (lib.attrsets) removeAttrs;
|
||||
|
||||
# returns default if env var is not set
|
||||
maybeEnv = name: default:
|
||||
|
@ -26,7 +49,7 @@ rec {
|
|||
base = (setAttrMerge "passthru" {} (f arg)
|
||||
( z: z // {
|
||||
function = foldArgs merger f arg;
|
||||
args = (lib.attrByPath ["passthru" "args"] {} z) // x;
|
||||
args = (attrByPath ["passthru" "args"] {} z) // x;
|
||||
} ));
|
||||
withStdOverrides = base // {
|
||||
override = base.passthru.function;
|
||||
|
@ -77,11 +100,11 @@ rec {
|
|||
# Output : are reqs satisfied? It's asserted.
|
||||
checkReqs = attrSet: argList: condList:
|
||||
(
|
||||
foldr lib.and true
|
||||
foldr and true
|
||||
(map (x: let name = (head x); in
|
||||
|
||||
((checkFlag attrSet name) ->
|
||||
(foldr lib.and true
|
||||
(foldr and true
|
||||
(map (y: let val=(getValue attrSet argList y); in
|
||||
(val!=null) && (val!=false))
|
||||
(tail x))))) condList));
|
||||
|
@ -159,11 +182,11 @@ rec {
|
|||
|
||||
closePropagationSlow = list: (uniqList {inputList = (innerClosePropagation [] list);});
|
||||
|
||||
# This is an optimisation of lib.closePropagation which avoids the O(n^2) behavior
|
||||
# This is an optimisation of closePropagation which avoids the O(n^2) behavior
|
||||
# Using a list of derivations, it generates the full closure of the propagatedXXXBuildInputs
|
||||
# The ordering / sorting / comparison is done based on the `outPath`
|
||||
# attribute of each derivation.
|
||||
# On some benchmarks, it performs up to 15 times faster than lib.closePropagation.
|
||||
# On some benchmarks, it performs up to 15 times faster than closePropagation.
|
||||
# See https://github.com/NixOS/nixpkgs/pull/194391 for details.
|
||||
closePropagationFast = list:
|
||||
builtins.map (x: x.val) (builtins.genericClosure {
|
||||
|
@ -250,10 +273,10 @@ rec {
|
|||
# foldArgs, composedArgsAndFun or applyAndFun. Example: composableDerivation in all-packages.nix
|
||||
mergeAttrByFunc = x: y:
|
||||
let
|
||||
mergeAttrBy2 = { mergeAttrBy = lib.mergeAttrs; }
|
||||
mergeAttrBy2 = { mergeAttrBy = mergeAttrs; }
|
||||
// (maybeAttr "mergeAttrBy" {} x)
|
||||
// (maybeAttr "mergeAttrBy" {} y); in
|
||||
foldr lib.mergeAttrs {} [
|
||||
foldr mergeAttrs {} [
|
||||
x y
|
||||
(mapAttrs ( a: v: # merge special names using given functions
|
||||
if x ? ${a}
|
||||
|
@ -273,9 +296,9 @@ rec {
|
|||
|
||||
# sane defaults (same name as attr name so that inherit can be used)
|
||||
mergeAttrBy = # { buildInputs = concatList; [...]; passthru = mergeAttr; [..]; }
|
||||
listToAttrs (map (n: nameValuePair n lib.concat)
|
||||
listToAttrs (map (n: nameValuePair n concat)
|
||||
[ "nativeBuildInputs" "buildInputs" "propagatedBuildInputs" "configureFlags" "prePhases" "postAll" "patches" ])
|
||||
// listToAttrs (map (n: nameValuePair n lib.mergeAttrs) [ "passthru" "meta" "cfg" "flags" ])
|
||||
// listToAttrs (map (n: nameValuePair n mergeAttrs) [ "passthru" "meta" "cfg" "flags" ])
|
||||
// listToAttrs (map (n: nameValuePair n (a: b: "${a}\n${b}") ) [ "preConfigure" "postInstall" ])
|
||||
;
|
||||
|
||||
|
@ -283,7 +306,7 @@ rec {
|
|||
if isAttrs x then
|
||||
if x ? outPath then "derivation"
|
||||
else "attrs"
|
||||
else if lib.isFunction x then "function"
|
||||
else if isFunction x then "function"
|
||||
else if isList x then "list"
|
||||
else if x == true then "bool"
|
||||
else if x == false then "bool"
|
||||
|
@ -304,4 +327,47 @@ rec {
|
|||
fakeHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
|
||||
fakeSha256 = "0000000000000000000000000000000000000000000000000000000000000000";
|
||||
fakeSha512 = "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
|
||||
|
||||
in
|
||||
|
||||
# Everything in this attrset is the public interface of the file.
|
||||
{
|
||||
inherit
|
||||
checkFlag
|
||||
checkReqs
|
||||
closePropagation
|
||||
closePropagationFast
|
||||
closePropagationSlow
|
||||
condConcat
|
||||
defaultMerge
|
||||
defaultMergeArg
|
||||
fakeHash
|
||||
fakeSha256
|
||||
fakeSha512
|
||||
foldArgs
|
||||
getValue
|
||||
ifEnable
|
||||
imap
|
||||
innerClosePropagation
|
||||
innerModifySumArgs
|
||||
lazyGenericClosure
|
||||
mapAttrsFlatten
|
||||
maybeAttr
|
||||
maybeAttrNullable
|
||||
maybeEnv
|
||||
mergeAttrBy
|
||||
mergeAttrByFunc
|
||||
mergeAttrsByFuncDefaults
|
||||
mergeAttrsByFuncDefaultsClean
|
||||
mergeAttrsConcatenateValues
|
||||
mergeAttrsNoOverride
|
||||
mergeAttrsWithFunc
|
||||
modifySumArgs
|
||||
nixType
|
||||
nvs
|
||||
setAttr
|
||||
setAttrMerge
|
||||
uniqList
|
||||
uniqListExt
|
||||
;
|
||||
}
|
||||
|
|
|
@ -1,7 +1,20 @@
|
|||
{ lib }:
|
||||
|
||||
let
|
||||
inherit (lib) throwIfNot;
|
||||
inherit (lib)
|
||||
genAttrs
|
||||
isString
|
||||
throwIfNot
|
||||
;
|
||||
|
||||
showMaybeAttrPosPre = prefix: attrName: v:
|
||||
let pos = builtins.unsafeGetAttrPos attrName v;
|
||||
in if pos == null then "" else "${prefix}${pos.file}:${toString pos.line}:${toString pos.column}";
|
||||
|
||||
showMaybePackagePosPre = prefix: pkg:
|
||||
if pkg?meta.position && isString pkg.meta.position
|
||||
then "${prefix}${pkg.meta.position}"
|
||||
else "";
|
||||
in
|
||||
{
|
||||
/*
|
||||
|
@ -64,6 +77,11 @@ in
|
|||
#
|
||||
# This can be used for adding package attributes, such as `tests`.
|
||||
passthru ? { }
|
||||
, # Optional list of assumed outputs. Default: ["out"]
|
||||
#
|
||||
# This must match the set of outputs that the returned derivation has.
|
||||
# You must use this when the derivation has multiple outputs.
|
||||
outputs ? [ "out" ]
|
||||
}:
|
||||
let
|
||||
# These checks are strict in `drv` and some `drv` attributes, but the
|
||||
|
@ -71,11 +89,40 @@ in
|
|||
# Instead, the individual derivation attributes do depend on it.
|
||||
checked =
|
||||
throwIfNot (derivation.type or null == "derivation")
|
||||
"lazySimpleDerivation: input must be a derivation."
|
||||
"lazyDerivation: input must be a derivation."
|
||||
throwIfNot
|
||||
(derivation.outputs == [ "out" ])
|
||||
# Supporting multiple outputs should be a matter of inheriting more attrs.
|
||||
"The derivation ${derivation.name or "<unknown>"} has multiple outputs. This is not supported by lazySimpleDerivation yet. Support could be added, and be useful as long as the set of outputs is known in advance, without evaluating the actual derivation."
|
||||
# NOTE: Technically we could require our outputs to be a subset of the
|
||||
# actual ones, or even leave them unchecked and fail on a lazy basis.
|
||||
# However, consider the case where an output is added in the underlying
|
||||
# derivation, such as dev. lazyDerivation would remove it and cause it
|
||||
# to fail as a buildInputs item, without any indication as to what
|
||||
# happened. Hence the more stringent condition. We could consider
|
||||
# adding a flag to control this behavior if there's a valid case for it,
|
||||
# but the documentation must have a note like this.
|
||||
(derivation.outputs == outputs)
|
||||
''
|
||||
lib.lazyDerivation: The derivation ${derivation.name or "<unknown>"} has outputs that don't match the assumed outputs.
|
||||
|
||||
Assumed outputs passed to lazyDerivation${showMaybeAttrPosPre ",\n at " "outputs" args}:
|
||||
${lib.generators.toPretty { multiline = false; } outputs};
|
||||
|
||||
Actual outputs of the derivation${showMaybePackagePosPre ",\n defined at " derivation}:
|
||||
${lib.generators.toPretty { multiline = false; } derivation.outputs}
|
||||
|
||||
If the outputs are known ahead of evaluating the derivation,
|
||||
then update the lazyDerivation call to match the actual outputs, in the same order.
|
||||
If lazyDerivation is passed a literal value, just change it to the actual outputs.
|
||||
As a result it will work as before / as intended.
|
||||
|
||||
Otherwise, when the outputs are dynamic and can't be known ahead of time, it won't
|
||||
be possible to add laziness, but lib.lazyDerivation may still be useful for trimming
|
||||
the attributes.
|
||||
If you want to keep trimming the attributes, make sure that the package is in a
|
||||
variable (don't evaluate it twice!) and pass the variable and its outputs attribute
|
||||
to lib.lazyDerivation. This largely defeats laziness, but keeps the trimming.
|
||||
If none of the above works for you, replace the lib.lazyDerivation call by the
|
||||
expression in the derivation argument.
|
||||
''
|
||||
derivation;
|
||||
in
|
||||
{
|
||||
|
@ -92,12 +139,15 @@ in
|
|||
# A fixed set of derivation values, so that `lazyDerivation` can return
|
||||
# its attrset before evaluating `derivation`.
|
||||
# This must only list attributes that are available on _all_ derivations.
|
||||
inherit (checked) outputs out outPath outputName drvPath name system;
|
||||
inherit (checked) outPath outputName drvPath name system;
|
||||
inherit outputs;
|
||||
|
||||
# The meta attribute can either be taken from the derivation, or if the
|
||||
# `lazyDerivation` caller knew a shortcut, be taken from there.
|
||||
meta = args.meta or checked.meta;
|
||||
} // passthru;
|
||||
}
|
||||
// genAttrs outputs (outputName: checked.${outputName})
|
||||
// passthru;
|
||||
|
||||
/* Conditionally set a derivation attribute.
|
||||
|
||||
|
|
|
@ -23,6 +23,10 @@
|
|||
|
||||
Add files in file sets to the store to use as derivation sources.
|
||||
|
||||
- [`lib.fileset.toList`](#function-library-lib.fileset.toList):
|
||||
|
||||
The list of files contained in a file set.
|
||||
|
||||
Combinators:
|
||||
- [`lib.fileset.union`](#function-library-lib.fileset.union)/[`lib.fileset.unions`](#function-library-lib.fileset.unions):
|
||||
|
||||
|
@ -102,6 +106,7 @@ let
|
|||
_coerceMany
|
||||
_toSourceFilter
|
||||
_fromSourceFilter
|
||||
_toList
|
||||
_unionMany
|
||||
_fileFilter
|
||||
_printFileset
|
||||
|
@ -412,6 +417,38 @@ in {
|
|||
filter = sourceFilter;
|
||||
};
|
||||
|
||||
|
||||
/*
|
||||
The list of file paths contained in the given file set.
|
||||
|
||||
:::{.note}
|
||||
This function is strict in the entire file set.
|
||||
This is in contrast with combinators [`lib.fileset.union`](#function-library-lib.fileset.union),
|
||||
[`lib.fileset.intersection`](#function-library-lib.fileset.intersection) and [`lib.fileset.difference`](#function-library-lib.fileset.difference).
|
||||
|
||||
Thus it is recommended to call `toList` on file sets created using the combinators,
|
||||
instead of doing list processing on the result of `toList`.
|
||||
:::
|
||||
|
||||
The resulting list of files can be turned back into a file set using [`lib.fileset.unions`](#function-library-lib.fileset.unions).
|
||||
|
||||
Type:
|
||||
toList :: FileSet -> [ Path ]
|
||||
|
||||
Example:
|
||||
toList ./.
|
||||
[ ./README.md ./Makefile ./src/main.c ./src/main.h ]
|
||||
|
||||
toList (difference ./. ./src)
|
||||
[ ./README.md ./Makefile ]
|
||||
*/
|
||||
toList =
|
||||
# The file set whose file paths to return.
|
||||
# This argument can also be a path,
|
||||
# which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
|
||||
fileset:
|
||||
_toList (_coerce "lib.fileset.toList: Argument" fileset);
|
||||
|
||||
/*
|
||||
The file set containing all files that are in either of two given file sets.
|
||||
This is the same as [`unions`](#function-library-lib.fileset.unions),
|
||||
|
|
|
@ -18,6 +18,7 @@ let
|
|||
attrNames
|
||||
attrValues
|
||||
mapAttrs
|
||||
mapAttrsToList
|
||||
optionalAttrs
|
||||
zipAttrsWith
|
||||
;
|
||||
|
@ -29,6 +30,7 @@ let
|
|||
inherit (lib.lists)
|
||||
all
|
||||
commonPrefix
|
||||
concatLists
|
||||
elemAt
|
||||
filter
|
||||
findFirst
|
||||
|
@ -539,6 +541,27 @@ rec {
|
|||
${baseNameOf root} = rootPathType;
|
||||
};
|
||||
|
||||
# Turns a file set into the list of file paths it includes.
|
||||
# Type: fileset -> [ Path ]
|
||||
_toList = fileset:
|
||||
let
|
||||
recurse = path: tree:
|
||||
if isAttrs tree then
|
||||
concatLists (mapAttrsToList (name: value:
|
||||
recurse (path + "/${name}") value
|
||||
) tree)
|
||||
else if tree == "directory" then
|
||||
recurse path (readDir path)
|
||||
else if tree == null then
|
||||
[ ]
|
||||
else
|
||||
[ path ];
|
||||
in
|
||||
if fileset._internalIsEmptyWithoutBase then
|
||||
[ ]
|
||||
else
|
||||
recurse fileset._internalBase fileset._internalTree;
|
||||
|
||||
# Transforms the filesetTree of a file set to a shorter base path, e.g.
|
||||
# _shortenTreeBase [ "foo" ] (_create /foo/bar null)
|
||||
# => { bar = null; }
|
||||
|
|
|
@ -275,7 +275,6 @@ createTree() {
|
|||
# )
|
||||
# checkFileset './a' # Pass the fileset as the argument
|
||||
checkFileset() {
|
||||
# New subshell so that we can have a separate trap handler, see `trap` below
|
||||
local fileset=$1
|
||||
|
||||
# Create the tree
|
||||
|
@ -283,16 +282,20 @@ checkFileset() {
|
|||
|
||||
# Process the tree into separate arrays for included paths, excluded paths and excluded files.
|
||||
local -a included=()
|
||||
local -a includedFiles=()
|
||||
local -a excluded=()
|
||||
local -a excludedFiles=()
|
||||
for p in "${!tree[@]}"; do
|
||||
case "${tree[$p]}" in
|
||||
1)
|
||||
included+=("$p")
|
||||
# If keys end with a `/` we treat them as directories, otherwise files
|
||||
if [[ ! "$p" =~ /$ ]]; then
|
||||
includedFiles+=("$p")
|
||||
fi
|
||||
;;
|
||||
0)
|
||||
excluded+=("$p")
|
||||
# If keys end with a `/` we treat them as directories, otherwise files
|
||||
if [[ ! "$p" =~ /$ ]]; then
|
||||
excludedFiles+=("$p")
|
||||
fi
|
||||
|
@ -302,6 +305,10 @@ checkFileset() {
|
|||
esac
|
||||
done
|
||||
|
||||
# Test that lib.fileset.toList contains exactly the included files.
|
||||
# The /#/./ part prefixes each element with `./`
|
||||
expectEqual "toList ($fileset)" "sort lessThan [ ${includedFiles[*]/#/./} ]"
|
||||
|
||||
expression="toSource { root = ./.; fileset = $fileset; }"
|
||||
|
||||
# We don't have lambda's in bash unfortunately,
|
||||
|
@ -338,13 +345,17 @@ checkFileset() {
|
|||
|
||||
#### Error messages #####
|
||||
|
||||
# We're using [[:blank:]] here instead of \s, because only the former is POSIX
|
||||
# (see https://pubs.opengroup.org/onlinepubs/007908799/xbd/re.html#tag_007_003_005).
|
||||
# And indeed, Darwin's bash only supports the former
|
||||
|
||||
# Absolute paths in strings cannot be passed as `root`
|
||||
expectFailure 'toSource { root = "/nix/store/foobar"; fileset = ./.; }' 'lib.fileset.toSource: `root` \(/nix/store/foobar\) is a string-like value, but it should be a path instead.
|
||||
\s*Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
|
||||
[[:blank:]]*Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
|
||||
|
||||
expectFailure 'toSource { root = cleanSourceWith { src = ./.; }; fileset = ./.; }' 'lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
|
||||
\s*To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
|
||||
\s*Note that this only works for sources created from paths.'
|
||||
[[:blank:]]*To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
|
||||
[[:blank:]]*Note that this only works for sources created from paths.'
|
||||
|
||||
# Only paths are accepted as `root`
|
||||
expectFailure 'toSource { root = 10; fileset = ./.; }' 'lib.fileset.toSource: `root` is of type int, but it should be a path instead.'
|
||||
|
@ -354,9 +365,9 @@ mkdir -p {foo,bar}/mock-root
|
|||
expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
|
||||
toSource { root = ./foo/mock-root; fileset = ./bar/mock-root; }
|
||||
' 'lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` \('"$work"'/foo/mock-root\):
|
||||
\s*`root`: Filesystem root is "'"$work"'/foo/mock-root"
|
||||
\s*`fileset`: Filesystem root is "'"$work"'/bar/mock-root"
|
||||
\s*Different filesystem roots are not supported.'
|
||||
[[:blank:]]*`root`: Filesystem root is "'"$work"'/foo/mock-root"
|
||||
[[:blank:]]*`fileset`: Filesystem root is "'"$work"'/bar/mock-root"
|
||||
[[:blank:]]*Different filesystem roots are not supported.'
|
||||
rm -rf -- *
|
||||
|
||||
# `root` needs to exist
|
||||
|
@ -365,8 +376,8 @@ expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `
|
|||
# `root` needs to be a file
|
||||
touch a
|
||||
expectFailure 'toSource { root = ./a; fileset = ./a; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) is a file, but it should be a directory instead. Potential solutions:
|
||||
\s*- If you want to import the file into the store _without_ a containing directory, use string interpolation or `builtins.path` instead of this function.
|
||||
\s*- If you want to import the file into the store _with_ a containing directory, set `root` to the containing directory, such as '"$work"', and set `fileset` to the file path.'
|
||||
[[:blank:]]*- If you want to import the file into the store _without_ a containing directory, use string interpolation or `builtins.path` instead of this function.
|
||||
[[:blank:]]*- If you want to import the file into the store _with_ a containing directory, set `root` to the containing directory, such as '"$work"', and set `fileset` to the file path.'
|
||||
rm -rf -- *
|
||||
|
||||
# The fileset argument should be evaluated, even if the directory is empty
|
||||
|
@ -375,36 +386,36 @@ expectFailure 'toSource { root = ./.; fileset = abort "This should be evaluated"
|
|||
# Only paths under `root` should be able to influence the result
|
||||
mkdir a
|
||||
expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `fileset` could contain files in '"$work"', which is not under the `root` \('"$work"'/a\). Potential solutions:
|
||||
\s*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
|
||||
\s*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'
|
||||
[[:blank:]]*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
|
||||
[[:blank:]]*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'
|
||||
rm -rf -- *
|
||||
|
||||
# non-regular and non-symlink files cannot be added to the Nix store
|
||||
mkfifo a
|
||||
expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` contains a file that cannot be added to the store: '"$work"'/a
|
||||
\s*This file is neither a regular file nor a symlink, the only file types supported by the Nix store.
|
||||
\s*Therefore the file set cannot be added to the Nix store as is. Make sure to not include that file to avoid this error.'
|
||||
[[:blank:]]*This file is neither a regular file nor a symlink, the only file types supported by the Nix store.
|
||||
[[:blank:]]*Therefore the file set cannot be added to the Nix store as is. Make sure to not include that file to avoid this error.'
|
||||
rm -rf -- *
|
||||
|
||||
# Path coercion only works for paths
|
||||
expectFailure 'toSource { root = ./.; fileset = 10; }' 'lib.fileset.toSource: `fileset` is of type int, but it should be a file set or a path instead.'
|
||||
expectFailure 'toSource { root = ./.; fileset = "/some/path"; }' 'lib.fileset.toSource: `fileset` \("/some/path"\) is a string-like value, but it should be a file set or a path instead.
|
||||
\s*Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
|
||||
[[:blank:]]*Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
|
||||
expectFailure 'toSource { root = ./.; fileset = cleanSourceWith { src = ./.; }; }' 'lib.fileset.toSource: `fileset` is a `lib.sources`-based value, but it should be a file set or a path instead.
|
||||
\s*To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
|
||||
\s*Note that this only works for sources created from paths.'
|
||||
[[:blank:]]*To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
|
||||
[[:blank:]]*Note that this only works for sources created from paths.'
|
||||
|
||||
# Path coercion errors for non-existent paths
|
||||
expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` \('"$work"'/a\) is a path that does not exist.
|
||||
\s*To create a file set from a path that may not exist, use `lib.fileset.maybeMissing`.'
|
||||
[[:blank:]]*To create a file set from a path that may not exist, use `lib.fileset.maybeMissing`.'
|
||||
|
||||
# File sets cannot be evaluated directly
|
||||
expectFailure 'union ./. ./.' 'lib.fileset: Directly evaluating a file set is not supported.
|
||||
\s*To turn it into a usable source, use `lib.fileset.toSource`.
|
||||
\s*To pretty-print the contents, use `lib.fileset.trace` or `lib.fileset.traceVal`.'
|
||||
[[:blank:]]*To turn it into a usable source, use `lib.fileset.toSource`.
|
||||
[[:blank:]]*To pretty-print the contents, use `lib.fileset.trace` or `lib.fileset.traceVal`.'
|
||||
expectFailure '_emptyWithoutBase' 'lib.fileset: Directly evaluating a file set is not supported.
|
||||
\s*To turn it into a usable source, use `lib.fileset.toSource`.
|
||||
\s*To pretty-print the contents, use `lib.fileset.trace` or `lib.fileset.traceVal`.'
|
||||
[[:blank:]]*To turn it into a usable source, use `lib.fileset.toSource`.
|
||||
[[:blank:]]*To pretty-print the contents, use `lib.fileset.trace` or `lib.fileset.traceVal`.'
|
||||
|
||||
# Past versions of the internal representation are supported
|
||||
expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 0; _internalBase = ./.; }' \
|
||||
|
@ -416,9 +427,9 @@ expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 2;
|
|||
|
||||
# Future versions of the internal representation are unsupported
|
||||
expectFailure '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 4; }' '<tests>: value is a file set created from a future version of the file set library with a different internal representation:
|
||||
\s*- Internal version of the file set: 4
|
||||
\s*- Internal version of the library: 3
|
||||
\s*Make sure to update your Nixpkgs to have a newer version of `lib.fileset`.'
|
||||
[[:blank:]]*- Internal version of the file set: 4
|
||||
[[:blank:]]*- Internal version of the library: 3
|
||||
[[:blank:]]*Make sure to update your Nixpkgs to have a newer version of `lib.fileset`.'
|
||||
|
||||
# _create followed by _coerce should give the inputs back without any validation
|
||||
expectEqual '{
|
||||
|
@ -511,6 +522,19 @@ expectEqual '_toSourceFilter (_create /. { foo = "regular"; }) "/foo" ""' 'true'
|
|||
expectEqual '_toSourceFilter (_create /. { foo = null; }) "/foo" ""' 'false'
|
||||
|
||||
|
||||
## lib.fileset.toList
|
||||
# This function is mainly tested in checkFileset
|
||||
|
||||
# The error context for an invalid argument must be correct
|
||||
expectFailure 'toList null' 'lib.fileset.toList: Argument is of type null, but it should be a file set or a path instead.'
|
||||
|
||||
# Works for the empty fileset
|
||||
expectEqual 'toList _emptyWithoutBase' '[ ]'
|
||||
|
||||
# Works on empty paths
|
||||
expectEqual 'toList ./.' '[ ]'
|
||||
|
||||
|
||||
## lib.fileset.union, lib.fileset.unions
|
||||
|
||||
|
||||
|
@ -519,16 +543,16 @@ mkdir -p {foo,bar}/mock-root
|
|||
expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
|
||||
toSource { root = ./.; fileset = union ./foo/mock-root ./bar/mock-root; }
|
||||
' 'lib.fileset.union: Filesystem roots are not the same:
|
||||
\s*First argument: Filesystem root is "'"$work"'/foo/mock-root"
|
||||
\s*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
|
||||
\s*Different filesystem roots are not supported.'
|
||||
[[:blank:]]*First argument: Filesystem root is "'"$work"'/foo/mock-root"
|
||||
[[:blank:]]*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
|
||||
[[:blank:]]*Different filesystem roots are not supported.'
|
||||
|
||||
expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
|
||||
toSource { root = ./.; fileset = unions [ ./foo/mock-root ./bar/mock-root ]; }
|
||||
' 'lib.fileset.unions: Filesystem roots are not the same:
|
||||
\s*Element 0: Filesystem root is "'"$work"'/foo/mock-root"
|
||||
\s*Element 1: Filesystem root is "'"$work"'/bar/mock-root"
|
||||
\s*Different filesystem roots are not supported.'
|
||||
[[:blank:]]*Element 0: Filesystem root is "'"$work"'/foo/mock-root"
|
||||
[[:blank:]]*Element 1: Filesystem root is "'"$work"'/bar/mock-root"
|
||||
[[:blank:]]*Different filesystem roots are not supported.'
|
||||
rm -rf -- *
|
||||
|
||||
# Coercion errors show the correct context
|
||||
|
@ -632,9 +656,9 @@ mkdir -p {foo,bar}/mock-root
|
|||
expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
|
||||
toSource { root = ./.; fileset = intersection ./foo/mock-root ./bar/mock-root; }
|
||||
' 'lib.fileset.intersection: Filesystem roots are not the same:
|
||||
\s*First argument: Filesystem root is "'"$work"'/foo/mock-root"
|
||||
\s*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
|
||||
\s*Different filesystem roots are not supported.'
|
||||
[[:blank:]]*First argument: Filesystem root is "'"$work"'/foo/mock-root"
|
||||
[[:blank:]]*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
|
||||
[[:blank:]]*Different filesystem roots are not supported.'
|
||||
rm -rf -- *
|
||||
|
||||
# Coercion errors show the correct context
|
||||
|
@ -741,8 +765,8 @@ rm -rf -- *
|
|||
# Also not the other way around
|
||||
mkdir a
|
||||
expectFailure 'toSource { root = ./a; fileset = difference ./. ./a; }' 'lib.fileset.toSource: `fileset` could contain files in '"$work"', which is not under the `root` \('"$work"'/a\). Potential solutions:
|
||||
\s*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
|
||||
\s*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'
|
||||
[[:blank:]]*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
|
||||
[[:blank:]]*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'
|
||||
rm -rf -- *
|
||||
|
||||
# Difference actually works
|
||||
|
@ -819,7 +843,7 @@ expectFailure 'fileFilter null (abort "this is not needed")' 'lib.fileset.fileFi
|
|||
|
||||
# The second argument needs to be an existing path
|
||||
expectFailure 'fileFilter (file: abort "this is not needed") _emptyWithoutBase' 'lib.fileset.fileFilter: Second argument is a file set, but it should be a path instead.
|
||||
\s*If you need to filter files in a file set, use `intersection fileset \(fileFilter pred \./\.\)` instead.'
|
||||
[[:blank:]]*If you need to filter files in a file set, use `intersection fileset \(fileFilter pred \./\.\)` instead.'
|
||||
expectFailure 'fileFilter (file: abort "this is not needed") null' 'lib.fileset.fileFilter: Second argument is of type null, but it should be a path instead.'
|
||||
expectFailure 'fileFilter (file: abort "this is not needed") ./a' 'lib.fileset.fileFilter: Second argument \('"$work"'/a\) is a path that does not exist.'
|
||||
|
||||
|
@ -1083,7 +1107,7 @@ rm -rf -- *
|
|||
|
||||
# String-like values are not supported
|
||||
expectFailure 'fromSource (lib.cleanSource "")' 'lib.fileset.fromSource: The source origin of the argument is a string-like value \(""\), but it should be a path instead.
|
||||
\s*Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.'
|
||||
[[:blank:]]*Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.'
|
||||
|
||||
# Wrong type
|
||||
expectFailure 'fromSource null' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
|
||||
|
@ -1400,10 +1424,10 @@ expectEqual '(import '"$storePath"' { fs = lib.fileset; }).outPath' \""$storePat
|
|||
|
||||
## But it fails if the path is imported with a fetcher that doesn't remove .git (like just using "${./.}")
|
||||
expectFailure 'import "${./.}" { fs = lib.fileset; }' 'lib.fileset.gitTracked: The argument \(.*\) is a store path within a working tree of a Git repository.
|
||||
\s*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
|
||||
\s*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
|
||||
\s*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
|
||||
\s*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
|
||||
[[:blank:]]*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
|
||||
[[:blank:]]*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
|
||||
[[:blank:]]*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
|
||||
[[:blank:]]*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
|
||||
|
||||
## Even with submodules
|
||||
if [[ -n "$fetchGitSupportsSubmodules" ]]; then
|
||||
|
@ -1427,15 +1451,15 @@ if [[ -n "$fetchGitSupportsSubmodules" ]]; then
|
|||
|
||||
## But it fails if the path is imported with a fetcher that doesn't remove .git (like just using "${./.}")
|
||||
expectFailure 'import "${./.}" { fs = lib.fileset; }' 'lib.fileset.gitTrackedWith: The second argument \(.*\) is a store path within a working tree of a Git repository.
|
||||
\s*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
|
||||
\s*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
|
||||
\s*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
|
||||
\s*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
|
||||
[[:blank:]]*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
|
||||
[[:blank:]]*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
|
||||
[[:blank:]]*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
|
||||
[[:blank:]]*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
|
||||
expectFailure 'import "${./.}/sub" { fs = lib.fileset; }' 'lib.fileset.gitTracked: The argument \(.*/sub\) is a store path within a working tree of a Git repository.
|
||||
\s*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
|
||||
\s*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
|
||||
\s*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
|
||||
\s*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
|
||||
[[:blank:]]*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
|
||||
[[:blank:]]*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
|
||||
[[:blank:]]*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
|
||||
[[:blank:]]*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
|
||||
fi
|
||||
rm -rf -- *
|
||||
|
||||
|
|
|
@ -145,6 +145,12 @@ rec {
|
|||
in fix g
|
||||
```
|
||||
|
||||
:::{.note}
|
||||
The argument to the given fixed-point function after applying an overlay will *not* refer to its own return value, but rather to the value after evaluating the overlay function.
|
||||
|
||||
The given fixed-point function is called with a separate argument than if it was evaluated with `lib.fix`.
|
||||
:::
|
||||
|
||||
:::{.example}
|
||||
|
||||
# Extend a fixed-point function with an overlay
|
||||
|
@ -230,13 +236,6 @@ rec {
|
|||
|
||||
fix (extends (final: prev: { c = final.a + final.b; }) f)
|
||||
=> { a = 1; b = 3; c = 4; }
|
||||
|
||||
:::{.note}
|
||||
The argument to the given fixed-point function after applying an overlay will *not* refer to its own return value, but rather to the value after evaluating the overlay function.
|
||||
|
||||
The given fixed-point function is called with a separate argument than if it was evaluated with `lib.fix`.
|
||||
The new argument
|
||||
:::
|
||||
*/
|
||||
extends =
|
||||
# The overlay to apply to the fixed-point function
|
||||
|
|
|
@ -14,15 +14,58 @@
|
|||
* Documentation in the manual, #sec-generators
|
||||
*/
|
||||
{ lib }:
|
||||
with (lib).trivial;
|
||||
|
||||
let
|
||||
libStr = lib.strings;
|
||||
libAttr = lib.attrsets;
|
||||
inherit (lib)
|
||||
addErrorContext
|
||||
assertMsg
|
||||
attrNames
|
||||
concatLists
|
||||
concatMapStringsSep
|
||||
concatStrings
|
||||
concatStringsSep
|
||||
const
|
||||
elem
|
||||
escape
|
||||
filter
|
||||
flatten
|
||||
foldl
|
||||
functionArgs # Note: not the builtin; considers `__functor` in attrsets.
|
||||
gvariant
|
||||
hasInfix
|
||||
head
|
||||
id
|
||||
init
|
||||
isAttrs
|
||||
isBool
|
||||
isDerivation
|
||||
isFloat
|
||||
isFunction # Note: not the builtin; considers `__functor` in attrsets.
|
||||
isInt
|
||||
isList
|
||||
isPath
|
||||
isString
|
||||
last
|
||||
length
|
||||
mapAttrs
|
||||
mapAttrsToList
|
||||
optionals
|
||||
recursiveUpdate
|
||||
replaceStrings
|
||||
reverseList
|
||||
splitString
|
||||
tail
|
||||
toList
|
||||
;
|
||||
|
||||
inherit (lib) isFunction;
|
||||
in
|
||||
|
||||
rec {
|
||||
inherit (lib.strings)
|
||||
escapeNixIdentifier
|
||||
floatToString
|
||||
match
|
||||
split
|
||||
toJSON
|
||||
typeOf
|
||||
;
|
||||
|
||||
## -- HELPER FUNCTIONS & DEFAULTS --
|
||||
|
||||
|
@ -30,13 +73,13 @@ rec {
|
|||
* The builtin `toString` function has some strange defaults,
|
||||
* suitable for bash scripts but not much else.
|
||||
*/
|
||||
mkValueStringDefault = {}: v: with builtins;
|
||||
mkValueStringDefault = {}: v:
|
||||
let err = t: v: abort
|
||||
("generators.mkValueStringDefault: " +
|
||||
"${t} not supported: ${toPretty {} v}");
|
||||
in if isInt v then toString v
|
||||
# convert derivations to store paths
|
||||
else if lib.isDerivation v then toString v
|
||||
else if isDerivation v then toString v
|
||||
# we default to not quoting strings
|
||||
else if isString v then v
|
||||
# isString returns "1", which is not a good default
|
||||
|
@ -53,7 +96,7 @@ rec {
|
|||
# Floats currently can't be converted to precise strings,
|
||||
# condition warning on nix version once this isn't a problem anymore
|
||||
# See https://github.com/NixOS/nix/pull/3480
|
||||
else if isFloat v then libStr.floatToString v
|
||||
else if isFloat v then floatToString v
|
||||
else err "this value is" (toString v);
|
||||
|
||||
|
||||
|
@ -69,7 +112,7 @@ rec {
|
|||
mkKeyValueDefault = {
|
||||
mkValueString ? mkValueStringDefault {}
|
||||
}: sep: k: v:
|
||||
"${libStr.escape [sep] k}${sep}${mkValueString v}";
|
||||
"${escape [sep] k}${sep}${mkValueString v}";
|
||||
|
||||
|
||||
## -- FILE FORMAT GENERATORS --
|
||||
|
@ -86,9 +129,9 @@ rec {
|
|||
}:
|
||||
let mkLine = k: v: indent + mkKeyValue k v + "\n";
|
||||
mkLines = if listsAsDuplicateKeys
|
||||
then k: v: map (mkLine k) (if lib.isList v then v else [v])
|
||||
then k: v: map (mkLine k) (if isList v then v else [v])
|
||||
else k: v: [ (mkLine k v) ];
|
||||
in attrs: libStr.concatStrings (lib.concatLists (libAttr.mapAttrsToList mkLines attrs));
|
||||
in attrs: concatStrings (concatLists (mapAttrsToList mkLines attrs));
|
||||
|
||||
|
||||
/* Generate an INI-style config file from an
|
||||
|
@ -113,7 +156,7 @@ rec {
|
|||
*/
|
||||
toINI = {
|
||||
# apply transformations (e.g. escapes) to section names
|
||||
mkSectionName ? (name: libStr.escape [ "[" "]" ] name),
|
||||
mkSectionName ? (name: escape [ "[" "]" ] name),
|
||||
# format a setting line from key and value
|
||||
mkKeyValue ? mkKeyValueDefault {} "=",
|
||||
# allow lists as values for duplicate keys
|
||||
|
@ -122,8 +165,8 @@ rec {
|
|||
let
|
||||
# map function to string for each key val
|
||||
mapAttrsToStringsSep = sep: mapFn: attrs:
|
||||
libStr.concatStringsSep sep
|
||||
(libAttr.mapAttrsToList mapFn attrs);
|
||||
concatStringsSep sep
|
||||
(mapAttrsToList mapFn attrs);
|
||||
mkSection = sectName: sectValues: ''
|
||||
[${mkSectionName sectName}]
|
||||
'' + toKeyValue { inherit mkKeyValue listsAsDuplicateKeys; } sectValues;
|
||||
|
@ -164,7 +207,7 @@ rec {
|
|||
*/
|
||||
toINIWithGlobalSection = {
|
||||
# apply transformations (e.g. escapes) to section names
|
||||
mkSectionName ? (name: libStr.escape [ "[" "]" ] name),
|
||||
mkSectionName ? (name: escape [ "[" "]" ] name),
|
||||
# format a setting line from key and value
|
||||
mkKeyValue ? mkKeyValueDefault {} "=",
|
||||
# allow lists as values for duplicate keys
|
||||
|
@ -195,12 +238,11 @@ rec {
|
|||
*> name = "edolstra"
|
||||
*/
|
||||
toGitINI = attrs:
|
||||
with builtins;
|
||||
let
|
||||
mkSectionName = name:
|
||||
let
|
||||
containsQuote = libStr.hasInfix ''"'' name;
|
||||
sections = libStr.splitString "." name;
|
||||
containsQuote = hasInfix ''"'' name;
|
||||
sections = splitString "." name;
|
||||
section = head sections;
|
||||
subsections = tail sections;
|
||||
subsection = concatStringsSep "." subsections;
|
||||
|
@ -220,19 +262,19 @@ rec {
|
|||
# generation for multiple ini values
|
||||
mkKeyValue = k: v:
|
||||
let mkKeyValue = mkKeyValueDefault { inherit mkValueString; } " = " k;
|
||||
in concatStringsSep "\n" (map (kv: "\t" + mkKeyValue kv) (lib.toList v));
|
||||
in concatStringsSep "\n" (map (kv: "\t" + mkKeyValue kv) (toList v));
|
||||
|
||||
# converts { a.b.c = 5; } to { "a.b".c = 5; } for toINI
|
||||
gitFlattenAttrs = let
|
||||
recurse = path: value:
|
||||
if isAttrs value && !lib.isDerivation value then
|
||||
lib.mapAttrsToList (name: value: recurse ([ name ] ++ path) value) value
|
||||
if isAttrs value && !isDerivation value then
|
||||
mapAttrsToList (name: value: recurse ([ name ] ++ path) value) value
|
||||
else if length path > 1 then {
|
||||
${concatStringsSep "." (lib.reverseList (tail path))}.${head path} = value;
|
||||
${concatStringsSep "." (reverseList (tail path))}.${head path} = value;
|
||||
} else {
|
||||
${head path} = value;
|
||||
};
|
||||
in attrs: lib.foldl lib.recursiveUpdate { } (lib.flatten (recurse [ ] attrs));
|
||||
in attrs: foldl recursiveUpdate { } (flatten (recurse [ ] attrs));
|
||||
|
||||
toINI_ = toINI { inherit mkKeyValue mkSectionName; };
|
||||
in
|
||||
|
@ -240,25 +282,12 @@ rec {
|
|||
|
||||
# mkKeyValueDefault wrapper that handles dconf INI quirks.
|
||||
# The main differences of the format is that it requires strings to be quoted.
|
||||
mkDconfKeyValue = mkKeyValueDefault { mkValueString = v: toString (lib.gvariant.mkValue v); } "=";
|
||||
mkDconfKeyValue = mkKeyValueDefault { mkValueString = v: toString (gvariant.mkValue v); } "=";
|
||||
|
||||
# Generates INI in dconf keyfile style. See https://help.gnome.org/admin/system-admin-guide/stable/dconf-keyfiles.html.en
|
||||
# for details.
|
||||
toDconfINI = toINI { mkKeyValue = mkDconfKeyValue; };
|
||||
|
||||
/* Generates JSON from an arbitrary (non-function) value.
|
||||
* For more information see the documentation of the builtin.
|
||||
*/
|
||||
toJSON = {}: builtins.toJSON;
|
||||
|
||||
|
||||
/* YAML has been a strict superset of JSON since 1.2, so we
|
||||
* use toJSON. Before it only had a few differences referring
|
||||
* to implicit typing rules, so it should work with older
|
||||
* parsers as well.
|
||||
*/
|
||||
toYAML = toJSON;
|
||||
|
||||
withRecursion =
|
||||
{
|
||||
/* If this option is not null, the given value will stop evaluating at a certain depth */
|
||||
|
@ -266,7 +295,7 @@ rec {
|
|||
/* If this option is true, an error will be thrown, if a certain given depth is exceeded */
|
||||
, throwOnDepthLimit ? true
|
||||
}:
|
||||
assert builtins.isInt depthLimit;
|
||||
assert isInt depthLimit;
|
||||
let
|
||||
specialAttrs = [
|
||||
"__functor"
|
||||
|
@ -275,7 +304,7 @@ rec {
|
|||
"__pretty"
|
||||
];
|
||||
stepIntoAttr = evalNext: name:
|
||||
if builtins.elem name specialAttrs
|
||||
if elem name specialAttrs
|
||||
then id
|
||||
else evalNext;
|
||||
transform = depth:
|
||||
|
@ -284,7 +313,7 @@ rec {
|
|||
then throw "Exceeded maximum eval-depth limit of ${toString depthLimit} while trying to evaluate with `generators.withRecursion'!"
|
||||
else const "<unevaluated>"
|
||||
else id;
|
||||
mapAny = with builtins; depth: v:
|
||||
mapAny = depth: v:
|
||||
let
|
||||
evalNext = x: mapAny (depth + 1) (transform (depth + 1) x);
|
||||
in
|
||||
|
@ -311,9 +340,8 @@ rec {
|
|||
indent ? ""
|
||||
}:
|
||||
let
|
||||
go = indent: v: with builtins;
|
||||
let isPath = v: typeOf v == "path";
|
||||
introSpace = if multiline then "\n${indent} " else " ";
|
||||
go = indent: v:
|
||||
let introSpace = if multiline then "\n${indent} " else " ";
|
||||
outroSpace = if multiline then "\n${indent}" else " ";
|
||||
in if isInt v then toString v
|
||||
# toString loses precision on floats, so we use toJSON instead. This isn't perfect
|
||||
|
@ -322,16 +350,16 @@ rec {
|
|||
else if isFloat v then builtins.toJSON v
|
||||
else if isString v then
|
||||
let
|
||||
lines = filter (v: ! isList v) (builtins.split "\n" v);
|
||||
escapeSingleline = libStr.escape [ "\\" "\"" "\${" ];
|
||||
escapeMultiline = libStr.replaceStrings [ "\${" "''" ] [ "''\${" "'''" ];
|
||||
lines = filter (v: ! isList v) (split "\n" v);
|
||||
escapeSingleline = escape [ "\\" "\"" "\${" ];
|
||||
escapeMultiline = replaceStrings [ "\${" "''" ] [ "''\${" "'''" ];
|
||||
singlelineResult = "\"" + concatStringsSep "\\n" (map escapeSingleline lines) + "\"";
|
||||
multilineResult = let
|
||||
escapedLines = map escapeMultiline lines;
|
||||
# The last line gets a special treatment: if it's empty, '' is on its own line at the "outer"
|
||||
# indentation level. Otherwise, '' is appended to the last line.
|
||||
lastLine = lib.last escapedLines;
|
||||
in "''" + introSpace + concatStringsSep introSpace (lib.init escapedLines)
|
||||
lastLine = last escapedLines;
|
||||
in "''" + introSpace + concatStringsSep introSpace (init escapedLines)
|
||||
+ (if lastLine == "" then outroSpace else introSpace + lastLine) + "''";
|
||||
in
|
||||
if multiline && length lines > 1 then multilineResult else singlelineResult
|
||||
|
@ -342,11 +370,11 @@ rec {
|
|||
else if isList v then
|
||||
if v == [] then "[ ]"
|
||||
else "[" + introSpace
|
||||
+ libStr.concatMapStringsSep introSpace (go (indent + " ")) v
|
||||
+ concatMapStringsSep introSpace (go (indent + " ")) v
|
||||
+ outroSpace + "]"
|
||||
else if isFunction v then
|
||||
let fna = lib.functionArgs v;
|
||||
showFnas = concatStringsSep ", " (libAttr.mapAttrsToList
|
||||
let fna = functionArgs v;
|
||||
showFnas = concatStringsSep ", " (mapAttrsToList
|
||||
(name: hasDefVal: if hasDefVal then name + "?" else name)
|
||||
fna);
|
||||
in if fna == {} then "<function>"
|
||||
|
@ -359,10 +387,10 @@ rec {
|
|||
else if v ? type && v.type == "derivation" then
|
||||
"<derivation ${v.name or "???"}>"
|
||||
else "{" + introSpace
|
||||
+ libStr.concatStringsSep introSpace (libAttr.mapAttrsToList
|
||||
+ concatStringsSep introSpace (mapAttrsToList
|
||||
(name: value:
|
||||
"${libStr.escapeNixIdentifier name} = ${
|
||||
builtins.addErrorContext "while evaluating an attribute `${name}`"
|
||||
"${escapeNixIdentifier name} = ${
|
||||
addErrorContext "while evaluating an attribute `${name}`"
|
||||
(go (indent + " ") value)
|
||||
};") v)
|
||||
+ outroSpace + "}"
|
||||
|
@ -371,9 +399,7 @@ rec {
|
|||
|
||||
# PLIST handling
|
||||
toPlist = {}: v: let
|
||||
isFloat = builtins.isFloat or (x: false);
|
||||
isPath = x: builtins.typeOf x == "path";
|
||||
expr = ind: x: with builtins;
|
||||
expr = ind: x:
|
||||
if x == null then "" else
|
||||
if isBool x then bool ind x else
|
||||
if isInt x then int ind x else
|
||||
|
@ -394,23 +420,23 @@ rec {
|
|||
|
||||
indent = ind: expr "\t${ind}";
|
||||
|
||||
item = ind: libStr.concatMapStringsSep "\n" (indent ind);
|
||||
item = ind: concatMapStringsSep "\n" (indent ind);
|
||||
|
||||
list = ind: x: libStr.concatStringsSep "\n" [
|
||||
list = ind: x: concatStringsSep "\n" [
|
||||
(literal ind "<array>")
|
||||
(item ind x)
|
||||
(literal ind "</array>")
|
||||
];
|
||||
|
||||
attrs = ind: x: libStr.concatStringsSep "\n" [
|
||||
attrs = ind: x: concatStringsSep "\n" [
|
||||
(literal ind "<dict>")
|
||||
(attr ind x)
|
||||
(literal ind "</dict>")
|
||||
];
|
||||
|
||||
attr = let attrFilter = name: value: name != "_module" && value != null;
|
||||
in ind: x: libStr.concatStringsSep "\n" (lib.flatten (lib.mapAttrsToList
|
||||
(name: value: lib.optionals (attrFilter name value) [
|
||||
in ind: x: concatStringsSep "\n" (flatten (mapAttrsToList
|
||||
(name: value: optionals (attrFilter name value) [
|
||||
(key "\t${ind}" name)
|
||||
(expr "\t${ind}" value)
|
||||
]) x));
|
||||
|
@ -426,11 +452,10 @@ ${expr "" v}
|
|||
* the Natural type.
|
||||
*/
|
||||
toDhall = { }@args: v:
|
||||
with builtins;
|
||||
let concatItems = lib.strings.concatStringsSep ", ";
|
||||
let concatItems = concatStringsSep ", ";
|
||||
in if isAttrs v then
|
||||
"{ ${
|
||||
concatItems (lib.attrsets.mapAttrsToList
|
||||
concatItems (mapAttrsToList
|
||||
(key: value: "${key} = ${toDhall args value}") v)
|
||||
} }"
|
||||
else if isList v then
|
||||
|
@ -444,7 +469,7 @@ ${expr "" v}
|
|||
else if v == null then
|
||||
abort "generators.toDhall: cannot convert a null to Dhall"
|
||||
else
|
||||
builtins.toJSON v;
|
||||
toJSON v;
|
||||
|
||||
/*
|
||||
Translate a simple Nix expression to Lua representation with occasional
|
||||
|
@ -488,7 +513,6 @@ ${expr "" v}
|
|||
/* Interpret as variable bindings */
|
||||
asBindings ? false,
|
||||
}@args: v:
|
||||
with builtins;
|
||||
let
|
||||
innerIndent = "${indent} ";
|
||||
introSpace = if multiline then "\n${innerIndent}" else " ";
|
||||
|
@ -501,9 +525,9 @@ ${expr "" v}
|
|||
isLuaInline = { _type ? null, ... }: _type == "lua-inline";
|
||||
|
||||
generatedBindings =
|
||||
assert lib.assertMsg (badVarNames == []) "Bad Lua var names: ${toPretty {} badVarNames}";
|
||||
libStr.concatStrings (
|
||||
lib.attrsets.mapAttrsToList (key: value: "${indent}${key} = ${toLua innerArgs value}\n") v
|
||||
assert assertMsg (badVarNames == []) "Bad Lua var names: ${toPretty {} badVarNames}";
|
||||
concatStrings (
|
||||
mapAttrsToList (key: value: "${indent}${key} = ${toLua innerArgs value}\n") v
|
||||
);
|
||||
|
||||
# https://en.wikibooks.org/wiki/Lua_Programming/variable#Variable_names
|
||||
|
@ -515,7 +539,7 @@ ${expr "" v}
|
|||
else if v == null then
|
||||
"nil"
|
||||
else if isInt v || isFloat v || isString v || isBool v then
|
||||
builtins.toJSON v
|
||||
toJSON v
|
||||
else if isList v then
|
||||
(if v == [ ] then "{}" else
|
||||
"{${introSpace}${concatItems (map (value: "${toLua innerArgs value}") v)}${outroSpace}}")
|
||||
|
@ -525,11 +549,11 @@ ${expr "" v}
|
|||
"(${v.expr})"
|
||||
else if v == { } then
|
||||
"{}"
|
||||
else if libAttr.isDerivation v then
|
||||
else if isDerivation v then
|
||||
''"${toString v}"''
|
||||
else
|
||||
"{${introSpace}${concatItems (
|
||||
lib.attrsets.mapAttrsToList (key: value: "[${builtins.toJSON key}] = ${toLua innerArgs value}") v
|
||||
mapAttrsToList (key: value: "[${toJSON key}] = ${toLua innerArgs value}") v
|
||||
)}${outroSpace}}"
|
||||
)
|
||||
else
|
||||
|
@ -542,4 +566,37 @@ ${expr "" v}
|
|||
mkLuaInline :: String -> AttrSet
|
||||
*/
|
||||
mkLuaInline = expr: { _type = "lua-inline"; inherit expr; };
|
||||
|
||||
in
|
||||
|
||||
# Everything in this attrset is the public interface of the file.
|
||||
{
|
||||
inherit
|
||||
mkDconfKeyValue
|
||||
mkKeyValueDefault
|
||||
mkLuaInline
|
||||
mkValueStringDefault
|
||||
toDconfINI
|
||||
toDhall
|
||||
toGitINI
|
||||
toINI
|
||||
toINIWithGlobalSection
|
||||
toKeyValue
|
||||
toLua
|
||||
toPlist
|
||||
toPretty
|
||||
withRecursion
|
||||
;
|
||||
|
||||
/* Generates JSON from an arbitrary (non-function) value.
|
||||
* For more information see the documentation of the builtin.
|
||||
*/
|
||||
toJSON = {}: toJSON;
|
||||
|
||||
/* YAML has been a strict superset of JSON since 1.2, so we
|
||||
* use toJSON. Before it only had a few differences referring
|
||||
* to implicit typing rules, so it should work with older
|
||||
* parsers as well.
|
||||
*/
|
||||
toYAML = {}: toJSON;
|
||||
}
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
{ lib }:
|
||||
|
||||
with lib;
|
||||
let
|
||||
inherit (lib) mkIf versionAtLeast versionOlder;
|
||||
in
|
||||
{
|
||||
|
||||
|
||||
|
|
|
@ -412,6 +412,11 @@ in mkLicense lset) ({
|
|||
fullName = "Detection Rule License 1.0";
|
||||
};
|
||||
|
||||
dtoa = {
|
||||
spdxId = "dtoa";
|
||||
fullName = "dtoa License";
|
||||
};
|
||||
|
||||
eapl = {
|
||||
fullName = "EPSON AVASYS PUBLIC LICENSE";
|
||||
url = "https://avasys.jp/hp/menu000000700/hpg000000603.htm";
|
||||
|
@ -594,6 +599,11 @@ in mkLicense lset) ({
|
|||
url = "https://fedoraproject.org/wiki/Licensing/GPL_Classpath_Exception";
|
||||
};
|
||||
|
||||
giftware = {
|
||||
spdxId = "Giftware";
|
||||
fullName = "Giftware License";
|
||||
};
|
||||
|
||||
hpnd = {
|
||||
spdxId = "HPND";
|
||||
fullName = "Historic Permission Notice and Disclaimer";
|
||||
|
@ -604,6 +614,11 @@ in mkLicense lset) ({
|
|||
spdxId = "HPND-sell-variant";
|
||||
};
|
||||
|
||||
hpndUc = {
|
||||
spdxId = "HPND-UC";
|
||||
fullName = "Historical Permission Notice and Disclaimer - University of California variant";
|
||||
};
|
||||
|
||||
# Intel's license, seems free
|
||||
iasl = {
|
||||
spdxId = "Intel-ACPI";
|
||||
|
@ -889,6 +904,11 @@ in mkLicense lset) ({
|
|||
url = "https://raw.githubusercontent.com/netdata/netdata/master/web/gui/v2/LICENSE.md";
|
||||
};
|
||||
|
||||
nistSoftware = {
|
||||
spdxId = "NIST-Software";
|
||||
fullName = "NIST Software License";
|
||||
};
|
||||
|
||||
nlpl = {
|
||||
spdxId = "NLPL";
|
||||
fullName = "No Limit Public License";
|
||||
|
@ -1066,6 +1086,11 @@ in mkLicense lset) ({
|
|||
url = "https://sources.debian.org/copyright/license/debianutils/4.9.1/";
|
||||
};
|
||||
|
||||
smlnj = {
|
||||
spdxId = "SMLNJ";
|
||||
fullName = "Standard ML of New Jersey License";
|
||||
};
|
||||
|
||||
sspl = {
|
||||
shortName = "SSPL";
|
||||
fullName = "Server Side Public License";
|
||||
|
@ -1215,6 +1240,11 @@ in mkLicense lset) ({
|
|||
url = "https://mcj.sourceforge.net/authors.html#xfig";
|
||||
};
|
||||
|
||||
xinetd = {
|
||||
spdxId = "xinetd";
|
||||
fullName = "xinetd License";
|
||||
};
|
||||
|
||||
zlib = {
|
||||
spdxId = "Zlib";
|
||||
fullName = "zlib License";
|
||||
|
@ -1229,13 +1259,13 @@ in mkLicense lset) ({
|
|||
spdxId = "ZPL-2.1";
|
||||
fullName = "Zope Public License 2.1";
|
||||
};
|
||||
|
||||
xskat = {
|
||||
spdxId = "XSkat";
|
||||
fullName = "XSkat License";
|
||||
};
|
||||
} // {
|
||||
# TODO: remove legacy aliases
|
||||
agpl3 = {
|
||||
spdxId = "AGPL-3.0";
|
||||
fullName = "GNU Affero General Public License v3.0";
|
||||
deprecated = true;
|
||||
};
|
||||
gpl2 = {
|
||||
spdxId = "GPL-2.0";
|
||||
fullName = "GNU General Public License v2.0";
|
||||
|
|
1737
lib/lists.nix
1737
lib/lists.nix
File diff suppressed because it is too large
Load Diff
|
@ -87,6 +87,10 @@ rec {
|
|||
|
||||
We can inject these into a pattern for the whole of a structured platform,
|
||||
and then match that.
|
||||
|
||||
Example:
|
||||
lib.meta.platformMatch { system = "aarch64-darwin"; } "aarch64-darwin"
|
||||
=> true
|
||||
*/
|
||||
platformMatch = platform: elem: (
|
||||
# Check with simple string comparison if elem was a string.
|
||||
|
@ -112,6 +116,10 @@ rec {
|
|||
platform, or `meta.platforms` is not present.
|
||||
|
||||
2. None of `meta.badPlatforms` pattern matches the given platform.
|
||||
|
||||
Example:
|
||||
lib.meta.availableOn { system = "aarch64-darwin"; } pkg.zsh
|
||||
=> true
|
||||
*/
|
||||
availableOn = platform: pkg:
|
||||
((!pkg?meta.platforms) || any (platformMatch platform) pkg.meta.platforms) &&
|
||||
|
|
|
@ -81,9 +81,9 @@ let
|
|||
, # `class`:
|
||||
# A nominal type for modules. When set and non-null, this adds a check to
|
||||
# make sure that only compatible modules are imported.
|
||||
# This would be remove in the future, Prefer _module.args option instead.
|
||||
class ? null
|
||||
, args ? {}
|
||||
, # This would be remove in the future, Prefer _module.args option instead.
|
||||
args ? {}
|
||||
, # This would be remove in the future, Prefer _module.check option instead.
|
||||
check ? true
|
||||
}:
|
||||
|
@ -1256,7 +1256,78 @@ let
|
|||
(opt.highestPrio or defaultOverridePriority)
|
||||
(f opt.value);
|
||||
|
||||
doRename = { from, to, visible, warn, use, withPriority ? true, condition ? true }:
|
||||
/*
|
||||
Return a module that help declares an option that has been renamed.
|
||||
When a value is defined for the old option, it is forwarded to the `to` option.
|
||||
*/
|
||||
doRename = {
|
||||
# List of strings representing the attribute path of the old option.
|
||||
from,
|
||||
# List of strings representing the attribute path of the new option.
|
||||
to,
|
||||
# Boolean, whether the old option is to be included in documentation.
|
||||
visible,
|
||||
# Whether to warn when a value is defined for the old option.
|
||||
# NOTE: This requires the NixOS assertions module to be imported, so
|
||||
# - this generally does not work in submodules
|
||||
# - this may or may not work outside NixOS
|
||||
warn,
|
||||
# A function that is applied to the option value, to form the value
|
||||
# of the old `from` option.
|
||||
#
|
||||
# For example, the identity function can be passed, to return the option value unchanged.
|
||||
# ```nix
|
||||
# use = x: x;
|
||||
# ```
|
||||
#
|
||||
# To add a warning, you can pass the partially applied `warn` function.
|
||||
# ```nix
|
||||
# use = lib.warn "Obsolete option `${opt.old}' is used. Use `${opt.to}' instead.";
|
||||
# ```
|
||||
use,
|
||||
# Legacy option, enabled by default: whether to preserve the priority of definitions in `old`.
|
||||
withPriority ? true,
|
||||
# A boolean that defines the `mkIf` condition for `to`.
|
||||
# If the condition evaluates to `true`, and the `to` path points into an
|
||||
# `attrsOf (submodule ...)`, then `doRename` would cause an empty module to
|
||||
# be created, even if the `from` option is undefined.
|
||||
# By setting this to an expression that may return `false`, you can inhibit
|
||||
# this undesired behavior.
|
||||
#
|
||||
# Example:
|
||||
#
|
||||
# ```nix
|
||||
# { config, lib, ... }:
|
||||
# let
|
||||
# inherit (lib) mkOption mkEnableOption types doRename;
|
||||
# in
|
||||
# {
|
||||
# options = {
|
||||
#
|
||||
# # Old service
|
||||
# services.foo.enable = mkEnableOption "foo";
|
||||
#
|
||||
# # New multi-instance service
|
||||
# services.foos = mkOption {
|
||||
# type = types.attrsOf (types.submodule …);
|
||||
# };
|
||||
# };
|
||||
# imports = [
|
||||
# (doRename {
|
||||
# from = [ "services" "foo" "bar" ];
|
||||
# to = [ "services" "foos" "" "bar" ];
|
||||
# visible = true;
|
||||
# warn = false;
|
||||
# use = x: x;
|
||||
# withPriority = true;
|
||||
# # Only define services.foos."" if needed. (It's not just about `bar`)
|
||||
# condition = config.services.foo.enable;
|
||||
# })
|
||||
# ];
|
||||
# }
|
||||
# ```
|
||||
condition ? true
|
||||
}:
|
||||
{ config, options, ... }:
|
||||
let
|
||||
fromOpt = getAttrFromPath from options;
|
||||
|
|
|
@ -95,8 +95,7 @@ rec {
|
|||
concatStringsSep "/" ["usr" "local" "bin"]
|
||||
=> "usr/local/bin"
|
||||
*/
|
||||
concatStringsSep = builtins.concatStringsSep or (separator: list:
|
||||
lib.foldl' (x: y: x + y) "" (intersperse separator list));
|
||||
concatStringsSep = builtins.concatStringsSep;
|
||||
|
||||
/* Maps a function over a list of strings and then concatenates the
|
||||
result with the specified separator interspersed between
|
||||
|
@ -1039,30 +1038,32 @@ rec {
|
|||
toInt "3.14"
|
||||
=> error: floating point JSON numbers are not supported
|
||||
*/
|
||||
toInt = str:
|
||||
toInt =
|
||||
let
|
||||
matchStripInput = match "[[:space:]]*(-?[[:digit:]]+)[[:space:]]*";
|
||||
matchLeadingZero = match "0[[:digit:]]+";
|
||||
in
|
||||
str:
|
||||
let
|
||||
# RegEx: Match any leading whitespace, possibly a '-', one or more digits,
|
||||
# and finally match any trailing whitespace.
|
||||
strippedInput = match "[[:space:]]*(-?[[:digit:]]+)[[:space:]]*" str;
|
||||
strippedInput = matchStripInput str;
|
||||
|
||||
# RegEx: Match a leading '0' then one or more digits.
|
||||
isLeadingZero = match "0[[:digit:]]+" (head strippedInput) == [];
|
||||
isLeadingZero = matchLeadingZero (head strippedInput) == [];
|
||||
|
||||
# Attempt to parse input
|
||||
parsedInput = fromJSON (head strippedInput);
|
||||
|
||||
generalError = "toInt: Could not convert ${escapeNixString str} to int.";
|
||||
|
||||
octalAmbigError = "toInt: Ambiguity in interpretation of ${escapeNixString str}"
|
||||
+ " between octal and zero padded integer.";
|
||||
|
||||
in
|
||||
# Error on presence of non digit characters.
|
||||
if strippedInput == null
|
||||
then throw generalError
|
||||
# Error on presence of leading zero/octal ambiguity.
|
||||
else if isLeadingZero
|
||||
then throw octalAmbigError
|
||||
then throw "toInt: Ambiguity in interpretation of ${escapeNixString str} between octal and zero padded integer."
|
||||
# Error if parse function fails.
|
||||
else if !isInt parsedInput
|
||||
then throw generalError
|
||||
|
@ -1090,15 +1091,20 @@ rec {
|
|||
toIntBase10 "3.14"
|
||||
=> error: floating point JSON numbers are not supported
|
||||
*/
|
||||
toIntBase10 = str:
|
||||
toIntBase10 =
|
||||
let
|
||||
matchStripInput = match "[[:space:]]*0*(-?[[:digit:]]+)[[:space:]]*";
|
||||
matchZero = match "0+";
|
||||
in
|
||||
str:
|
||||
let
|
||||
# RegEx: Match any leading whitespace, then match any zero padding,
|
||||
# capture possibly a '-' followed by one or more digits,
|
||||
# and finally match any trailing whitespace.
|
||||
strippedInput = match "[[:space:]]*0*(-?[[:digit:]]+)[[:space:]]*" str;
|
||||
strippedInput = matchStripInput str;
|
||||
|
||||
# RegEx: Match at least one '0'.
|
||||
isZero = match "0+" (head strippedInput) == [];
|
||||
isZero = matchZero (head strippedInput) == [];
|
||||
|
||||
# Attempt to parse input
|
||||
parsedInput = fromJSON (head strippedInput);
|
||||
|
|
|
@ -13,9 +13,96 @@ Alternatively, to run all `lib` tests:
|
|||
|
||||
[nixpkgs]$ nix-build lib/tests/release.nix
|
||||
*/
|
||||
with import ../default.nix;
|
||||
|
||||
let
|
||||
lib = import ../default.nix;
|
||||
|
||||
inherit (lib)
|
||||
allUnique
|
||||
and
|
||||
attrNames
|
||||
attrsets
|
||||
attrsToList
|
||||
bitAnd
|
||||
bitOr
|
||||
bitXor
|
||||
boolToString
|
||||
callPackagesWith
|
||||
callPackageWith
|
||||
cartesianProductOfSets
|
||||
cli
|
||||
composeExtensions
|
||||
composeManyExtensions
|
||||
concatLines
|
||||
concatMapAttrs
|
||||
concatMapStrings
|
||||
concatStrings
|
||||
concatStringsSep
|
||||
const
|
||||
escapeXML
|
||||
evalModules
|
||||
filter
|
||||
fix
|
||||
fold
|
||||
foldAttrs
|
||||
foldl
|
||||
foldl'
|
||||
foldlAttrs
|
||||
foldr
|
||||
functionArgs
|
||||
generators
|
||||
genList
|
||||
getExe
|
||||
getExe'
|
||||
groupBy
|
||||
groupBy'
|
||||
hasAttrByPath
|
||||
hasInfix
|
||||
id
|
||||
isStorePath
|
||||
lazyDerivation
|
||||
lists
|
||||
listToAttrs
|
||||
makeExtensible
|
||||
makeOverridable
|
||||
mapAttrs
|
||||
matchAttrs
|
||||
mergeAttrs
|
||||
meta
|
||||
mkOption
|
||||
mod
|
||||
nameValuePair
|
||||
optionalDrvAttr
|
||||
optionAttrSetToDocList
|
||||
overrideExisting
|
||||
packagesFromDirectoryRecursive
|
||||
pipe
|
||||
range
|
||||
recursiveUpdateUntil
|
||||
removePrefix
|
||||
replicate
|
||||
runTests
|
||||
setFunctionArgs
|
||||
showAttrPath
|
||||
sort
|
||||
sortOn
|
||||
stringLength
|
||||
strings
|
||||
stringToCharacters
|
||||
systems
|
||||
tail
|
||||
take
|
||||
testAllTrue
|
||||
toBaseDigits
|
||||
toHexString
|
||||
toInt
|
||||
toIntBase10
|
||||
toShellVars
|
||||
types
|
||||
updateManyAttrsByPath
|
||||
versions
|
||||
;
|
||||
|
||||
testingThrow = expr: {
|
||||
expr = (builtins.tryEval (builtins.seq expr "didn't throw"));
|
||||
expected = { success = false; value = false; };
|
||||
|
@ -55,6 +142,24 @@ runTests {
|
|||
expected = { a = false; b = false; c = true; };
|
||||
};
|
||||
|
||||
testCallPackageWithOverridePreservesArguments =
|
||||
let
|
||||
f = { a ? 0, b }: {};
|
||||
f' = callPackageWith { a = 1; b = 2; } f {};
|
||||
in {
|
||||
expr = functionArgs f'.override;
|
||||
expected = functionArgs f;
|
||||
};
|
||||
|
||||
testCallPackagesWithOverridePreservesArguments =
|
||||
let
|
||||
f = { a ? 0, b }: { nested = {}; };
|
||||
f' = callPackagesWith { a = 1; b = 2; } f {};
|
||||
in {
|
||||
expr = functionArgs f'.nested.override;
|
||||
expected = functionArgs f;
|
||||
};
|
||||
|
||||
# TRIVIAL
|
||||
|
||||
testId = {
|
||||
|
@ -1973,6 +2078,24 @@ runTests {
|
|||
}).drvPath;
|
||||
};
|
||||
|
||||
testLazyDerivationMultiOutputReturnsDerivationAttrs = let
|
||||
derivation = {
|
||||
type = "derivation";
|
||||
outputs = ["out" "dev"];
|
||||
dev = "test dev";
|
||||
out = "test out";
|
||||
outPath = "test outPath";
|
||||
outputName = "out";
|
||||
drvPath = "test drvPath";
|
||||
name = "test name";
|
||||
system = "test system";
|
||||
meta.position = "/hi:23";
|
||||
};
|
||||
in {
|
||||
expr = lazyDerivation { inherit derivation; outputs = ["out" "dev"]; passthru.meta.position = "/hi:23"; };
|
||||
expected = derivation;
|
||||
};
|
||||
|
||||
testTypeDescriptionInt = {
|
||||
expr = (with types; int).description;
|
||||
expected = "signed integer";
|
||||
|
|
|
@ -6,12 +6,19 @@
|
|||
|
||||
{ config, lib, ... }:
|
||||
|
||||
with lib;
|
||||
let
|
||||
inherit (lib)
|
||||
mkAliasOptionModule
|
||||
mkForce
|
||||
mkOption
|
||||
types
|
||||
;
|
||||
in
|
||||
|
||||
{
|
||||
options = {
|
||||
# A simple boolean option that can be enabled or disabled.
|
||||
enable = lib.mkOption {
|
||||
enable = mkOption {
|
||||
type = types.nullOr types.bool;
|
||||
default = null;
|
||||
example = true;
|
||||
|
@ -41,7 +48,7 @@ with lib;
|
|||
# should override the next import.
|
||||
( { config, lib, ... }:
|
||||
{
|
||||
enableAlias = lib.mkForce false;
|
||||
enableAlias = mkForce false;
|
||||
}
|
||||
)
|
||||
|
||||
|
|
|
@ -6,12 +6,19 @@
|
|||
|
||||
{ config, lib, ... }:
|
||||
|
||||
with lib;
|
||||
let
|
||||
inherit (lib)
|
||||
mkAliasOptionModule
|
||||
mkDefault
|
||||
mkOption
|
||||
types
|
||||
;
|
||||
in
|
||||
|
||||
{
|
||||
options = {
|
||||
# A simple boolean option that can be enabled or disabled.
|
||||
enable = lib.mkOption {
|
||||
enable = mkOption {
|
||||
type = types.nullOr types.bool;
|
||||
default = null;
|
||||
example = true;
|
||||
|
@ -41,7 +48,7 @@ with lib;
|
|||
# should be able to be overridden by the next import.
|
||||
( { config, lib, ... }:
|
||||
{
|
||||
enableAlias = lib.mkDefault false;
|
||||
enableAlias = mkDefault false;
|
||||
}
|
||||
)
|
||||
|
||||
|
|
|
@ -2,7 +2,14 @@
|
|||
, extendModules
|
||||
, ...
|
||||
}:
|
||||
with lib;
|
||||
|
||||
let
|
||||
inherit (lib)
|
||||
mkOption
|
||||
mkOverride
|
||||
types
|
||||
;
|
||||
in
|
||||
{
|
||||
imports = [
|
||||
|
||||
|
|
|
@ -9,60 +9,7 @@
|
|||
let
|
||||
lib = import ../.;
|
||||
testWithNix = nix:
|
||||
pkgs.runCommand "nixpkgs-lib-tests-nix-${nix.version}" {
|
||||
buildInputs = [
|
||||
(import ./check-eval.nix)
|
||||
(import ./maintainers.nix {
|
||||
inherit pkgs;
|
||||
lib = import ../.;
|
||||
})
|
||||
(import ./teams.nix {
|
||||
inherit pkgs;
|
||||
lib = import ../.;
|
||||
})
|
||||
(import ../path/tests {
|
||||
inherit pkgs;
|
||||
})
|
||||
];
|
||||
nativeBuildInputs = [
|
||||
nix
|
||||
pkgs.gitMinimal
|
||||
] ++ lib.optional pkgs.stdenv.isLinux pkgs.inotify-tools;
|
||||
strictDeps = true;
|
||||
} ''
|
||||
datadir="${nix}/share"
|
||||
export TEST_ROOT=$(pwd)/test-tmp
|
||||
export HOME=$(mktemp -d)
|
||||
export NIX_BUILD_HOOK=
|
||||
export NIX_CONF_DIR=$TEST_ROOT/etc
|
||||
export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
|
||||
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
|
||||
export NIX_STATE_DIR=$TEST_ROOT/var/nix
|
||||
export NIX_STORE_DIR=$TEST_ROOT/store
|
||||
export PAGER=cat
|
||||
cacheDir=$TEST_ROOT/binary-cache
|
||||
|
||||
nix-store --init
|
||||
|
||||
cp -r ${../.} lib
|
||||
echo "Running lib/tests/modules.sh"
|
||||
bash lib/tests/modules.sh
|
||||
|
||||
echo "Running lib/tests/filesystem.sh"
|
||||
TEST_LIB=$PWD/lib bash lib/tests/filesystem.sh
|
||||
|
||||
echo "Running lib/tests/sources.sh"
|
||||
TEST_LIB=$PWD/lib bash lib/tests/sources.sh
|
||||
|
||||
echo "Running lib/fileset/tests.sh"
|
||||
TEST_LIB=$PWD/lib bash lib/fileset/tests.sh
|
||||
|
||||
echo "Running lib/tests/systems.nix"
|
||||
[[ $(nix-instantiate --eval --strict lib/tests/systems.nix | tee /dev/stderr) == '[ ]' ]];
|
||||
|
||||
mkdir $out
|
||||
echo success > $out/${nix.version}
|
||||
'';
|
||||
import ./test-with-nix.nix { inherit lib nix pkgs; };
|
||||
|
||||
in
|
||||
pkgs.symlinkJoin {
|
||||
|
|
|
@ -0,0 +1,76 @@
|
|||
/**
|
||||
* Instantiate the library tests for a given Nix version.
|
||||
*
|
||||
* IMPORTANT:
|
||||
* This is used by the github.com/NixOS/nix CI.
|
||||
*
|
||||
* Try not to change the interface of this file, or if you need to, ping the
|
||||
* Nix maintainers for help. Thank you!
|
||||
*/
|
||||
{
|
||||
pkgs,
|
||||
lib,
|
||||
# Only ever use this nix; see comment at top
|
||||
nix,
|
||||
}:
|
||||
|
||||
pkgs.runCommand "nixpkgs-lib-tests-nix-${nix.version}" {
|
||||
buildInputs = [
|
||||
(import ./check-eval.nix)
|
||||
(import ./maintainers.nix {
|
||||
inherit pkgs;
|
||||
lib = import ../.;
|
||||
})
|
||||
(import ./teams.nix {
|
||||
inherit pkgs;
|
||||
lib = import ../.;
|
||||
})
|
||||
(import ../path/tests {
|
||||
inherit pkgs;
|
||||
})
|
||||
];
|
||||
nativeBuildInputs = [
|
||||
nix
|
||||
pkgs.gitMinimal
|
||||
] ++ lib.optional pkgs.stdenv.isLinux pkgs.inotify-tools;
|
||||
strictDeps = true;
|
||||
} ''
|
||||
datadir="${nix}/share"
|
||||
export TEST_ROOT=$(pwd)/test-tmp
|
||||
export HOME=$(mktemp -d)
|
||||
export NIX_BUILD_HOOK=
|
||||
export NIX_CONF_DIR=$TEST_ROOT/etc
|
||||
export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
|
||||
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
|
||||
export NIX_STATE_DIR=$TEST_ROOT/var/nix
|
||||
export NIX_STORE_DIR=$TEST_ROOT/store
|
||||
export PAGER=cat
|
||||
cacheDir=$TEST_ROOT/binary-cache
|
||||
|
||||
nix-store --init
|
||||
|
||||
cp -r ${../.} lib
|
||||
echo "Running lib/tests/modules.sh"
|
||||
bash lib/tests/modules.sh
|
||||
|
||||
echo "Checking lib.version"
|
||||
nix-instantiate lib -A version --eval || {
|
||||
echo "lib.version does not evaluate when lib is isolated from the rest of the nixpkgs tree"
|
||||
exit 1
|
||||
}
|
||||
|
||||
echo "Running lib/tests/filesystem.sh"
|
||||
TEST_LIB=$PWD/lib bash lib/tests/filesystem.sh
|
||||
|
||||
echo "Running lib/tests/sources.sh"
|
||||
TEST_LIB=$PWD/lib bash lib/tests/sources.sh
|
||||
|
||||
echo "Running lib/fileset/tests.sh"
|
||||
TEST_LIB=$PWD/lib bash lib/fileset/tests.sh
|
||||
|
||||
echo "Running lib/tests/systems.nix"
|
||||
[[ $(nix-instantiate --eval --strict lib/tests/systems.nix | tee /dev/stderr) == '[ ]' ]];
|
||||
|
||||
mkdir $out
|
||||
echo success > $out/${nix.version}
|
||||
''
|
960
lib/trivial.nix
960
lib/trivial.nix
File diff suppressed because it is too large
Load Diff
|
@ -9,7 +9,7 @@ rec {
|
|||
splitVersion "1.2.3"
|
||||
=> ["1" "2" "3"]
|
||||
*/
|
||||
splitVersion = builtins.splitVersion or (lib.splitString ".");
|
||||
splitVersion = builtins.splitVersion;
|
||||
|
||||
/* Get the major version string from a string.
|
||||
|
||||
|
|
|
@ -1,39 +0,0 @@
|
|||
/* Helper function to implement a fallback for the bit operators
|
||||
`bitAnd`, `bitOr` and `bitXor` on older nix version.
|
||||
See ./trivial.nix
|
||||
*/
|
||||
f: x: y:
|
||||
let
|
||||
# (intToBits 6) -> [ 0 1 1 ]
|
||||
intToBits = x:
|
||||
if x == 0 || x == -1 then
|
||||
[]
|
||||
else
|
||||
let
|
||||
headbit = if (x / 2) * 2 != x then 1 else 0; # x & 1
|
||||
tailbits = if x < 0 then ((x + 1) / 2) - 1 else x / 2; # x >> 1
|
||||
in
|
||||
[headbit] ++ (intToBits tailbits);
|
||||
|
||||
# (bitsToInt [ 0 1 1 ] 0) -> 6
|
||||
# (bitsToInt [ 0 1 0 ] 1) -> -6
|
||||
bitsToInt = l: signum:
|
||||
if l == [] then
|
||||
(if signum == 0 then 0 else -1)
|
||||
else
|
||||
(builtins.head l) + (2 * (bitsToInt (builtins.tail l) signum));
|
||||
|
||||
xsignum = if x < 0 then 1 else 0;
|
||||
ysignum = if y < 0 then 1 else 0;
|
||||
zipListsWith' = fst: snd:
|
||||
if fst==[] && snd==[] then
|
||||
[]
|
||||
else if fst==[] then
|
||||
[(f xsignum (builtins.head snd))] ++ (zipListsWith' [] (builtins.tail snd))
|
||||
else if snd==[] then
|
||||
[(f (builtins.head fst) ysignum )] ++ (zipListsWith' (builtins.tail fst) [] )
|
||||
else
|
||||
[(f (builtins.head fst) (builtins.head snd))] ++ (zipListsWith' (builtins.tail fst) (builtins.tail snd));
|
||||
in
|
||||
assert (builtins.isInt x) && (builtins.isInt y);
|
||||
bitsToInt (zipListsWith' (intToBits x) (intToBits y)) (f xsignum ysignum)
|
File diff suppressed because it is too large
Load Diff
|
@ -6,8 +6,9 @@ binaries (without the reliance on external inputs):
|
|||
- `bootstrap-tools`: an archive with the compiler toolchain and other
|
||||
helper tools enough to build the rest of the `nixpkgs`.
|
||||
- initial binaries needed to unpack `bootstrap-tools.*`. On `linux`
|
||||
it's just `busybox`, on `darwin` it's `sh`, `bzip2`, `mkdir` and
|
||||
`cpio`. These binaries can be executed directly from the store.
|
||||
it's just `busybox`, on `darwin` it is unpack.nar.xz which contains
|
||||
the binaries and script needed to unpack the tools. These binaries
|
||||
can be executed directly from the store.
|
||||
|
||||
These are called "bootstrap files".
|
||||
|
||||
|
@ -39,7 +40,7 @@ target:
|
|||
```
|
||||
|
||||
To validate cross-targets `binfmt` `NixOS` helper can be useful.
|
||||
For `riscv64-unknown-linux-gnu` the `/etc/nixox/configuraqtion.nix`
|
||||
For `riscv64-unknown-linux-gnu` the `/etc/nixos/configuration.nix`
|
||||
entry would be `boot.binfmt.emulatedSystems = [ "riscv64-linux" ]`.
|
||||
|
||||
3. Propose the commit as a PR to update bootstrap tarballs, tag people
|
||||
|
@ -74,12 +75,3 @@ There are two types of bootstrap files:
|
|||
The `.build` job contains `/on-server/` subdirectory with binaries to
|
||||
be uploaded to `tarballs.nixos.org`.
|
||||
The files are uploaded to `tarballs.nixos.org` by writers to `S3` store.
|
||||
|
||||
## TODOs
|
||||
|
||||
- `pkgs/stdenv/darwin` file layout is slightly different from
|
||||
`pkgs/stdenv/linux`. Once `linux` seed update becomes a routine we can
|
||||
bring `darwin` in sync if it's feasible.
|
||||
- `darwin` definition of `.build` `on-server/` directory layout differs
|
||||
and should be updated.
|
||||
|
||||
|
|
|
@ -6,6 +6,8 @@
|
|||
#! nix-shell -p nix
|
||||
#! nix-shell -p jq
|
||||
|
||||
set -o pipefail
|
||||
|
||||
# How the refresher works:
|
||||
#
|
||||
# For a given list of <targets>:
|
||||
|
@ -15,6 +17,9 @@
|
|||
# 4. calculate hashes and craft the commit message with the details on
|
||||
# how to upload the result to 'tarballs.nixos.org'
|
||||
|
||||
scratch_dir=$(mktemp -d)
|
||||
trap 'rm -rf -- "${scratch_dir}"' EXIT
|
||||
|
||||
usage() {
|
||||
cat >&2 <<EOF
|
||||
Usage:
|
||||
|
@ -67,14 +72,8 @@ NATIVE_TARGETS=(
|
|||
i686-unknown-linux-gnu
|
||||
x86_64-unknown-linux-gnu
|
||||
x86_64-unknown-linux-musl
|
||||
|
||||
# TODO: add darwin here once a few prerequisites are satisfied:
|
||||
# - bootstrap-files are factored out into a separate file
|
||||
# - the build artifacts are factored out into an `on-server`
|
||||
# directory. Right onw if does not match `linux` layout.
|
||||
#
|
||||
#aarch64-apple-darwin
|
||||
#x86_64-apple-darwin
|
||||
aarch64-apple-darwin
|
||||
x86_64-apple-darwin
|
||||
)
|
||||
|
||||
is_native() {
|
||||
|
@ -93,6 +92,7 @@ CROSS_TARGETS=(
|
|||
mips64el-unknown-linux-gnuabi64
|
||||
mips64el-unknown-linux-gnuabin32
|
||||
mipsel-unknown-linux-gnu
|
||||
powerpc64-unknown-linux-gnuabielfv2
|
||||
powerpc64le-unknown-linux-gnu
|
||||
riscv64-unknown-linux-gnu
|
||||
)
|
||||
|
@ -105,6 +105,20 @@ is_cross() {
|
|||
return 1
|
||||
}
|
||||
|
||||
nar_sri_get() {
|
||||
local restore_path store_path
|
||||
((${#@} != 2)) && die "nar_sri_get /path/to/name.nar.xz name"
|
||||
restore_path="${scratch_dir}/$2"
|
||||
xz -d < "$1" | nix-store --restore "${restore_path}"
|
||||
[[ $? -ne 0 ]] && die "Failed to unpack '$1'"
|
||||
|
||||
store_path=$(nix-store --add "${restore_path}")
|
||||
[[ $? -ne 0 ]] && die "Failed to add '$restore_path' to store"
|
||||
rm -rf -- "${restore_path}"
|
||||
|
||||
nix-hash --to-sri "$(nix-store --query --hash "${store_path}")"
|
||||
}
|
||||
|
||||
# collect passed options
|
||||
|
||||
targets=()
|
||||
|
@ -221,6 +235,7 @@ EOF
|
|||
case "$fname" in
|
||||
bootstrap-tools.tar.xz) attr=bootstrapTools ;;
|
||||
busybox) attr=$fname ;;
|
||||
unpack.nar.xz) attr=unpack ;;
|
||||
*) die "Don't know how to map '$fname' to attribute name. Please update me."
|
||||
esac
|
||||
|
||||
|
@ -228,18 +243,31 @@ EOF
|
|||
executable_nix=
|
||||
if [[ -x "$p" ]]; then
|
||||
executable_arg="--executable"
|
||||
executable_nix=" executable = true;"
|
||||
executable_nix="executable = true;"
|
||||
fi
|
||||
unpack_nix=
|
||||
name_nix=
|
||||
if [[ $fname = *.nar.xz ]]; then
|
||||
unpack_nix="unpack = true;"
|
||||
name_nix="name = \"${fname%.nar.xz}\";"
|
||||
sri=$(nar_sri_get "$p" "${fname%.nar.xz}")
|
||||
[[ $? -ne 0 ]] && die "Failed to get hash of '$p'"
|
||||
else
|
||||
sha256=$(nix-prefetch-url $executable_arg --name "$fname" "file://$p")
|
||||
[[ $? -ne 0 ]] && die "Failed to get the hash for '$p'"
|
||||
sri=$(nix-hash --to-sri "sha256:$sha256")
|
||||
[[ $? -ne 0 ]] && die "Failed to convert '$sha256' hash to an SRI form"
|
||||
fi
|
||||
sha256=$(nix-prefetch-url $executable_arg --name "$fname" "file://$p")
|
||||
[[ $? -ne 0 ]] && die "Failed to get the hash for '$p'"
|
||||
sri=$(nix-hash --to-sri "sha256:$sha256")
|
||||
[[ $? -ne 0 ]] && die "Failed to convert '$sha256' hash to an SRI form"
|
||||
|
||||
# individual file entries
|
||||
cat <<EOF
|
||||
$attr = import <nix/fetchurl.nix> {
|
||||
url = "http://tarballs.nixos.org/${s3_prefix}/${nixpkgs_revision}/$fname";
|
||||
hash = "${sri}";$(printf "\n%s" "${executable_nix}")
|
||||
hash = "${sri}";$(
|
||||
[[ -n ${executable_nix} ]] && printf "\n %s" "${executable_nix}"
|
||||
[[ -n ${name_nix} ]] && printf "\n %s" "${name_nix}"
|
||||
[[ -n ${unpack_nix} ]] && printf "\n %s" "${unpack_nix}"
|
||||
)
|
||||
};
|
||||
EOF
|
||||
done
|
||||
|
|
|
@ -1,9 +1,8 @@
|
|||
# Evaluate `release.nix' like Hydra would. Too bad nix-instantiate
|
||||
# can't to do this.
|
||||
|
||||
with import ../../lib;
|
||||
# Evaluate `release.nix' like Hydra would. Too bad nix-instantiate can't to do this.
|
||||
|
||||
let
|
||||
inherit (import ../../lib) isDerivation mapAttrs;
|
||||
|
||||
trace = if builtins.getEnv "VERBOSE" == "1" then builtins.trace else (x: y: y);
|
||||
|
||||
rel = removeAttrs (import ../../pkgs/top-level/release.nix { }) [ "tarball" "unstable" "xbursttools" ];
|
||||
|
|
|
@ -1,11 +1,22 @@
|
|||
# This expression returns a list of all fetchurl calls used by ‘expr’.
|
||||
|
||||
with import ../.. { };
|
||||
with lib;
|
||||
|
||||
{ expr }:
|
||||
{ expr, lib ? import ../../lib }:
|
||||
|
||||
let
|
||||
inherit (lib)
|
||||
addErrorContext
|
||||
attrNames
|
||||
concatLists
|
||||
const
|
||||
filter
|
||||
genericClosure
|
||||
isAttrs
|
||||
isDerivation
|
||||
isList
|
||||
mapAttrsToList
|
||||
optional
|
||||
optionals
|
||||
;
|
||||
|
||||
root = expr;
|
||||
|
||||
|
|
|
@ -2,8 +2,10 @@
|
|||
let
|
||||
pkgs = import ../../.. {};
|
||||
inherit (pkgs) lib;
|
||||
getDeps = _: pkg: {
|
||||
deps = builtins.filter (x: x != null) (map (x: x.pname or null) (pkg.propagatedBuildInputs or []));
|
||||
getDeps = _: pkg: let
|
||||
pname = pkg.pname or null;
|
||||
in {
|
||||
deps = builtins.filter (x: x != null && x != pname) (map (x: x.pname or null) (pkg.propagatedBuildInputs or []));
|
||||
broken = (pkg.meta.hydraPlatforms or [null]) == [];
|
||||
};
|
||||
in
|
||||
|
|
|
@ -83,7 +83,7 @@ import Prelude hiding (id)
|
|||
import Data.List (sortOn)
|
||||
import Control.Concurrent.Async (concurrently)
|
||||
import Control.Exception (evaluate)
|
||||
import qualified Data.IntMap.Strict as IntMap
|
||||
import qualified Data.IntMap.Lazy as IntMap
|
||||
import qualified Data.IntSet as IntSet
|
||||
import Data.Bifunctor (second)
|
||||
import Data.Data (Proxy)
|
||||
|
@ -299,7 +299,7 @@ calculateReverseDependencies depMap =
|
|||
Map.fromDistinctAscList $ zip keys (zip (rdepMap False) (rdepMap True))
|
||||
where
|
||||
-- This code tries to efficiently invert the dependency map and calculate
|
||||
-- it’s transitive closure by internally identifying every pkg with it’s index
|
||||
-- its transitive closure by internally identifying every pkg with its index
|
||||
-- in the package list and then using memoization.
|
||||
keys :: [PkgName]
|
||||
keys = Map.keys depMap
|
||||
|
@ -317,11 +317,11 @@ calculateReverseDependencies depMap =
|
|||
intDeps :: [(Int, (Bool, [Int]))]
|
||||
intDeps = zip [0..] (fmap depInfoToIdx depInfos)
|
||||
|
||||
rdepMap onlyUnbroken = IntSet.size <$> resultList
|
||||
rdepMap onlyUnbroken = IntSet.size <$> IntMap.elems resultList
|
||||
where
|
||||
resultList = go <$> [0..]
|
||||
resultList = IntMap.fromDistinctAscList [(i, go i) | i <- [0..length keys - 1]]
|
||||
oneStepMap = IntMap.fromListWith IntSet.union $ (\(key,(_,deps)) -> (,IntSet.singleton key) <$> deps) <=< filter (\(_, (broken,_)) -> not (broken && onlyUnbroken)) $ intDeps
|
||||
go pkg = IntSet.unions (oneStep:((resultList !!) <$> IntSet.toList oneStep))
|
||||
go pkg = IntSet.unions (oneStep:((resultList IntMap.!) <$> IntSet.toList oneStep))
|
||||
where oneStep = IntMap.findWithDefault mempty pkg oneStepMap
|
||||
|
||||
-- | Generate a mapping of Hydra job names to maintainer GitHub handles. Calls
|
||||
|
|
|
@ -66,10 +66,6 @@ done
|
|||
|
||||
HACKAGE2NIX="${HACKAGE2NIX:-hackage2nix}"
|
||||
|
||||
# To prevent hackage2nix fails because of encoding.
|
||||
# See: https://github.com/NixOS/nixpkgs/pull/122023
|
||||
export LC_ALL=C.UTF-8
|
||||
|
||||
config_dir=pkgs/development/haskell-modules/configuration-hackage2nix
|
||||
|
||||
run_hackage2nix() {
|
||||
|
|
|
@ -8,7 +8,7 @@ set -eu -o pipefail
|
|||
# (should be capitalized like the display name)
|
||||
SOLVER=LTS
|
||||
# Stackage solver verson, if any. Use latest if empty
|
||||
VERSION=21
|
||||
VERSION=
|
||||
TMP_TEMPLATE=update-stackage.XXXXXXX
|
||||
readonly SOLVER
|
||||
readonly VERSION
|
||||
|
|
|
@ -0,0 +1,31 @@
|
|||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i bash -p gnutar jq reuse
|
||||
set -eu
|
||||
cd "$(dirname "$(readlink -f "$0")")"/../../..
|
||||
|
||||
TMPDIR=$(mktemp -d)
|
||||
trap 'rm -rf $TMPDIR' EXIT
|
||||
|
||||
echo "# Prebuilding sources..."
|
||||
nix-build -A kdePackages.sources --no-link || true
|
||||
|
||||
echo "# Evaluating sources..."
|
||||
declare -A sources
|
||||
eval "$(nix-instantiate --eval -A kdePackages.sources --json --strict | jq 'to_entries[] | "sources[" + .key + "]=" + .value' -r)"
|
||||
|
||||
echo "# Collecting licenses..."
|
||||
for k in "${!sources[@]}"; do
|
||||
echo "- Processing $k..."
|
||||
|
||||
if [ ! -f "${sources[$k]}" ]; then
|
||||
echo "Not found!"
|
||||
continue
|
||||
fi
|
||||
|
||||
mkdir "$TMPDIR/$k"
|
||||
tar -C "$TMPDIR/$k" -xf "${sources[$k]}"
|
||||
|
||||
(cd "$TMPDIR/$k"; reuse lint --json) | jq --arg name "$k" '{$name: .summary.used_licenses | sort}' -c > "$TMPDIR/$k.json"
|
||||
done
|
||||
|
||||
jq -s 'add' -S "$TMPDIR"/*.json > pkgs/kde/generated/licenses.json
|
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i nu -p nushell
|
||||
cd $"($env.FILE_PWD)/../../.."
|
||||
|
||||
mkdir logs
|
||||
nix-env -qaP -f . -A kdePackages --json --out-path | from json | values | par-each { |it|
|
||||
echo $"Processing ($it.pname)..."
|
||||
if "outputs" in $it {
|
||||
nix-store --read-log $it.outputs.out | save -f $"logs/($it.pname).log"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,36 @@
|
|||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i python3 -p "python3.withPackages(ps: [ ps.click ps.pyyaml ])"
|
||||
import pathlib
|
||||
|
||||
import click
|
||||
|
||||
import utils
|
||||
|
||||
@click.command
|
||||
@click.argument(
|
||||
"repo-metadata",
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
resolve_path=True,
|
||||
path_type=pathlib.Path,
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"--nixpkgs",
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
resolve_path=True,
|
||||
writable=True,
|
||||
path_type=pathlib.Path,
|
||||
),
|
||||
default=pathlib.Path(__file__).parent.parent.parent.parent
|
||||
)
|
||||
def main(repo_metadata: pathlib.Path, nixpkgs: pathlib.Path):
|
||||
metadata = utils.KDERepoMetadata.from_repo_metadata_checkout(repo_metadata)
|
||||
out_dir = nixpkgs / "pkgs/kde/generated"
|
||||
metadata.write_json(out_dir)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main() # type: ignore
|
|
@ -0,0 +1,127 @@
|
|||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i python3 -p python3
|
||||
import pathlib
|
||||
|
||||
OK_MISSING = {
|
||||
# we don't use precompiled QML
|
||||
'Qt6QuickCompiler',
|
||||
'Qt6QmlCompilerPlusPrivate',
|
||||
# usually used for version numbers
|
||||
'Git',
|
||||
# useless by itself, will warn if something else is not found
|
||||
'PkgConfig',
|
||||
# license verification
|
||||
'ReuseTool',
|
||||
# dev only
|
||||
'ClangFormat',
|
||||
# doesn't exist
|
||||
'Qt6X11Extras',
|
||||
}
|
||||
|
||||
OK_MISSING_BY_PACKAGE = {
|
||||
"angelfish": {
|
||||
"Qt6Feedback", # we don't have it
|
||||
},
|
||||
"attica": {
|
||||
"Python3", # only used for license checks
|
||||
},
|
||||
"discover": {
|
||||
"rpm-ostree-1", # we don't have rpm-ostree (duh)
|
||||
"Snapd", # we don't have snaps and probably never will
|
||||
},
|
||||
"elisa": {
|
||||
"UPNPQT", # upstream says it's broken
|
||||
},
|
||||
"extra-cmake-modules": {
|
||||
"Sphinx", # only used for docs, bloats closure size
|
||||
"QCollectionGenerator"
|
||||
},
|
||||
"kio-extras-kf5": {
|
||||
"KDSoapWSDiscoveryClient", # actually vendored on KF5 version
|
||||
},
|
||||
"kitinerary": {
|
||||
"OsmTools", # used for map data updates, we use prebuilt
|
||||
},
|
||||
"kosmindoormap": {
|
||||
"OsmTools", # same
|
||||
"Protobuf",
|
||||
},
|
||||
"kpty": {
|
||||
"UTEMPTER", # we don't have it and it probably wouldn't work anyway
|
||||
},
|
||||
"kpublictransport": {
|
||||
"OsmTools", # same
|
||||
"PolyClipping",
|
||||
"Protobuf",
|
||||
},
|
||||
"krfb": {
|
||||
"Qt6XkbCommonSupport", # not real
|
||||
},
|
||||
"kuserfeedback": {
|
||||
"Qt6Svg", # all used for backend console stuff we don't ship
|
||||
"QmlLint",
|
||||
"Qt6Charts",
|
||||
"FLEX",
|
||||
"BISON",
|
||||
"Php",
|
||||
"PhpUnit",
|
||||
},
|
||||
"kwin": {
|
||||
"display-info", # newer versions identify as libdisplay-info
|
||||
},
|
||||
"mlt": {
|
||||
"Qt5", # intentionally disabled
|
||||
"SWIG",
|
||||
},
|
||||
"plasma-desktop": {
|
||||
"scim", # upstream is dead, not packaged in Nixpkgs
|
||||
},
|
||||
"powerdevil": {
|
||||
"DDCUtil", # cursed, intentionally disabled
|
||||
},
|
||||
"pulseaudio-qt": {
|
||||
"Qt6Qml", # tests only
|
||||
"Qt6Quick",
|
||||
},
|
||||
"syntax-highlighting": {
|
||||
"XercesC", # only used for extra validation at build time
|
||||
}
|
||||
}
|
||||
|
||||
def main():
|
||||
here = pathlib.Path(__file__).parent.parent.parent.parent
|
||||
logs = (here / "logs").glob("*.log")
|
||||
|
||||
for log in sorted(logs):
|
||||
pname = log.stem
|
||||
|
||||
missing = []
|
||||
is_in_block = False
|
||||
with log.open(errors="replace") as fd:
|
||||
for line in fd:
|
||||
line = line.strip()
|
||||
if line.startswith("-- No package '"):
|
||||
package = line.removeprefix("-- No package '").removesuffix("' found")
|
||||
missing.append(package)
|
||||
if line == "-- The following OPTIONAL packages have not been found:" or line == "-- The following RECOMMENDED packages have not been found:":
|
||||
is_in_block = True
|
||||
elif line.startswith("--") and is_in_block:
|
||||
is_in_block = False
|
||||
elif line.startswith("*") and is_in_block:
|
||||
package = line.removeprefix("* ")
|
||||
missing.append(package)
|
||||
|
||||
missing = {
|
||||
package
|
||||
for package in missing
|
||||
if not any(package.startswith(i) for i in OK_MISSING | OK_MISSING_BY_PACKAGE.get(pname, set()))
|
||||
}
|
||||
|
||||
if missing:
|
||||
print(pname + ":")
|
||||
for line in missing:
|
||||
print(" -", line)
|
||||
print()
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,121 @@
|
|||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i python3 -p "python3.withPackages(ps: [ ps.beautifulsoup4 ps.click ps.httpx ps.jinja2 ps.pyyaml ])
|
||||
import base64
|
||||
import binascii
|
||||
import json
|
||||
import pathlib
|
||||
from typing import Optional
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import bs4
|
||||
import click
|
||||
import httpx
|
||||
import jinja2
|
||||
|
||||
import utils
|
||||
|
||||
|
||||
LEAF_TEMPLATE = jinja2.Template('''
|
||||
{mkKdeDerivation}:
|
||||
mkKdeDerivation {
|
||||
pname = "{{ pname }}";
|
||||
}
|
||||
'''.strip())
|
||||
|
||||
ROOT_TEMPLATE = jinja2.Template('''
|
||||
{callPackage}: {
|
||||
{%- for p in packages %}
|
||||
{{ p }} = callPackage ./{{ p }} {};
|
||||
{%- endfor %}
|
||||
}
|
||||
'''.strip());
|
||||
|
||||
def to_sri(hash):
|
||||
raw = binascii.unhexlify(hash)
|
||||
b64 = base64.b64encode(raw).decode()
|
||||
return f"sha256-{b64}"
|
||||
|
||||
|
||||
@click.command
|
||||
@click.argument(
|
||||
"set",
|
||||
type=click.Choice(["frameworks", "gear", "plasma"]),
|
||||
required=True
|
||||
)
|
||||
@click.argument(
|
||||
"version",
|
||||
type=str,
|
||||
required=True
|
||||
)
|
||||
@click.option(
|
||||
"--nixpkgs",
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
resolve_path=True,
|
||||
writable=True,
|
||||
path_type=pathlib.Path,
|
||||
),
|
||||
default=pathlib.Path(__file__).parent.parent.parent.parent
|
||||
)
|
||||
@click.option(
|
||||
"--sources-url",
|
||||
type=str,
|
||||
default=None,
|
||||
)
|
||||
def main(set: str, version: str, nixpkgs: pathlib.Path, sources_url: Optional[str]):
|
||||
root_dir = nixpkgs / "pkgs/kde"
|
||||
set_dir = root_dir / set
|
||||
generated_dir = root_dir / "generated"
|
||||
metadata = utils.KDERepoMetadata.from_json(generated_dir)
|
||||
|
||||
if sources_url is None:
|
||||
set_url = {
|
||||
"frameworks": "kf",
|
||||
"gear": "releases",
|
||||
"plasma": "plasma",
|
||||
}[set]
|
||||
sources_url = f"https://kde.org/info/sources/source-{set_url}-{version}.html"
|
||||
|
||||
sources = httpx.get(sources_url)
|
||||
sources.raise_for_status()
|
||||
bs = bs4.BeautifulSoup(sources.text, features="html.parser")
|
||||
|
||||
results = {}
|
||||
for item in bs.select("tr")[1:]:
|
||||
link = item.select_one("td:nth-child(1) a")
|
||||
assert link
|
||||
|
||||
hash = item.select_one("td:nth-child(3) tt")
|
||||
assert hash
|
||||
|
||||
project_name, version = link.text.rsplit("-", maxsplit=1)
|
||||
if project_name not in metadata.projects_by_name:
|
||||
print(f"Warning: unknown tarball: {project_name}")
|
||||
|
||||
results[project_name] = {
|
||||
"version": version,
|
||||
"url": "mirror://kde" + urlparse(link.attrs["href"]).path,
|
||||
"hash": to_sri(hash.text)
|
||||
}
|
||||
|
||||
pkg_dir = set_dir / project_name
|
||||
pkg_file = pkg_dir / "default.nix"
|
||||
if not pkg_file.exists():
|
||||
print(f"Generated new package: {set}/{project_name}")
|
||||
pkg_dir.mkdir(parents=True, exist_ok=True)
|
||||
with pkg_file.open("w") as fd:
|
||||
fd.write(LEAF_TEMPLATE.render(pname=project_name) + "\n")
|
||||
|
||||
set_dir.mkdir(parents=True, exist_ok=True)
|
||||
with (set_dir / "default.nix").open("w") as fd:
|
||||
fd.write(ROOT_TEMPLATE.render(packages=sorted(results.keys())) + "\n")
|
||||
|
||||
sources_dir = generated_dir / "sources"
|
||||
sources_dir.mkdir(parents=True, exist_ok=True)
|
||||
with (sources_dir / f"{set}.json").open("w") as fd:
|
||||
json.dump(results, fd, indent=2)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main() # type: ignore
|
|
@ -0,0 +1,185 @@
|
|||
import collections
|
||||
import dataclasses
|
||||
import functools
|
||||
import json
|
||||
import pathlib
|
||||
import subprocess
|
||||
|
||||
import yaml
|
||||
|
||||
class DataclassEncoder(json.JSONEncoder):
    """JSON encoder that serializes dataclass instances as plain dicts."""

    def default(self, it):
        # Anything that is not a dataclass falls through to the base
        # implementation, which raises TypeError for unsupported types.
        if not dataclasses.is_dataclass(it):
            return super().default(it)
        return dataclasses.asdict(it)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class Project:
|
||||
name: str
|
||||
description: str | None
|
||||
project_path: str
|
||||
repo_path: str | None
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(self.name)
|
||||
|
||||
@classmethod
|
||||
def from_yaml(cls, path: pathlib.Path):
|
||||
data = yaml.safe_load(path.open())
|
||||
return cls(
|
||||
name=data["identifier"],
|
||||
description=data["description"],
|
||||
project_path=data["projectpath"],
|
||||
repo_path=data["repopath"]
|
||||
)
|
||||
|
||||
|
||||
def get_git_commit(path: pathlib.Path):
    """Return the abbreviated hash of HEAD in the git repository at ``path``.

    Raises subprocess.CalledProcessError if git exits non-zero.
    """
    proc = subprocess.run(
        ["git", "-C", path, "rev-parse", "--short", "HEAD"],
        check=True,
        stdout=subprocess.PIPE,
    )
    return proc.stdout.decode().strip()
|
||||
|
||||
|
||||
def validate_unique(projects: list[Project], attr: str):
    """Ensure no two projects share the same value for attribute ``attr``.

    Raises:
        ValueError: on the first duplicate value encountered. (ValueError is
        a subclass of Exception, so existing broad handlers still apply; it
        is more precise than raising bare Exception.)
    """
    seen = set()
    for item in projects:
        attr_value = getattr(item, attr)
        if attr_value in seen:
            raise ValueError(f"Duplicate {attr}: {attr_value}")
        seen.add(attr_value)
|
||||
|
||||
|
||||
# Projects under KDE's "third-party/" namespace, mapped to the package name
# they are registered under. They are added explicitly alongside the projects
# parsed from metadata.yaml files (see KDERepoMetadata.from_repo_metadata_checkout).
THIRD_PARTY = {
    "third-party/appstream": "appstream-qt",
    "third-party/cmark": "cmark",
    "third-party/gpgme": "gpgme",
    "third-party/kdsoap": "kdsoap",
    "third-party/libaccounts-qt": "accounts-qt",
    "third-party/libgpg-error": "libgpg-error",
    "third-party/libquotient": "libquotient",
    "third-party/packagekit-qt": "packagekit-qt",
    "third-party/poppler": "poppler",
    "third-party/qcoro": "qcoro",
    "third-party/qmltermwidget": "qmltermwidget",
    "third-party/qtkeychain": "qtkeychain",
    "third-party/signond": "signond",
    "third-party/taglib": "taglib",
    "third-party/wayland-protocols": "wayland-protocols",
    "third-party/wayland": "wayland",
    "third-party/zxing-cpp": "zxing-cpp",
}

# Project paths that KDERepoMetadata.try_lookup_package skips silently:
# they resolve to no package and produce no warning.
IGNORE = {
    "kdesupport/phonon-directshow",
    "kdesupport/phonon-mmf",
    "kdesupport/phonon-mplayer",
    "kdesupport/phonon-quicktime",
    "kdesupport/phonon-waveout",
    "kdesupport/phonon-xine"
}

# Project paths already warned about, so each unknown project is reported
# only once per process (module-level, shared by all metadata instances).
WARNED = set()
|
||||
|
||||
|
||||
@dataclasses.dataclass
class KDERepoMetadata:
    """Project and dependency metadata for the KDE repositories.

    Built either from a checkout of KDE's repo-metadata repository
    (``from_repo_metadata_checkout``) or from previously generated JSON
    (``from_json``), and serialized back out with ``write_json``.
    """

    # Short git commit hash of the repo-metadata checkout this data came from.
    version: str
    projects: list[Project]
    # Edges: project -> set of projects it depends on.
    dep_graph: dict[Project, set[Project]]

    @functools.cached_property
    def projects_by_name(self):
        """Projects indexed by their unique name (computed once, cached)."""
        return {p.name: p for p in self.projects}

    @functools.cached_property
    def projects_by_path(self):
        """Projects indexed by their unique project path (computed once, cached)."""
        return {p.project_path: p for p in self.projects}

    def try_lookup_package(self, path):
        """Resolve a project path to a Project, or None.

        Paths in IGNORE are skipped silently; any other unknown path is
        warned about once per process (tracked via the module-level WARNED set).
        """
        if path in IGNORE:
            return None
        project = self.projects_by_path.get(path)
        if project is None and path not in WARNED:
            WARNED.add(path)
            print(f"Warning: unknown project {path}")
        return project

    @classmethod
    def from_repo_metadata_checkout(cls, repo_metadata: pathlib.Path):
        """Build metadata from a git checkout of KDE's repo-metadata repository."""
        # First-party projects described by metadata.yaml files, plus the
        # explicitly registered third-party projects.
        projects = [
            Project.from_yaml(metadata_file)
            for metadata_file in repo_metadata.glob("projects-invent/**/metadata.yaml")
        ] + [
            Project(id, None, project_path, None)
            for project_path, id in THIRD_PARTY.items()
        ]

        # Both name and project_path are used as dict keys (see the cached
        # properties above), so they must be unique.
        validate_unique(projects, "name")
        validate_unique(projects, "project_path")

        self = cls(
            version=get_git_commit(repo_metadata),
            projects=projects,
            dep_graph={},
        )

        # Dependency list files to read, relative to <checkout>/dependencies/.
        dep_specs = [
            "dependency-data-common",
            "dependency-data-kf6-qt6"
        ]
        dep_graph = collections.defaultdict(set)

        for spec in dep_specs:
            spec_path = repo_metadata / "dependencies" / spec
            # File format: one "dependent-path: dependency-path" pair per
            # line, with "#" comment lines and blank lines interspersed.
            for line in spec_path.open():
                line = line.strip()
                if line.startswith("#"):
                    continue
                if not line:
                    continue

                dependent, dependency = line.split(": ")

                # Drop edges whose endpoints cannot be resolved to a Project.
                dependent = self.try_lookup_package(dependent)
                if dependent is None:
                    continue

                dependency = self.try_lookup_package(dependency)
                if dependency is None:
                    continue

                dep_graph[dependent].add(dependency)

        self.dep_graph = dep_graph

        return self

    def write_json(self, root: pathlib.Path):
        """Serialize projects and dependencies as JSON files under ``root``."""
        root.mkdir(parents=True, exist_ok=True)

        with (root / "projects.json").open("w") as fd:
            json.dump(self.projects_by_name, fd, cls=DataclassEncoder, sort_keys=True, indent=2)

        with (root / "dependencies.json").open("w") as fd:
            # Store edges by project name (sorted for stable output) rather
            # than by Project object, so the data round-trips via from_json.
            deps = {k.name: sorted(dep.name for dep in v) for k, v in self.dep_graph.items()}
            json.dump({"version": self.version, "dependencies": deps}, fd, cls=DataclassEncoder, sort_keys=True, indent=2)

    @classmethod
    def from_json(cls, root: pathlib.Path):
        """Rebuild metadata previously written by ``write_json`` from ``root``."""
        projects = [
            Project(**v) for v in json.load((root / "projects.json").open()).values()
        ]

        deps = json.load((root / "dependencies.json").open())
        self = cls(
            version=deps["version"],
            projects=projects,
            dep_graph={},
        )

        # Re-link the name-based edges back into Project-object edges.
        dep_graph = collections.defaultdict(set)
        for dependent, dependencies in deps["dependencies"].items():
            for dependency in dependencies:
                dep_graph[self.projects_by_name[dependent]].add(self.projects_by_name[dependency])

        self.dep_graph = dep_graph
        return self
|
|
@ -95,6 +95,7 @@ magick,,,,,5.1,donovanglover
|
|||
markdown,,,,,,
|
||||
mediator_lua,,,,,,
|
||||
middleclass,,,,,,
|
||||
mimetypes,,,,,,
|
||||
mpack,,,,,,
|
||||
moonscript,https://github.com/leafo/moonscript.git,dev-1,,,,arobyn
|
||||
nlua,,,,,,teto
|
||||
|
@ -116,6 +117,7 @@ stdlib,,,,41.2.2,,vyp
|
|||
teal-language-server,,,http://luarocks.org/dev,,,
|
||||
telescope.nvim,,,,,5.1,
|
||||
telescope-manix,,,,,,
|
||||
tiktoken_core,,,,,,natsukium
|
||||
tl,,,,,,mephistophiles
|
||||
toml,,,,,,mrcjkb
|
||||
toml-edit,,,,,5.1,mrcjkb
|
||||
|
|
|
|
@ -21,6 +21,7 @@ stdenv.mkDerivation {
|
|||
meta = {
|
||||
maintainers = with lib.maintainers; [ eelco ];
|
||||
description = "Utility to generate a Nix expression for a Perl package from CPAN";
|
||||
mainProgram = "nix-generate-from-cpan";
|
||||
platforms = lib.platforms.unix;
|
||||
};
|
||||
}
|
||||
|
|
|
@ -19,6 +19,7 @@ stdenv.mkDerivation {
|
|||
meta = with lib; {
|
||||
maintainers = [ maintainers.eelco ];
|
||||
description = "A utility for Nixpkgs contributors to check Nixpkgs for common errors";
|
||||
mainProgram = "nixpkgs-lint";
|
||||
platforms = platforms.unix;
|
||||
};
|
||||
}
|
||||
|
|
|
@ -96,6 +96,15 @@ with lib.maintainers; {
|
|||
shortName = "Blockchains";
|
||||
};
|
||||
|
||||
budgie = {
|
||||
members = [
|
||||
bobby285271
|
||||
federicoschonborn
|
||||
];
|
||||
scope = "Maintain Budgie desktop environment";
|
||||
shortName = "Budgie";
|
||||
};
|
||||
|
||||
buildbot = {
|
||||
members = [
|
||||
lopsided98
|
||||
|
@ -176,7 +185,7 @@ with lib.maintainers; {
|
|||
|
||||
cosmopolitan = {
|
||||
members = [
|
||||
lourkeur
|
||||
bbjubjub
|
||||
tomberek
|
||||
];
|
||||
scope = "Maintain the Cosmopolitan LibC and related programs.";
|
||||
|
@ -188,6 +197,7 @@ with lib.maintainers; {
|
|||
ivar
|
||||
mdarocha
|
||||
corngood
|
||||
ggg
|
||||
raphaelr
|
||||
jamiemagee
|
||||
anpin
|
||||
|
@ -302,6 +312,8 @@ with lib.maintainers; {
|
|||
dpausp
|
||||
frlan
|
||||
leona
|
||||
osnyx
|
||||
ma27
|
||||
];
|
||||
scope = "Team for Flying Circus employees who collectively maintain packages.";
|
||||
shortName = "Flying Circus employees";
|
||||
|
@ -342,7 +354,6 @@ with lib.maintainers; {
|
|||
imincik
|
||||
nh2
|
||||
nialov
|
||||
r-burns
|
||||
sikmir
|
||||
willcohen
|
||||
];
|
||||
|
@ -430,7 +441,6 @@ with lib.maintainers; {
|
|||
helsinki-systems = {
|
||||
# Verify additions to this team with at least one already existing member of the team.
|
||||
members = [
|
||||
ajs124
|
||||
das_j
|
||||
];
|
||||
scope = "Group registration for packages maintained by Helsinki Systems";
|
||||
|
@ -495,6 +505,7 @@ with lib.maintainers; {
|
|||
members = [
|
||||
aanderse
|
||||
cpages
|
||||
dschrempf
|
||||
edwtjo
|
||||
minijackson
|
||||
peterhoeg
|
||||
|
@ -775,7 +786,6 @@ with lib.maintainers; {
|
|||
members = [
|
||||
aanderse
|
||||
drupol
|
||||
etu
|
||||
ma27
|
||||
talyz
|
||||
];
|
||||
|
@ -821,12 +831,18 @@ with lib.maintainers; {
|
|||
|
||||
qt-kde = {
|
||||
members = [
|
||||
ilya-fedin
|
||||
k900
|
||||
LunNova
|
||||
mjm
|
||||
nickcao
|
||||
SuperSandro2000
|
||||
ttuegel
|
||||
];
|
||||
githubTeams = [
|
||||
"qt-kde"
|
||||
];
|
||||
scope = "Maintain the KDE desktop environment and Qt.";
|
||||
scope = "Maintain the Qt framework, KDE application suite, Plasma desktop environment and related projects.";
|
||||
shortName = "Qt / KDE";
|
||||
enableFeatureFreezePing = true;
|
||||
};
|
||||
|
@ -864,6 +880,7 @@ with lib.maintainers; {
|
|||
members = [
|
||||
Madouura
|
||||
Flakebi
|
||||
mschwaig
|
||||
];
|
||||
githubTeams = [
|
||||
"rocm-maintainers"
|
||||
|
@ -923,6 +940,18 @@ with lib.maintainers; {
|
|||
shortName = "Serokell employees";
|
||||
};
|
||||
|
||||
steam = {
|
||||
members = [
|
||||
atemu
|
||||
eclairevoyant
|
||||
jonringer
|
||||
k900
|
||||
mkg20001
|
||||
];
|
||||
scope = "Maintain steam module and packages";
|
||||
shortName = "Steam";
|
||||
};
|
||||
|
||||
systemd = {
|
||||
members = [ ];
|
||||
githubTeams = [
|
||||
|
|
|
@ -150,6 +150,7 @@ Or if you have an older card, you may have to use one of the legacy
|
|||
drivers:
|
||||
|
||||
```nix
|
||||
services.xserver.videoDrivers = [ "nvidiaLegacy470" ];
|
||||
services.xserver.videoDrivers = [ "nvidiaLegacy390" ];
|
||||
services.xserver.videoDrivers = [ "nvidiaLegacy340" ];
|
||||
services.xserver.videoDrivers = [ "nvidiaLegacy304" ];
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue