Compare commits


No commits in common. "master" and "pr-libjxl-cross" have entirely different histories.

21229 changed files with 439263 additions and 507956 deletions


@ -105,19 +105,3 @@ fb0e5be84331188a69b3edd31679ca6576edb75a
# {pkgs/development/cuda-modules,pkgs/test/cuda,pkgs/top-level/cuda-packages.nix}: reformat all CUDA files with nixfmt-rfc-style 2023-03-01
802a1b4d3338f24cbc4efd704616654456d75a94
# postgresql: move packages.nix to ext/default.nix
719034f6f6749d624faa28dff259309fc0e3e730
# php ecosystem: reformat with nixfmt-rfc-style
75ae7621330ff8db944ce4dff4374e182d5d151f
c759efa5e7f825913f9a69ef20f025f50f56dc4d
# pkgs/os-specific/bsd: Reformat with nixfmt-rfc-style 2024-03-01
3fe3b055adfc020e6a923c466b6bcd978a13069a
# k3s: format with nixfmt-rfc-style
6cfcd3c75428ede517bc6b15a353d704837a2830
# python3Packages: format with nixfmt
59b1aef59071cae6e87859dc65de973d2cc595c0

.github/CODEOWNERS

@ -13,6 +13,7 @@
# GitHub actions
/.github/workflows @NixOS/Security @Mic92 @zowoq
/.github/workflows/merge-staging @FRidh
# EditorConfig
/.editorconfig @Mic92 @zowoq
@ -41,7 +42,7 @@
/pkgs/top-level/splice.nix @Ericson2314
/pkgs/top-level/release-cross.nix @Ericson2314
/pkgs/stdenv/generic @Ericson2314
/pkgs/stdenv/generic/check-meta.nix @Ericson2314
/pkgs/stdenv/generic/check-meta.nix @Ericson2314 @piegamesde
/pkgs/stdenv/cross @Ericson2314
/pkgs/build-support/cc-wrapper @Ericson2314
/pkgs/build-support/bintools-wrapper @Ericson2314
@ -67,8 +68,8 @@
/nixos/lib/make-disk-image.nix @raitobezarius
# Nix, the package manager
pkgs/tools/package-management/nix/ @raitobezarius
nixos/modules/installer/tools/nix-fallback-paths.nix @raitobezarius
pkgs/tools/package-management/nix/ @raitobezarius @ma27
nixos/modules/installer/tools/nix-fallback-paths.nix @raitobezarius @ma27
# Nixpkgs documentation
/maintainers/scripts/db-to-md.sh @jtojnar @ryantm
@ -124,8 +125,10 @@ nixos/modules/installer/tools/nix-fallback-paths.nix @raitobezarius
/pkgs/common-updater/scripts/update-source-version @jtojnar
# Python-related code and docs
/doc/languages-frameworks/python.section.md @mweinelt
/pkgs/development/interpreters/python/hooks @jonringer
/maintainers/scripts/update-python-libraries @FRidh
/pkgs/development/interpreters/python @FRidh
/doc/languages-frameworks/python.section.md @FRidh @mweinelt
/pkgs/development/interpreters/python/hooks @FRidh @jonringer
# Haskell
/doc/languages-frameworks/haskell.section.md @sternenseemann @maralorn @ncfavier
@ -137,14 +140,18 @@ nixos/modules/installer/tools/nix-fallback-paths.nix @raitobezarius
/pkgs/top-level/haskell-packages.nix @sternenseemann @maralorn @ncfavier
# Perl
/pkgs/development/interpreters/perl @stigtsp @zakame @dasJ @marcusramberg
/pkgs/top-level/perl-packages.nix @stigtsp @zakame @dasJ @marcusramberg
/pkgs/development/perl-modules @stigtsp @zakame @dasJ @marcusramberg
/pkgs/development/interpreters/perl @stigtsp @zakame @dasJ
/pkgs/top-level/perl-packages.nix @stigtsp @zakame @dasJ
/pkgs/development/perl-modules @stigtsp @zakame @dasJ
# R
/pkgs/applications/science/math/R @jbedo
/pkgs/development/r-modules @jbedo
# Ruby
/pkgs/development/interpreters/ruby @marsam
/pkgs/development/ruby-modules @marsam
# Rust
/pkgs/development/compilers/rust @Mic92 @zowoq @winterqt @figsoda
/pkgs/build-support/rust @zowoq @winterqt @figsoda
@ -191,7 +198,7 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
/maintainers/scripts/kde @K900 @NickCao @SuperSandro2000 @ttuegel
# PostgreSQL and related stuff
/pkgs/servers/sql/postgresql @thoughtpolice
/pkgs/servers/sql/postgresql @thoughtpolice @marsam
/nixos/modules/services/databases/postgresql.xml @thoughtpolice
/nixos/modules/services/databases/postgresql.nix @thoughtpolice
/nixos/tests/postgresql.nix @thoughtpolice
@ -305,9 +312,16 @@ nixos/modules/services/networking/networkmanager.nix @Janik-Haag
# terraform providers
/pkgs/applications/networking/cluster/terraform-providers @zowoq
# Matrix
/pkgs/servers/heisenbridge @piegamesde
/pkgs/servers/matrix-conduit @piegamesde
/nixos/modules/services/misc/heisenbridge.nix @piegamesde
/nixos/modules/services/misc/matrix-conduit.nix @piegamesde
/nixos/tests/matrix-conduit.nix @piegamesde
# Forgejo
nixos/modules/services/misc/forgejo.nix @adamcstephens @bendlas @emilylange
pkgs/by-name/fo/forgejo/package.nix @adamcstephens @bendlas @emilylange
nixos/modules/services/misc/forgejo.nix @bendlas @emilylange
pkgs/applications/version-management/forgejo @bendlas @emilylange
# Dotnet
/pkgs/build-support/dotnet @IvarWithoutBones
@ -345,11 +359,8 @@ pkgs/development/tools/continuous-integration/buildbot @Mic92 @zowoq
# Pretix
pkgs/by-name/pr/pretix/ @mweinelt
pkgs/by-name/pr/pretalx/ @mweinelt
nixos/modules/services/web-apps/pretix.nix @mweinelt
nixos/modules/services/web-apps/pretalx.nix @mweinelt
nixos/tests/web-apps/pretix.nix @mweinelt
nixos/tests/web-apps/pretalx.nix @mweinelt
# incus/lxc/lxd
nixos/maintainers/scripts/lxd/ @adamcstephens


@ -24,7 +24,7 @@ For new packages please briefly describe the package or provide a link to its ho
- made sure NixOS tests are [linked](https://nixos.org/manual/nixpkgs/unstable/#ssec-nixos-tests-linking) to the relevant packages
- [ ] Tested compilation of all packages that depend on this change using `nix-shell -p nixpkgs-review --run "nixpkgs-review rev HEAD"`. Note: all changes have to be committed, also see [nixpkgs-review usage](https://github.com/Mic92/nixpkgs-review#usage)
- [ ] Tested basic functionality of all binary files (usually in `./result/bin/`)
- [24.11 Release Notes](https://github.com/NixOS/nixpkgs/blob/master/nixos/doc/manual/release-notes/rl-2411.section.md) (or backporting [23.11](https://github.com/NixOS/nixpkgs/blob/master/nixos/doc/manual/release-notes/rl-2311.section.md) and [24.05](https://github.com/NixOS/nixpkgs/blob/master/nixos/doc/manual/release-notes/rl-2405.section.md) Release notes)
- [24.05 Release Notes](https://github.com/NixOS/nixpkgs/blob/master/nixos/doc/manual/release-notes/rl-2405.section.md) (or backporting [23.05](https://github.com/NixOS/nixpkgs/blob/master/nixos/doc/manual/release-notes/rl-2305.section.md) and [23.11](https://github.com/NixOS/nixpkgs/blob/master/nixos/doc/manual/release-notes/rl-2311.section.md) Release notes)
- [ ] (Package updates) Added a release notes entry if the change is major or breaking
- [ ] (Module updates) Added a release notes entry if the change is significant
- [ ] (Module addition) Added a release notes entry if adding a new NixOS module

.github/labeler.yml

@ -16,17 +16,6 @@
- nixos/modules/services/x11/desktop-managers/cinnamon.nix
- nixos/tests/cinnamon.nix
"6.topic: dotnet":
- any:
- changed-files:
- any-glob-to-any-file:
- doc/languages-frameworks/dotnet.section.md
- maintainers/scripts/update-dotnet-lockfiles.nix
- pkgs/build-support/dotnet/**/*
- pkgs/development/compilers/dotnet/**/*
- pkgs/test/dotnet/**/*
- pkgs/top-level/dotnet-packages.nix
"6.topic: emacs":
- any:
- changed-files:
@ -124,14 +113,6 @@
- pkgs/applications/editors/jupyter-kernels/**/*
- pkgs/applications/editors/jupyter/**/*
"6.topic: k3s":
- any:
- changed-files:
- any-glob-to-any-file:
- nixos/modules/services/cluster/k3s/**/*
- nixos/tests/k3s/**/*
- pkgs/applications/networking/cluster/k3s/**/*
"6.topic: kernel":
- any:
- changed-files:
@ -145,12 +126,6 @@
- any-glob-to-any-file:
- lib/**
"6.topic: llvm/clang":
- any:
- changed-files:
- any-glob-to-any-file:
- pkgs/development/compilers/llvm/*
"6.topic: lua":
- any:
- changed-files:
@ -218,7 +193,6 @@
- pkgs/development/node-packages/**/*
- pkgs/development/tools/yarn/*
- pkgs/development/tools/yarn2nix-moretea/**/*
- pkgs/development/tools/pnpm/**/*
- pkgs/development/web/nodejs/*
"6.topic: ocaml":
@ -395,8 +369,3 @@
- changed-files:
- any-glob-to-any-file:
- nixos/modules/**/*
"8.has: maintainer-list (update)":
- any:
- changed-files:
- any-glob-to-any-file:
- maintainers/maintainer-list.nix


@ -20,7 +20,7 @@ jobs:
if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name))
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Create backport PRs


@ -18,9 +18,9 @@ jobs:
runs-on: ubuntu-latest
# we don't limit this action to only the NixOS repo since the checks are cheap and useful developer feedback
steps:
- uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
- uses: cachix/cachix-action@ad2ddac53f961de1989924296a1f236fcfbaa4fc # v15
- uses: cachix/cachix-action@18cf96c7c98e048e10a83abd92116114cd8504be # v14
with:
# This cache is for the nixpkgs repo checks and should not be trusted or used elsewhere.
name: nixpkgs-ci


@ -14,16 +14,11 @@ on:
# While `edited` is also triggered when the PR title/body is changed,
# this PR action is fairly quick, and PRs don't get edited that often,
# so it shouldn't be a problem
# There is a feature request for adding a `base_changed` event:
# https://github.com/orgs/community/discussions/35058
types: [opened, synchronize, reopened, edited]
permissions: {}
# We don't use a concurrency group here, because the action is triggered quite often (due to the PR edit
# trigger), and contributors would get notified on any canceled run.
# There is a feature request for suppressing notifications on concurrency-canceled runs:
# https://github.com/orgs/community/discussions/13015
permissions:
# We need this permission to cancel the workflow run if there's a merge conflict
actions: write
jobs:
check:
@ -44,7 +39,7 @@ jobs:
# https://docs.github.com/en/rest/guides/using-the-rest-api-to-interact-with-your-git-database?apiVersion=2022-11-28#checking-mergeability-of-pull-requests
# Retry the API query this many times
retryCount=5
retryCount=3
# Start with 5 seconds, but double every retry
retryInterval=5
while true; do
@ -77,27 +72,31 @@ jobs:
if [[ "$mergeable" == "true" ]]; then
echo "The PR can be merged, checking the merge commit $mergedSha"
echo "mergedSha=$mergedSha" >> "$GITHUB_ENV"
else
echo "The PR cannot be merged, it has a merge conflict, skipping the rest.."
echo "The PR cannot be merged, it has a merge conflict, cancelling the workflow.."
gh api \
--method POST \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
/repos/"$GITHUB_REPOSITORY"/actions/runs/"$GITHUB_RUN_ID"/cancel
sleep 60
# If it's still not canceled after a minute, something probably went wrong, just exit
exit 1
fi
- uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5
if: env.mergedSha
echo "mergedSha=$mergedSha" >> "$GITHUB_ENV"
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
# pull_request_target checks out the base branch by default
ref: ${{ env.mergedSha }}
# Fetches the merge commit and its parents
fetch-depth: 2
- name: Checking out base branch
if: env.mergedSha
run: |
base=$(mktemp -d)
git worktree add "$base" "$(git rev-parse HEAD^1)"
echo "base=$base" >> "$GITHUB_ENV"
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
if: env.mergedSha
- name: Fetching the pinned tool
if: env.mergedSha
# Update the pinned version using pkgs/test/check-by-name/update-pinned-tool.sh
run: |
# The pinned version of the tooling to use
@ -108,7 +107,6 @@ jobs:
# Adds a result symlink as a GC root
nix-store --realise "$toolPath" --add-root result
- name: Running nixpkgs-check-by-name
if: env.mergedSha
env:
# Force terminal colors to be enabled. The library that
# nixpkgs-check-by-name uses respects: https://bixense.com/clicolors/


@ -4,7 +4,6 @@ on:
branches:
- 'release-**'
- 'staging-**'
- '!staging-next'
permissions: {}
@ -13,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'NixOS'
steps:
- uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 0
filter: blob:none


@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'NixOS'
steps:
- uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
# pull_request_target checks out the base branch by default
ref: refs/pull/${{ github.event.pull_request.number }}/merge


@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'NixOS'
steps:
- uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
# pull_request_target checks out the base branch by default
ref: refs/pull/${{ github.event.pull_request.number }}/merge
@ -32,20 +32,11 @@ jobs:
# Each environment variable beginning with NIX_FMT_PATHS_ is a list of
# paths to check with nixfmt.
env:
NIX_FMT_PATHS_BSD: pkgs/os-specific/bsd
NIX_FMT_PATHS_MPVSCRIPTS: pkgs/applications/video/mpv/scripts
# Format paths related to the Nixpkgs CUDA ecosystem.
NIX_FMT_PATHS_CUDA: |
pkgs/development/cuda-modules
pkgs/test/cuda
pkgs/top-level/cuda-packages.nix
NIX_FMT_PATHS_K3S: |
nixos/modules/services/cluster/k3s
nixos/tests/k3s
pkgs/applications/networking/cluster/k3s
NIX_FMT_PATHS_VSCODE_EXTS: pkgs/applications/editors/vscode/extensions
NIX_FMT_PATHS_PHP_PACKAGES: pkgs/development/php-packages
NIX_FMT_PATHS_BUILD_SUPPORT_PHP: pkgs/build-support/php
# Iterate over all environment variables beginning with NIX_FMT_PATHS_.
run: |
for env_var in "${!NIX_FMT_PATHS_@}"; do


@ -24,7 +24,7 @@ jobs:
- name: print list of changed files
run: |
cat "$HOME/changed_files"
- uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
# pull_request_target checks out the base branch by default
ref: refs/pull/${{ github.event.pull_request.number }}/merge


@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'NixOS'
steps:
- uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
# pull_request_target checks out the base branch by default
ref: refs/pull/${{ github.event.pull_request.number }}/merge
@ -22,7 +22,7 @@ jobs:
with:
# explicitly enable sandbox
extra_nix_config: sandbox = true
- uses: cachix/cachix-action@ad2ddac53f961de1989924296a1f236fcfbaa4fc # v15
- uses: cachix/cachix-action@18cf96c7c98e048e10a83abd92116114cd8504be # v14
with:
# This cache is for the nixpkgs repo checks and should not be trusted or used elsewhere.
name: nixpkgs-ci


@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'NixOS'
steps:
- uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
# pull_request_target checks out the base branch by default
ref: refs/pull/${{ github.event.pull_request.number }}/merge
@ -24,7 +24,7 @@ jobs:
with:
# explicitly enable sandbox
extra_nix_config: sandbox = true
- uses: cachix/cachix-action@ad2ddac53f961de1989924296a1f236fcfbaa4fc # v15
- uses: cachix/cachix-action@18cf96c7c98e048e10a83abd92116114cd8504be # v14
with:
# This cache is for the nixpkgs repo checks and should not be trusted or used elsewhere.
name: nixpkgs-ci


@ -24,7 +24,7 @@ jobs:
if [[ -s "$HOME/changed_files" ]]; then
echo "CHANGED_FILES=$HOME/changed_files" > "$GITHUB_ENV"
fi
- uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
# pull_request_target checks out the base branch by default
ref: refs/pull/${{ github.event.pull_request.number }}/merge


@ -39,13 +39,9 @@ jobs:
into: staging-next-23.11
- from: staging-next-23.11
into: staging-23.11
- from: release-24.05
into: staging-next-24.05
- from: staging-next-24.05
into: staging-24.05
name: ${{ matrix.pairs.from }} → ${{ matrix.pairs.into }}
steps:
- uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- name: ${{ matrix.pairs.from }} → ${{ matrix.pairs.into }}
uses: devmasx/merge-branch@854d3ac71ed1e9deb668e0074781b81fdd6e771f # 1.4.0


@ -39,7 +39,7 @@ jobs:
into: staging
name: ${{ matrix.pairs.from }} → ${{ matrix.pairs.into }}
steps:
- uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- name: ${{ matrix.pairs.from }} → ${{ matrix.pairs.into }}
uses: devmasx/merge-branch@854d3ac71ed1e9deb668e0074781b81fdd6e771f # 1.4.0


@ -16,7 +16,7 @@ jobs:
if: github.repository_owner == 'NixOS' && github.ref == 'refs/heads/master' # ensure workflow_dispatch only runs on master
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
with:
nix_path: nixpkgs=channel:nixpkgs-unstable
@ -46,7 +46,7 @@ jobs:
run: |
git clean -f
- name: create PR
uses: peter-evans/create-pull-request@9153d834b60caba6d51c9b9510b087acf9f33f83 # v6.0.4
uses: peter-evans/create-pull-request@70a41aba780001da0a30141984ae2a0c95d8704e # v6.0.2
with:
body: |
Automatic update by [update-terraform-providers](https://github.com/NixOS/nixpkgs/blob/master/.github/workflows/update-terraform-providers.yml) action.


@ -10,7 +10,6 @@ Robert Hensing <robert@roberthensing.nl> <roberth@users.noreply.github.com>
Sandro Jäckel <sandro.jaeckel@gmail.com>
Sandro Jäckel <sandro.jaeckel@gmail.com> <sandro.jaeckel@sap.com>
superherointj <5861043+superherointj@users.noreply.github.com>
Tomodachi94 <tomodachi94@protonmail.com> Tomo <68489118+Tomodachi94@users.noreply.github.com>
Vladimír Čunát <v@cunat.cz> <vcunat@gmail.com>
Vladimír Čunát <v@cunat.cz> <vladimir.cunat@nic.cz>
Yifei Sun <ysun@hey.com> StepBroBD <Hi@StepBroBD.com>


@ -321,7 +321,7 @@ All the review template samples provided in this section are generic and meant a
To get more information about how to review specific parts of Nixpkgs, refer to the documents linked to in the [overview section][overview].
If a pull request contains documentation changes that might require feedback from the documentation team, ping [@NixOS/documentation-team](https://github.com/orgs/nixos/teams/documentation-team) on the pull request.
If a pull request contains documentation changes that might require feedback from the documentation team, ping [@NixOS/documentation-reviewers](https://github.com/orgs/nixos/teams/documentation-reviewers) on the pull request.
If you consider yourself to have enough knowledge and experience in a topic and would like to become a long-term reviewer for related submissions, please contact the current reviewers for that topic. They will give you information about the reviewing process. The main reviewers for a topic can be hard to find, as there is no list, but checking past pull requests to see who reviewed them, or git-blaming the code to see who committed to that topic, can give some hints.
@ -330,14 +330,7 @@ Container system, boot system and library changes are some examples of the pull
## How to merge pull requests
[pr-merge]: #how-to-merge-pull-requests
To streamline automated updates, leverage the nixpkgs-merge-bot by simply commenting `@NixOS/nixpkgs-merge-bot merge`. The bot will verify if the following conditions are met, refusing to merge otherwise:
- the commenter that issued the command should be among the package maintainers;
- the package should reside in `pkgs/by-name`.
Further, nixpkgs-merge-bot will ensure all ofBorg checks (except the Darwin-related ones) are successfully completed before merging the pull request. Should the checks still be underway, the bot patiently waits for ofBorg to finish before attempting the merge again.
For other pull requests, the *Nixpkgs committers* are people who have been given
The *Nixpkgs committers* are people who have been given
permission to merge.
It is possible for community members that have enough knowledge and experience on a special topic to contribute by merging pull requests.
@ -366,7 +359,7 @@ See [Nix Channel Status](https://status.nixos.org/) for the current channels and
Here's a brief overview of the main Git branches and what channels they're used for:
- `master`: The main branch, used for the unstable channels such as `nixpkgs-unstable`, `nixos-unstable` and `nixos-unstable-small`.
- `release-YY.MM` (e.g. `release-24.05`): The NixOS release branches, used for the stable channels such as `nixos-24.05`, `nixos-24.05-small` and `nixpkgs-24.05-darwin`.
- `release-YY.MM` (e.g. `release-23.11`): The NixOS release branches, used for the stable channels such as `nixos-23.11`, `nixos-23.11-small` and `nixpkgs-23.11-darwin`.
When a channel is updated, a corresponding Git branch is also updated to point to the corresponding commit.
So e.g. the [`nixpkgs-unstable` branch](https://github.com/nixos/nixpkgs/tree/nixpkgs-unstable) corresponds to the Git commit from the [`nixpkgs-unstable` channel](https://channels.nixos.org/nixpkgs-unstable).
@ -519,7 +512,6 @@ To get a sense for what changes are considered mass rebuilds, see [previously me
- Check for unnecessary whitespace with `git diff --check` before committing.
- If you have commits `pkg-name: oh, forgot to insert whitespace`: squash commits in this case. Use `git rebase -i`.
See [Squashing Commits](https://git-scm.com/book/en/v2/Git-Tools-Rewriting-History#_squashing) for additional information.
- For consistency, there should not be a period at the end of the commit message's summary line (the first line of the commit message).


@ -52,9 +52,9 @@ Nixpkgs and NixOS are built and tested by our continuous integration
system, [Hydra](https://hydra.nixos.org/).
* [Continuous package builds for unstable/master](https://hydra.nixos.org/jobset/nixos/trunk-combined)
* [Continuous package builds for the NixOS 24.05 release](https://hydra.nixos.org/jobset/nixos/release-24.05)
* [Continuous package builds for the NixOS 23.11 release](https://hydra.nixos.org/jobset/nixos/release-23.11)
* [Tests for unstable/master](https://hydra.nixos.org/job/nixos/trunk-combined/tested#tabs-constituents)
* [Tests for the NixOS 24.05 release](https://hydra.nixos.org/job/nixos/release-24.05/tested#tabs-constituents)
* [Tests for the NixOS 23.11 release](https://hydra.nixos.org/job/nixos/release-23.11/tested#tabs-constituents)
Artifacts successfully built with Hydra are published to cache at
https://cache.nixos.org/. When successful build and test criteria are


@ -62,7 +62,7 @@ Allow linking arbitrary place in the text (e.g. individual list items, sentences
They are defined using a hybrid of the link syntax with the attributes syntax known from headings, called [bracketed spans](https://github.com/jgm/commonmark-hs/blob/master/commonmark-extensions/test/bracketed_spans.md):
```markdown
- []{#ssec-gnome-hooks-glib} `glib` setup hook will populate `GSETTINGS_SCHEMAS_PATH` and then `wrapGApps*` hook will prepend it to `XDG_DATA_DIRS`.
- []{#ssec-gnome-hooks-glib} `glib` setup hook will populate `GSETTINGS_SCHEMAS_PATH` and then `wrapGAppsHook` will prepend it to `XDG_DATA_DIRS`.
```
#### Automatic links
@ -345,4 +345,4 @@ Typographic replacements are enabled. Check the [list of possible replacement pa
## Getting help
If you need documentation-specific help or reviews, ping [@NixOS/documentation-team](https://github.com/orgs/nixos/teams/documentation-team) on your pull request.
If you need documentation-specific help or reviews, ping [@NixOS/documentation-reviewers](https://github.com/orgs/nixos/teams/documentation-reviewers) on your pull request.


@ -365,8 +365,8 @@ If `pname` and `version` are specified, `fetchurl` will use those values and wil
_Default value:_ `{}`.
`passthru` (Attribute Set; _optional_)
: Specifies any extra [`passthru`](#chap-passthru) attributes for the derivation returned by `fetchurl`.
Note that `fetchurl` defines [`passthru` attributes of its own](#ssec-pkgs-fetchers-fetchurl-passthru-outputs).
: Specifies any extra [passthru](#var-stdenv-passthru) attributes for the derivation returned by `fetchurl`.
Note that `fetchurl` defines [passthru attributes of its own](#ssec-pkgs-fetchers-fetchurl-passthru-outputs).
Attributes specified in `passthru` can override the default attributes returned by `fetchurl`.
_Default value:_ `{}`.
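To make the `passthru` argument concrete, here is a minimal sketch of passing an extra attribute through `fetchurl`; the URL, hash placeholder, and `updateChannel` attribute are illustrative assumptions, not taken from the diff above:

```nix
{ lib, fetchurl }:

fetchurl {
  url = "https://example.org/example-1.0.tar.gz";
  hash = lib.fakeHash; # placeholder; replace with the real hash reported on the first build

  # Extra attributes merged with fetchurl's own passthru set; they become
  # attributes of the resulting fixed-output derivation.
  passthru = {
    updateChannel = "stable";
  };
}
```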
@ -387,7 +387,7 @@ If `pname` and `version` are specified, `fetchurl` will use those values and wil
### Passthru outputs {#ssec-pkgs-fetchers-fetchurl-passthru-outputs}
`fetchurl` also defines its own [`passthru`](#chap-passthru) attributes:
`fetchurl` also defines its own [`passthru`](#var-stdenv-passthru) attributes:
`url` (String)


@ -191,7 +191,7 @@ Similarly, if you encounter errors similar to `Error_Protocol ("certificate has
### Passthru outputs {#ssec-pkgs-dockerTools-buildImage-passthru-outputs}
`buildImage` defines a few [`passthru`](#chap-passthru) attributes:
`buildImage` defines a few [`passthru`](#var-stdenv-passthru) attributes:
`buildArgs` (Attribute Set)
@ -576,13 +576,13 @@ This allows the function to produce reproducible images.
`passthru` (Attribute Set; _optional_)
: Use this to pass any attributes as [`passthru`](#chap-passthru) for the resulting derivation.
: Use this to pass any attributes as [passthru](#var-stdenv-passthru) for the resulting derivation.
_Default value:_ `{}`
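As a hedged sketch of consuming one of the passthru outputs documented below (the image name and contents are illustrative, not taken from the diff):

```nix
{ dockerTools, hello }:

let
  image = dockerTools.streamLayeredImage {
    name = "hello-example";
    contents = [ hello ];
  };
in
# `imageTag` is one of the passthru outputs streamLayeredImage defines itself;
# it is available at evaluation time, e.g. for interpolation into configuration.
"hello-example:${image.imageTag}"
```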
### Passthru outputs {#ssec-pkgs-dockerTools-streamLayeredImage-passthru-outputs}
`streamLayeredImage` also defines its own [`passthru`](#chap-passthru) attributes:
`streamLayeredImage` also defines its own [`passthru`](#var-stdenv-passthru) attributes:
`imageTag` (String)


@ -85,14 +85,14 @@ let
in
make-disk-image {
inherit pkgs lib;
inherit (evalConfig {
config = evalConfig {
modules = [
{
fileSystems."/" = { device = "/dev/vda"; fsType = "ext4"; autoFormat = true; };
boot.grub.device = "/dev/vda";
}
];
}) config;
};
format = "qcow2";
onlyNixStore = false;
partitionTableType = "legacy+gpt";
@ -104,3 +104,5 @@ in
memSize = 2048; # Qemu VM memory size in megabytes. Defaults to 1024M.
}
```


@ -6,11 +6,7 @@ It uses Linux' namespaces feature to create temporary lightweight environments w
Accepted arguments are:
- `name`
The name of the environment, and the wrapper executable if `pname` is unset.
- `pname`
The pname of the environment and the wrapper executable.
- `version`
The version of the environment.
The name of the environment and the wrapper executable.
- `targetPkgs`
Packages to be installed for the main host's architecture (i.e. x86_64 on x86_64 installations). Along with libraries, binaries are also installed.
- `multiPkgs`
@ -57,4 +53,4 @@ You can create a simple environment using a `shell.nix` like this:
Running `nix-shell` on it would drop you into a shell inside an FHS env where those libraries and binaries are available in FHS-compliant paths. Applications that expect an FHS structure (i.e. proprietary binaries) can run inside this environment without modification.
You can build a wrapper by running your binary in `runScript`, e.g. `./bin/start.sh`. Relative paths work as expected.
Additionally, the FHS builder links all relocated gsettings-schemas (the glib setup-hook moves them to `share/gsettings-schemas/${name}/glib-2.0/schemas`) to their standard FHS location. This means you don't need to wrap binaries with `wrapGApps*` hook.
Additionally, the FHS builder links all relocated gsettings-schemas (the glib setup-hook moves them to `share/gsettings-schemas/${name}/glib-2.0/schemas`) to their standard FHS location. This means you don't need to wrap binaries with `wrapGAppsHook`.
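Returning to the `shell.nix` usage mentioned above, a rough sketch of what such an environment can look like; the builder name `buildFHSEnv`, the package selection, and the `runScript` are illustrative assumptions:

```nix
# shell.nix — a minimal sketch of an FHS environment as described above.
{ pkgs ? import <nixpkgs> { } }:

(pkgs.buildFHSEnv {
  name = "example-fhs-env";

  # Installed for the host architecture (e.g. x86_64).
  targetPkgs = p: with p; [ zlib openssl curl ];

  # Additionally installed for the 32-bit architecture on multilib systems.
  multiPkgs = p: with p; [ zlib ];

  # What to run when entering the environment; a shell gives an interactive session.
  runScript = "bash";
}).env
```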


@ -40,82 +40,6 @@ If the `moduleNames` argument is omitted, `hasPkgConfigModules` will use `meta.p
:::
## `lycheeLinkCheck` {#tester-lycheeLinkCheck}
Check a packaged static site's links with the [`lychee` package](https://search.nixos.org/packages?show=lychee&type=packages&query=lychee).
You may use Nix to reproducibly build static websites, such as for software documentation.
Some packages will install documentation in their `out` or `doc` outputs, or maybe you have a dedicated package where you've made your static site reproducible by running a generator, such as [Hugo](https://gohugo.io/) or [mdBook](https://rust-lang.github.io/mdBook/), in a derivation.
If you have a static site that can be built with Nix, you can use `lycheeLinkCheck` to check that the hyperlinks in your site are correct, and do so as part of your Nix workflow and CI.
:::{.example #ex-lycheelinkcheck}
# Check hyperlinks in the `nix` documentation
```nix
testers.lycheeLinkCheck {
site = nix.doc + "/share/doc/nix/manual";
}
```
:::
### Return value {#tester-lycheeLinkCheck-return}
This tester produces a package that does not produce useful outputs, but only succeeds if the hyperlinks in your site are correct. The build log will list the broken links.
It has two modes:
- Build the returned derivation; its build process will check that internal hyperlinks are correct. This runs in the sandbox, so it will not check external hyperlinks, but it is quick and reliable.
- Invoke the `.online` attribute with [`nix run`](https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-run) ([experimental](https://nixos.org/manual/nix/stable/contributing/experimental-features#xp-feature-nix-command)). This runs outside the sandbox, and checks that both internal and external hyperlinks are correct.
Example:
```shell
nix run nixpkgs#lychee.tests.ok.online
```
### Inputs {#tester-lycheeLinkCheck-inputs}
`site` (path or derivation) {#tester-lycheeLinkCheck-param-site}
: The path to the files to check.
`remap` (attribute set, optional) {#tester-lycheeLinkCheck-param-remap}
: An attribute set where the attribute names are regular expressions.
The values should be strings, derivations, or path values.
In the returned check's default configuration, external URLs are only checked when you run the `.online` attribute.
By adding remappings, you can check offline that URLs to external resources are correct, by providing a stand-in from the file system.
Before checking the existence of a URL, the regular expressions are matched and replaced by their corresponding values.
Example:
```nix
{
"https://nix\\.dev/manual/nix/[a-z0-9.-]*" = "${nix.doc}/share/doc/nix/manual";
"https://nixos\\.org/manual/nix/(un)?stable" = "${emptyDirectory}/placeholder-to-disallow-old-nix-docs-urls";
}
```
Store paths in the attribute values are automatically prefixed with `file://`, because lychee requires this for paths in the file system.
If this is a problem, or if you need to control the order in which replacements are performed, use `extraConfig.remap` instead.
`extraConfig` (attribute set) {#tester-lycheeLinkCheck-param-extraConfig}
: Extra configuration to pass to `lychee` in its [configuration file](https://github.com/lycheeverse/lychee/blob/master/lychee.example.toml).
It is automatically [translated](https://nixos.org/manual/nixos/stable/index.html#sec-settings-nix-representable) to TOML.
Example: `{ "include_verbatim" = true; }`
`lychee` (derivation, optional) {#tester-lycheeLinkCheck-param-lychee}
: The `lychee` package to use.
## `testVersion` {#tester-testVersion}
Checks that the output from running a command contains the specified version string in it as a whole word.
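A short, hedged sketch of what such a version check can look like; the package and command are illustrative:

```nix
{ testers, hello }:

testers.testVersion {
  package = hello;
  # The command whose output is searched for the version as a whole word.
  command = "hello --version";
  # Defaults to the package's version when omitted; spelled out here for clarity.
  version = hello.version;
}
```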
@ -205,7 +129,7 @@ runCommand "example" {
:::
## `testEqualContents` {#tester-testEqualContents}
## `testEqualContents` {#tester-equalContents}
Check that two paths have the same contents.
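A hedged sketch, assuming the tester takes `assertion`, `expected`, and `actual` arguments; the texts compared are illustrative:

```nix
{ testers, writeText }:

testers.testEqualContents {
  assertion = "generated greeting matches the expected text";
  expected = writeText "expected" "hello world\n";
  actual = writeText "actual" "hello world\n";
}
```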


@ -105,21 +105,13 @@ in pkgs.stdenv.mkDerivation {
ln -s ${optionsDoc.optionsJSON}/share/doc/nixos/options.json ./config-options.json
'';
buildPhase = let
pythonInterpreterTable = pkgs.callPackage ./doc-support/python-interpreter-table.nix {};
pythonSection = with lib.strings; replaceStrings
[ "@python-interpreter-table@" ]
[ pythonInterpreterTable ]
(readFile ./languages-frameworks/python.section.md);
in ''
cp ${builtins.toFile "python.section.md" pythonSection} ./languages-frameworks/python.section.md
buildPhase = ''
cat \
./functions/library.md.in \
${lib-docs}/index.md \
> ./functions/library.md
substitute ./manual.md.in ./manual.md \
--replace-fail '@MANUAL_VERSION@' '${pkgs.lib.version}'
--replace '@MANUAL_VERSION@' '${pkgs.lib.version}'
mkdir -p out/media


@ -1,63 +0,0 @@
# For debugging, run in this directory:
# nix eval --impure --raw --expr 'import ./python-interpreter-table.nix {}'
{ pkgs ? (import ../.. { config = { }; overlays = []; }) }:
let
lib = pkgs.lib;
inherit (lib.attrsets) attrNames filterAttrs;
inherit (lib.lists) elem filter map naturalSort reverseList;
inherit (lib.strings) concatStringsSep;
isPythonInterpreter = name:
/* NB: Package names that don't follow the regular expression:
- `python-cosmopolitan` is not part of `pkgs.pythonInterpreters`.
- `_prebuilt` interpreters are used for bootstrapping internally.
- `python3Minimal` contains python packages, left behind conservatively.
- `rustpython` lacks `pythonVersion` and `implementation`.
*/
(lib.strings.match "(pypy|python)([[:digit:]]*)" name) != null;
interpreterName = pname:
let
cuteName = {
cpython = "CPython";
pypy = "PyPy";
};
interpreter = pkgs.${pname};
in
"${cuteName.${interpreter.implementation}} ${interpreter.pythonVersion}";
interpreters = reverseList (naturalSort (
filter isPythonInterpreter (attrNames pkgs.pythonInterpreters)
));
aliases = pname:
attrNames (
filterAttrs (name: value:
isPythonInterpreter name
&& name != pname
&& interpreterName name == interpreterName pname
) pkgs
);
result = map (pname: {
inherit pname;
aliases = aliases pname;
interpreter = interpreterName pname;
}) interpreters;
toMarkdown = data:
let
line = package: ''
| ${package.pname} | ${join ", " package.aliases or [ ]} | ${package.interpreter} |
'';
in
join "" (map line data);
join = lib.strings.concatStringsSep;
in
''
| Package | Aliases | Interpreter |
|---------|---------|------------|
${toMarkdown result}
''


@ -20,6 +20,10 @@ If `wafPath` doesn't exist, then `wafHook` will copy the `waf` provided from Nix
Controls the flags passed to waf tool during build and install phases. For settings specific to build or install phases, use `wafBuildFlags` or `wafInstallFlags` respectively.
#### `dontAddWafCrossFlags` {#dont-add-waf-cross-flags}
When set to `true`, don't add cross compilation flags during configure phase.
#### `dontUseWafConfigure` {#dont-use-waf-configure}
When set to true, don't use the predefined `wafConfigurePhase`.
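A small sketch tying these variables together; the package name, source, and flag values are illustrative assumptions:

```nix
{ lib, stdenv, fetchurl, wafHook, python3 }:

stdenv.mkDerivation rec {
  pname = "example-waf-package";
  version = "1.0";

  src = fetchurl {
    url = "https://example.org/${pname}-${version}.tar.gz";
    hash = lib.fakeHash; # placeholder
  };

  # wafHook drives the configure, build and install phases; waf itself needs Python.
  nativeBuildInputs = [ wafHook python3 ];

  # Passed to the waf tool during both the build and install phases.
  wafFlags = [ "--disable-tests" ];

  # Skip adding cross-compilation flags during the configure phase.
  dontAddWafCrossFlags = true;
}
```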


@ -9,14 +9,14 @@ In Nixpkgs, `zig.hook` overrides the default build, check and install phases.
```nix
{ lib
, stdenv
, zig
, zig_0_11
}:
stdenv.mkDerivation {
# . . .
nativeBuildInputs = [
zig.hook
zig_0_11.hook
];
zigBuildFlags = [ "-Dman-pages=true" ];


@ -148,4 +148,4 @@ All new projects should use the CUDA redistributables available in [`cudaPackage
| Find libraries | `configurePhase` | Missing dependency on a `dev` output | Add the missing dependency | The `dev` output typically contains CMake configuration files |
| Find libraries | `buildPhase` or `patchelf` | Missing dependency on a `lib` or `static` output | Add the missing dependency | The `lib` or `static` output typically contains the libraries |
In the scenario you are unable to run the resulting binary: this is arguably the most complicated as it could be any combination of the previous reasons. This type of failure typically occurs when a library attempts to load or open a library it depends on that it does not declare in its `DT_NEEDED` section. As a first step, ensure that dependencies are patched with [`autoAddDriverRunpath`](https://search.nixos.org/packages?channel=unstable&type=packages&query=autoAddDriverRunpath). Failing that, try running the application with [`nixGL`](https://github.com/guibou/nixGL) or a similar wrapper tool. If that works, it likely means that the application is attempting to load a library that is not in the `RPATH` or `RUNPATH` of the binary.
In the scenario you are unable to run the resulting binary: this is arguably the most complicated as it could be any combination of the previous reasons. This type of failure typically occurs when a library attempts to load or open a library it depends on that it does not declare in its `DT_NEEDED` section. As a first step, ensure that dependencies are patched with [`cudaPackages.autoAddDriverRunpath`](https://search.nixos.org/packages?channel=unstable&type=packages&query=cudaPackages.autoAddDriverRunpath). Failing that, try running the application with [`nixGL`](https://github.com/guibou/nixGL) or a similar wrapper tool. If that works, it likely means that the application is attempting to load a library that is not in the `RPATH` or `RUNPATH` of the binary.


@ -98,12 +98,10 @@ The function `buildFlutterApplication` builds Flutter applications.
See the [Dart documentation](#ssec-dart-applications) for more details on required files and arguments.
`flutter` in Nixpkgs always points to `flutterPackages.stable`, which is the latest packaged version. To avoid unforeseen breakage during upgrade, packages in Nixpkgs should use a specific flutter version, such as `flutter319` and `flutter322`, instead of using `flutter` directly.
```nix
{ flutter, fetchFromGitHub }:
flutter322.buildFlutterApplication {
flutter.buildFlutterApplication {
pname = "firmware-updater";
version = "0-unstable-2023-04-30";


@ -117,6 +117,7 @@ For more detail about managing the `deps.nix` file, see [Generating and updating
* `useDotnetFromEnv` will change the binary wrapper so that it uses the .NET from the environment. The runtime specified by `dotnet-runtime` is given as a fallback in case no .NET is installed in the user's environment. This is most useful for .NET global tools and LSP servers, which often extend the .NET CLI and their runtime should match the users' .NET runtime.
* `dotnet-sdk` is useful in cases where you need to change what dotnet SDK is being used. You can also set this to the result of `dotnetSdkPackages.combinePackages`, if the project uses multiple SDKs to build.
* `dotnet-runtime` is useful in cases where you need to change what dotnet runtime is being used. This can be either a regular dotnet runtime, or an aspnetcore.
* `dotnet-test-sdk` is useful in cases where unit tests expect a different dotnet SDK. By default, this is set to the `dotnet-sdk` attribute.
* `testProjectFile` is useful in cases where the regular project file does not contain the unit tests. It gets restored and built, but not installed. You may need to regenerate your nuget lockfile after setting this. Note that if set, only tests from this project are executed.
* `disabledTests` is used to disable running specific unit tests. This gets passed as: `dotnet test --filter "FullyQualifiedName!={}"`, to ensure compatibility with all unit test frameworks.
* `dotnetRestoreFlags` can be used to pass flags to `dotnet restore`.
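A hedged sketch that pulls several of the attributes above together; the project names, hashes, and SDK/runtime choices are illustrative, not taken from the diff:

```nix
{ lib, buildDotnetModule, fetchFromGitHub, dotnetCorePackages }:

buildDotnetModule rec {
  pname = "example-app";
  version = "1.0.0";

  src = fetchFromGitHub {
    owner = "example";
    repo = "example-app";
    rev = "v${version}";
    hash = lib.fakeHash; # placeholder
  };

  projectFile = "src/ExampleApp.csproj";
  # Unit tests live in a separate project; it is restored and built, but not installed.
  testProjectFile = "tests/ExampleApp.Tests.csproj";
  nugetDeps = ./deps.nix;

  dotnet-sdk = dotnetCorePackages.sdk_8_0;
  dotnet-runtime = dotnetCorePackages.aspnetcore_8_0;

  # Passed as `dotnet test --filter "FullyQualifiedName!=..."`.
  disabledTests = [ "ExampleApp.Tests.FlakyNetworkTest" ];
}
```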


@ -8,7 +8,7 @@ Programs in the GNOME universe are written in various languages but they all use
[GSettings](https://developer.gnome.org/gio/stable/GSettings.html) API is often used for storing settings. GSettings schemas are required, to know the type and other metadata of the stored values. GLib looks for `glib-2.0/schemas/gschemas.compiled` files inside the directories of `XDG_DATA_DIRS`.
On Linux, GSettings API is implemented using [dconf](https://gitlab.gnome.org/GNOME/dconf) backend. You will need to add `dconf` [GIO module](#ssec-gnome-gio-modules) to `GIO_EXTRA_MODULES` variable, otherwise the `memory` backend will be used and the saved settings will not be persistent.
On Linux, GSettings API is implemented using [dconf](https://wiki.gnome.org/Projects/dconf) backend. You will need to add `dconf` [GIO module](#ssec-gnome-gio-modules) to `GIO_EXTRA_MODULES` variable, otherwise the `memory` backend will be used and the saved settings will not be persistent.
Last you will need the dconf database D-Bus service itself. You can enable it using `programs.dconf.enable`.
@ -76,13 +76,13 @@ Previously, a GTK theme needed to be in `XDG_DATA_DIRS`. This is no longer neces
### GObject introspection typelibs {#ssec-gnome-typelibs}
[GObject introspection](https://gitlab.gnome.org/GNOME/gobject-introspection) allows applications to use C libraries in other languages easily. It does this through `typelib` files searched in `GI_TYPELIB_PATH`.
[GObject introspection](https://wiki.gnome.org/Projects/GObjectIntrospection) allows applications to use C libraries in other languages easily. It does this through `typelib` files searched in `GI_TYPELIB_PATH`.
### Various plug-ins {#ssec-gnome-plugins}
If your application uses [GStreamer](https://gstreamer.freedesktop.org/) or [Grilo](https://gitlab.gnome.org/GNOME/grilo), you should set `GST_PLUGIN_SYSTEM_PATH_1_0` and `GRL_PLUGIN_PATH`, respectively.
If your application uses [GStreamer](https://gstreamer.freedesktop.org/) or [Grilo](https://wiki.gnome.org/Projects/Grilo), you should set `GST_PLUGIN_SYSTEM_PATH_1_0` and `GRL_PLUGIN_PATH`, respectively.
## Onto `wrapGApps*` hooks {#ssec-gnome-hooks}
## Onto `wrapGAppsHook` {#ssec-gnome-hooks}
Given the requirements above, the package expression would become messy quickly:
@ -102,29 +102,27 @@ Given the requirements above, the package expression would become messy quickly:
}
```
Fortunately, we have a [family of hooks]{#ssec-gnome-hooks-wrapgappshook} that automate this. They work in conjunction with other setup hooks that populate environment variables, and will then wrap all executables in `bin` and `libexec` directories using said variables.
Fortunately, there is [`wrapGAppsHook`]{#ssec-gnome-hooks-wrapgappshook}. It works in conjunction with other setup hooks that populate environment variables, and it will then wrap all executables in `bin` and `libexec` directories using said variables. For convenience, it also adds `dconf.lib` for a GIO module implementing a GSettings backend using `dconf`, `gtk3` for GSettings schemas, and `librsvg` for GdkPixbuf loader to the closure.
- [`wrapGAppsHook3`]{#ssec-gnome-hooks-wrapgappshook3} for GTK 3 apps. For convenience, it also adds `dconf.lib` for a GIO module implementing a GSettings backend using `dconf`, `gtk3` for GSettings schemas, and `librsvg` for GdkPixbuf loader to the closure.
- [`wrapGAppsHook4`]{#ssec-gnome-hooks-wrapgappshook4} for GTK 4 apps. Same as `wrapGAppsHook3` but replaces `gtk3` with `gtk4`.
- [`wrapGAppsNoGuiHook`]{#ssec-gnome-hooks-wrapgappsnoguihook} for programs without a graphical interface. Same as the above but does not bring `gtk3` and `librsvg` into the closure.
There is also [`wrapGAppsHook4`]{#ssec-gnome-hooks-wrapgappshook4}, which replaces GTK 3 with GTK 4. Instead of `wrapGAppsHook`, this should be used for all GTK4 applications.
The hooks do the following:
In case you are packaging a program without a graphical interface, you might want to use [`wrapGAppsNoGuiHook`]{#ssec-gnome-hooks-wrapgappsnoguihook}, which runs the same script as `wrapGAppsHook` but does not bring `gtk3` and `librsvg` into the closure.
- `wrapGApps*` hook itself will add the packages `share` directory to `XDG_DATA_DIRS`.
- `wrapGAppsHook` itself will add the packages `share` directory to `XDG_DATA_DIRS`.
- []{#ssec-gnome-hooks-glib} `glib` setup hook will populate `GSETTINGS_SCHEMAS_PATH` and then `wrapGApps*` hook will prepend it to `XDG_DATA_DIRS`.
- []{#ssec-gnome-hooks-glib} `glib` setup hook will populate `GSETTINGS_SCHEMAS_PATH` and then `wrapGAppsHook` will prepend it to `XDG_DATA_DIRS`.
- []{#ssec-gnome-hooks-gdk-pixbuf} `gdk-pixbuf` setup hook will populate `GDK_PIXBUF_MODULE_FILE` with the path to the biggest `loaders.cache` file from the dependencies containing [GdkPixbuf loaders](#ssec-gnome-gdk-pixbuf-loaders). This works fine when there are only two packages containing loaders (`gdk-pixbuf` and e.g. `librsvg`): it will choose the second one, reasonably expecting that it will be bigger since it describes an extra loader in addition to the default ones. But when there are more than two loader packages, this logic will break. One possible solution would be constructing a custom cache file for each package containing a program, like the `services/x11/gdk-pixbuf.nix` NixOS module does. `wrapGApps*` hook copies the `GDK_PIXBUF_MODULE_FILE` environment variable into the produced wrapper.
- []{#ssec-gnome-hooks-gdk-pixbuf} `gdk-pixbuf` setup hook will populate `GDK_PIXBUF_MODULE_FILE` with the path to the biggest `loaders.cache` file from the dependencies containing [GdkPixbuf loaders](#ssec-gnome-gdk-pixbuf-loaders). This works fine when there are only two packages containing loaders (`gdk-pixbuf` and e.g. `librsvg`): it will choose the second one, reasonably expecting that it will be bigger since it describes an extra loader in addition to the default ones. But when there are more than two loader packages, this logic will break. One possible solution would be constructing a custom cache file for each package containing a program, like the `services/x11/gdk-pixbuf.nix` NixOS module does. `wrapGAppsHook` copies the `GDK_PIXBUF_MODULE_FILE` environment variable into the produced wrapper.
- []{#ssec-gnome-hooks-gtk-drop-icon-theme-cache} One of `gtk3`'s setup hooks will remove `icon-theme.cache` files from packages' icon theme directories to avoid conflicts. Icon theme packages should prevent this with `dontDropIconThemeCache = true;`.
- []{#ssec-gnome-hooks-dconf} `dconf.lib` is a dependency of `wrapGApps*` hook, which then also adds it to the `GIO_EXTRA_MODULES` variable.
- []{#ssec-gnome-hooks-dconf} `dconf.lib` is a dependency of `wrapGAppsHook`, which then also adds it to the `GIO_EXTRA_MODULES` variable.
- []{#ssec-gnome-hooks-hicolor-icon-theme} `hicolor-icon-theme`s setup hook will add icon themes to `XDG_ICON_DIRS`.
- []{#ssec-gnome-hooks-gobject-introspection} `gobject-introspection` setup hook populates `GI_TYPELIB_PATH` variable with `lib/girepository-1.0` directories of dependencies, which is then added to wrapper by `wrapGApps*` hook. It also adds `share` directories of dependencies to `XDG_DATA_DIRS`, which is intended to promote GIR files but it also [pollutes the closures](https://github.com/NixOS/nixpkgs/issues/32790) of packages using `wrapGApps*` hook.
- []{#ssec-gnome-hooks-gobject-introspection} `gobject-introspection` setup hook populates `GI_TYPELIB_PATH` variable with `lib/girepository-1.0` directories of dependencies, which is then added to wrapper by `wrapGAppsHook`. It also adds `share` directories of dependencies to `XDG_DATA_DIRS`, which is intended to promote GIR files but it also [pollutes the closures](https://github.com/NixOS/nixpkgs/issues/32790) of packages using `wrapGAppsHook`.
- []{#ssec-gnome-hooks-gst-grl-plugins} Setup hooks of `gst_all_1.gstreamer` and `grilo` will populate the `GST_PLUGIN_SYSTEM_PATH_1_0` and `GRL_PLUGIN_PATH` variables, respectively, which will then be added to the wrapper by `wrapGApps*` hook.
- []{#ssec-gnome-hooks-gst-grl-plugins} Setup hooks of `gst_all_1.gstreamer` and `grilo` will populate the `GST_PLUGIN_SYSTEM_PATH_1_0` and `GRL_PLUGIN_PATH` variables, respectively, which will then be added to the wrapper by `wrapGAppsHook`.
You can also pass additional arguments to `makeWrapper` using `gappsWrapperArgs` in `preFixup` hook:
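For instance, a hedged sketch of that pattern (the `GDK_BACKEND` setting is purely illustrative):

```nix
{
  preFixup = ''
    gappsWrapperArgs+=(
      # Extra arguments forwarded to makeWrapper by the wrapGApps* hook (illustrative).
      --set GDK_BACKEND x11
    )
  '';
}
```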
@ -149,15 +147,15 @@ Most GNOME package offer [`updateScript`](#var-passthru-updateScript), it is the
### `GLib-GIO-ERROR **: 06:04:50.903: No GSettings schemas are installed on the system` {#ssec-gnome-common-issues-no-schemas}
There are no schemas available in `XDG_DATA_DIRS`. Temporarily add a random package containing schemas like `gsettings-desktop-schemas` to `buildInputs`. [`glib`](#ssec-gnome-hooks-glib) and [`wrapGApps*`](#ssec-gnome-hooks-wrapgappshook) setup hooks will take care of making the schemas available to the application, and you will see the actual missing schemas with the [next error](#ssec-gnome-common-issues-missing-schema). Or you can try looking through the source code for the actual schemas used.
There are no schemas available in `XDG_DATA_DIRS`. Temporarily add a random package containing schemas like `gsettings-desktop-schemas` to `buildInputs`. [`glib`](#ssec-gnome-hooks-glib) and [`wrapGAppsHook`](#ssec-gnome-hooks-wrapgappshook) setup hooks will take care of making the schemas available to the application, and you will see the actual missing schemas with the [next error](#ssec-gnome-common-issues-missing-schema). Or you can try looking through the source code for the actual schemas used.
### `GLib-GIO-ERROR **: 06:04:50.903: Settings schema org.gnome.foo is not installed` {#ssec-gnome-common-issues-missing-schema}
Package is missing some GSettings schemas. You can find out the package containing the schema with `nix-locate org.gnome.foo.gschema.xml` and let the hooks handle the wrapping as [above](#ssec-gnome-common-issues-no-schemas).
### When using `wrapGApps*` hook with special derivers you can end up with double wrapped binaries. {#ssec-gnome-common-issues-double-wrapped}
### When using `wrapGAppsHook` with special derivers you can end up with double wrapped binaries. {#ssec-gnome-common-issues-double-wrapped}
This is because derivers like `python.pkgs.buildPythonApplication` or `qt5.mkDerivation` have setup-hooks automatically added that produce wrappers with makeWrapper. The simplest way to work around that is to disable the `wrapGApps*` hook's automatic wrapping with `dontWrapGApps = true;` and pass the arguments it intended to pass to makeWrapper to the other wrapper.
This is because derivers like `python.pkgs.buildPythonApplication` or `qt5.mkDerivation` have setup-hooks automatically added that produce wrappers with makeWrapper. The simplest way to work around that is to disable `wrapGAppsHook`'s automatic wrapping with `dontWrapGApps = true;` and pass the arguments it intended to pass to makeWrapper to the other wrapper.
In the case of a Python application it could look like:
@ -167,7 +165,7 @@ python3.pkgs.buildPythonApplication {
version = "3.32.2";
nativeBuildInputs = [
wrapGAppsHook3
wrapGAppsHook
gobject-introspection
# ...
];
@ -189,7 +187,7 @@ mkDerivation {
version = "3.47.0";
nativeBuildInputs = [
wrapGAppsHook3
wrapGAppsHook
qmake
# ...
];


@ -2,7 +2,7 @@
## Building Go modules with `buildGoModule` {#ssec-language-go}
The function `buildGoModule` builds Go programs managed with Go modules. It builds [Go Modules](https://go.dev/wiki/Modules) through a two-phase build:
The function `buildGoModule` builds Go programs managed with Go modules. It builds [Go Modules](https://github.com/golang/go/wiki/Modules) through a two-phase build:
- An intermediate fetcher derivation called `goModules`. This derivation will be used to fetch all the dependencies of the Go module.
- A final derivation will use the output of the intermediate derivation to build the binaries and produce the final output.
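A minimal sketch of how the two phases surface in a package expression: `vendorHash` pins the intermediate `goModules` fetcher derivation, while the rest builds the final output. The names and hashes here are illustrative, not taken from the diff:

```nix
{ lib, buildGoModule, fetchFromGitHub }:

buildGoModule rec {
  pname = "example-tool";
  version = "1.2.3";

  src = fetchFromGitHub {
    owner = "example";
    repo = "example-tool";
    rev = "v${version}";
    hash = lib.fakeHash; # placeholder
  };

  # Hash of the vendored dependencies produced by the goModules fetcher
  # derivation (the first phase); copy the real value from the mismatch error.
  vendorHash = lib.fakeHash;
}
```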


@ -1,53 +0,0 @@
# Hare {#sec-language-hare}
## Building Hare programs with `hareHook` {#ssec-language-hare}
The `hareHook` package sets up the environment for building Hare programs by
doing the following:
1. Setting the `HARECACHE`, `HAREPATH` and `NIX_HAREFLAGS` environment variables;
1. Propagating `harec`, `qbe` and two wrapper scripts for the hare binary.
It is not a function as is the case for some other languages --- *e. g.*, Go or
Rust ---, but a package to be added to `nativeBuildInputs`.
## Attributes of `hareHook` {#hareHook-attributes}
The following attributes are accepted by `hareHook`:
1. `hareBuildType`: Either `release` (default) or `debug`. It controls if the
`-R` flag is added to `NIX_HAREFLAGS`.
## Example for `hareHook` {#ex-hareHook}
```nix
{
hareHook,
lib,
stdenv,
}: stdenv.mkDerivation {
pname = "<name>";
version = "<version>";
src = "<src>";
nativeBuildInputs = [ hareHook ];
meta = {
description = "<description>";
inherit (hareHook) badPlatforms platforms;
};
}
```
## Cross Compilation {#hareHook-cross-compilation}
`hareHook` should handle cross compilation out of the box. This is the main
purpose of `NIX_HAREFLAGS`: In it, the `-a` flag is passed with the architecture
of the `hostPlatform`.
However, manual intervention may be needed when a binary compiled by the build
process must be run for the build to complete --- *e. g.*, when using Hare's
`hare` module for code generation.
In those cases, `hareHook` provides the `hare-native` script, which is a wrapper
around the hare binary for using the native (`buildPlatform`) toolchain.


@ -230,7 +230,7 @@ completely incompatible with packages from `haskellPackages`.
Every haskell package set has its own haskell-aware `mkDerivation` which is used
to build its packages. Generally you won't have to interact with this builder
since [cabal2nix](#haskell-cabal2nix) can generate packages
since [cabal2nix][cabal2nix] can generate packages
using it for an arbitrary cabal package definition. Still it is useful to know
the parameters it takes when you need to
[override](#haskell-overriding-haskell-packages) a generated Nix expression.
@ -923,61 +923,14 @@ for this to work.
`justStaticExecutables drv`
: Only build and install the executables produced by `drv`, removing everything
that may refer to other Haskell packages' store paths (like libraries and
documentation). This dramatically reduces the closure size of the resulting
derivation. Note that the executables are only statically linked against their
Haskell dependencies, but will still link dynamically against libc, GMP and
other system library dependencies.
If a library or its dependencies use their Cabal-generated
`Paths_*` module, this may not work as well if GHC's dead code elimination is
unable to remove the references to the dependency's store path that module
contains.
As a consequence, an unused reference may be created from the static binary to such a _library_ store path.
(See [nixpkgs#164630][164630] for more information.)
Importing the `Paths_*` module may cause builds to fail with this message:
```
error: output '/nix/store/64k8iw0ryz76qpijsnl9v87fb26v28z8-my-haskell-package-1.0.0.0' is not allowed to refer to the following paths:
/nix/store/5q5s4a07gaz50h04zpfbda8xjs8wrnhg-ghc-9.6.3
```
If that happens, first disable the check for GHC references and rebuild the
derivation:
```nix
pkgs.haskell.lib.overrideCabal
(pkgs.haskell.lib.justStaticExecutables my-haskell-package)
(drv: {
disallowGhcReference = false;
})
```
Then use `strings` to determine which libraries are responsible:
```
$ nix-build ...
$ strings result/bin/my-haskell-binary | grep /nix/store/
...
/nix/store/n7ciwdlg8yyxdhbrgd6yc2d8ypnwpmgq-hs-opentelemetry-sdk-0.0.3.6/bin
...
```
Finally, use `remove-references-to` to delete those store paths from the produced output:
```nix
pkgs.haskell.lib.overrideCabal
(pkgs.haskell.lib.justStaticExecutables my-haskell-package)
(drv: {
postInstall = ''
${drv.postInstall or ""}
remove-references-to -t ${pkgs.haskellPackages.hs-opentelemetry-sdk}
'';
})
```
[164630]: https://github.com/NixOS/nixpkgs/issues/164630
that may refer to other Haskell packages' store paths (like libraries and
documentation). This dramatically reduces the closure size of the resulting
derivation. Note that the executables are only statically linked against their
Haskell dependencies, but will still link dynamically against libc, GMP and
other system library dependencies. If dependencies use their Cabal-generated
`Paths_*` module, this may not work as well if GHC's dead code elimination
is unable to remove the references to the dependency's store path that module
contains.
`enableSeparateBinOutput drv`
: Install executables produced by `drv` to a separate `bin` output. This
@ -1170,75 +1123,18 @@ for [this to work][optparse-applicative-completions].
Note that this feature is automatically disabled when cross-compiling, since it
requires executing the binaries in question.
## Import-from-Derivation helpers {#haskell-import-from-derivation}
### cabal2nix {#haskell-cabal2nix}
[`cabal2nix`][cabal2nix] can generate Nix package definitions for arbitrary
Haskell packages using [import from derivation][import-from-derivation].
`cabal2nix` will generate Nix expressions that look like this:
```nix
# cabal get mtl-2.2.1 && cd mtl-2.2.1 && cabal2nix .
{ mkDerivation, base, lib, transformers }:
mkDerivation {
pname = "mtl";
version = "2.2.1";
src = ./.;
libraryHaskellDepends = [ base transformers ];
homepage = "http://github.com/ekmett/mtl";
description = "Monad classes, using functional dependencies";
license = lib.licenses.bsd3;
}
```
This expression should be called with `haskellPackages.callPackage`, which will
supply [`haskellPackages.mkDerivation`](#haskell-mkderivation) and the Haskell
dependencies as arguments.
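For illustration, assuming the generated expression above was saved as `./mtl.nix` (a hypothetical path), it could be built like this:
```nix
let
  pkgs = import <nixpkgs> { };
in
pkgs.haskellPackages.callPackage ./mtl.nix { }
```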
`callCabal2nix name src args`
: Create a package named `name` from the source derivation `src` using
`cabal2nix`.
`args` are extra arguments provided to `haskellPackages.callPackage`.
`callCabal2nixWithOptions name src opts args`
: Create a package named `name` from the source derivation `src` using
`cabal2nix`.
`opts` are extra options for calling `cabal2nix`. If `opts` is a string, it
will be used as extra command line arguments for `cabal2nix`, e.g. `--subpath
path/to/dir/containing/cabal-file`. Otherwise, `opts` should be an AttrSet
which can contain the following attributes:
`extraCabal2nixOptions`
: Extra command line arguments for `cabal2nix`.
`srcModifier`
: A function which is used to modify the given `src` instead of the default
filter.
The default source filter will remove all files from `src` except for
`.cabal` files and `package.yaml` files.
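As a sketch of the string form described above, a cabal project living in a `backend/` subdirectory of `src` (names illustrative) could be packaged like this:
```nix
haskellPackages.callCabal2nixWithOptions "my-backend" src "--subpath backend" { }
```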
<!--
`callHackage`
: TODO
`callHackageDirect`
: TODO
`developPackage`
: TODO
-->
<!--
TODO(@NixOS/haskell): finish these planned sections
### Overriding the entire package set
## Import-from-Derivation helpers
* `callCabal2nix`
* `callHackage`, `callHackageDirect`
* `developPackage`
## Contributing {#haskell-contributing}
### Fixing a broken package {#haskell-fixing-a-broken-package}
@ -1413,4 +1309,3 @@ relevant.
[profiling]: https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/profiling.html
[search.nixos.org]: https://search.nixos.org
[turtle]: https://hackage.haskell.org/package/turtle
[import-from-derivation]: https://nixos.org/manual/nix/stable/language/import-from-derivation

View File

@ -19,7 +19,6 @@ dotnet.section.md
emscripten.section.md
gnome.section.md
go.section.md
hare.section.md
haskell.section.md
hy.section.md
idris.section.md

View File

@ -310,71 +310,6 @@ See `node2nix` [docs](https://github.com/svanderburg/node2nix) for more info.
- `node2nix` has some [bugs](https://github.com/svanderburg/node2nix/issues/238) related to working with lock files from npm distributed with `nodejs_16`.
- `node2nix` does not like missing packages from npm. If you see something like `Cannot resolve version: vue-loader-v16@undefined` then you might want to try another tool. The package might have been pulled off of npm.
### pnpm {#javascript-pnpm}
Pnpm is available as the top-level package `pnpm`. Additionally, there are variants pinned to certain major versions, like `pnpm_8` and `pnpm_9`, which support different sets of lock file versions.
When packaging an application that includes a `pnpm-lock.yaml`, you need to fetch the pnpm store for that project using a fixed-output derivation. The functions `pnpm_8.fetchDeps` and `pnpm_9.fetchDeps` can create this pnpm store derivation. In conjunction, the setup hooks `pnpm_8.configHook` and `pnpm_9.configHook` will prepare the build environment to install the prefetched dependencies store. Here is an example for a package that contains a `package.json` and a `pnpm-lock.yaml` file, using the above `pnpm_` attributes:
```nix
{
stdenv,
nodejs,
# This is pinned as { pnpm = pnpm_9; }
pnpm
}:
stdenv.mkDerivation (finalAttrs: {
pname = "foo";
version = "0-unstable-1980-01-01";
src = ...;
nativeBuildInputs = [
nodejs
pnpm.configHook
];
pnpmDeps = pnpm.fetchDeps {
inherit (finalAttrs) pname version src;
hash = "...";
};
})
```
NOTE: It is highly recommended to use a pinned version of pnpm (i.e. `pnpm_8` or `pnpm_9`), to increase future reproducibility. It might also be required to use an older version, if the package needs support for a certain lock file version.
In case you are patching `package.json` or `pnpm-lock.yaml`, make sure to pass `finalAttrs.patches` to the function as well (i.e. `inherit (finalAttrs) patches`).
#### Dealing with `sourceRoot` {#javascript-pnpm-sourceRoot}
NOTE: Nixpkgs pnpm tooling doesn't support building projects with a `pnpm-workspace.yaml`, or building monorepos. It may be possible to use `pnpm.fetchDeps` for these projects, but it may be hard or impossible to produce a binary from such projects ([an example attempt](https://github.com/NixOS/nixpkgs/pull/290715#issuecomment-2144543728)).
If the pnpm project is in a subdirectory, you can just define `sourceRoot` or `setSourceRoot` for `fetchDeps`. Note that projects using `pnpm-workspace.yaml` are currently not supported, and will probably not work using this approach.
If `sourceRoot` is different between the parent derivation and `fetchDeps`, you will have to set `pnpmRoot` to effectively be the same location as it is in `fetchDeps`.
Assuming the following directory structure, we can define `sourceRoot` and `pnpmRoot` as follows:
```
.
├── frontend
│   ├── ...
│   ├── package.json
│   └── pnpm-lock.yaml
└── ...
```
```nix
...
pnpmDeps = pnpm.fetchDeps {
...
sourceRoot = "${finalAttrs.src.name}/frontend";
};
# by default the working directory is the extracted source
pnpmRoot = "frontend";
```
### yarn2nix {#javascript-yarn2nix}
#### Preparation {#javascript-yarn2nix-preparation}

View File

@ -7,7 +7,7 @@ The following example shows a Nim program that depends only on Nim libraries:
```nix
{ lib, buildNimPackage, fetchFromGitHub }:
buildNimPackage (finalAttrs: {
buildNimPackage { } (finalAttrs: {
pname = "ttop";
version = "1.2.7";
@ -80,6 +80,7 @@ For example, to propagate a dependency on SDL2 for lockfiles that select the Nim
/* … */
sdl2 =
lockAttrs:
finalAttrs:
{ buildInputs ? [ ], ... }:
{
buildInputs = buildInputs ++ [ SDL2 ];
@ -88,8 +89,9 @@ For example, to propagate a dependency on SDL2 for lockfiles that select the Nim
}
```
The annotations in the `nim-overrides.nix` set are functions that take two arguments and return a new attrset to be overlayed on the package being built.
The annotations in the `nim-overrides.nix` set are functions that take three arguments and return a new attrset to be overlayed on the package being built.
- lockAttrs: the attrset for this library from within a lockfile. This can be used to implement library version constraints, such as marking libraries as broken or insecure.
- finalAttrs: the final attrset passed by `buildNimPackage` to `stdenv.mkDerivation`.
- prevAttrs: the attrset produced by initial arguments to `buildNimPackage` and any preceding lockfile overlays.
### Overriding an Nim library override {#nim-lock-overrides-overrides}

View File

@ -7,11 +7,10 @@ Nixpkgs provides a couple of facilities for working with this tool.
## Writing packages providing pkg-config modules {#pkg-config-writing-packages}
Packages should set `meta.pkgConfigModules` with the list of package config modules they provide.
They should also use `testers.hasPkgConfigModules` to check that the final built package matches that list,
and optionally check that the pkgconf modules' version metadata matches the derivation's.
They should also use `testers.testMetaPkgConfig` to check that the final built package matches that list.
Additionally, the [`validatePkgConfig` setup hook](https://nixos.org/manual/nixpkgs/stable/#validatepkgconfig) will do extra checks on to-be-installed pkg-config modules.
A good example of all these things is miniz:
A good example of all these things is zlib:
```nix
{ pkg-config, testers, ... }:
@ -21,14 +20,11 @@ stdenv.mkDerivation (finalAttrs: {
nativeBuildInputs = [ pkg-config validatePkgConfig ];
passthru.tests.pkg-config = testers.hasPkgConfigModules {
package = finalAttrs.finalPackage;
versionCheck = true;
};
passthru.tests.pkg-config = testers.testMetaPkgConfig finalAttrs.finalPackage;
meta = {
/* ... */
pkgConfigModules = [ "miniz" ];
pkgConfigModules = [ "zlib" ];
};
})
```

View File

@ -4,7 +4,16 @@
### Interpreters {#interpreters}
@python-interpreter-table@
| Package | Aliases | Interpreter |
|------------|-----------------|-------------|
| python27 | python2, python | CPython 2.7 |
| python39 | | CPython 3.9 |
| python310 | | CPython 3.10 |
| python311 | python3 | CPython 3.11 |
| python312 | | CPython 3.12 |
| python313 | | CPython 3.13 |
| pypy27 | pypy2, pypy | PyPy2.7 |
| pypy39 | pypy3 | PyPy 3.9 |
The Nix expressions for the interpreters can be found in
`pkgs/development/interpreters/python`.
@ -31,8 +40,8 @@ Each interpreter has the following attributes:
### Building packages and applications {#building-packages-and-applications}
Python libraries and applications that use tools to follow PEP 517 (e.g. `setuptools` or `hatchling`, etc.) or
previous tools such as `distutils` are typically built with respectively the [`buildPythonPackage`](#buildpythonpackage-function) and
Python libraries and applications that use `setuptools` or
`distutils` are typically built with respectively the [`buildPythonPackage`](#buildpythonpackage-function) and
[`buildPythonApplication`](#buildpythonapplication-function) functions. These two functions also support installing a `wheel`.
All Python packages reside in `pkgs/top-level/python-packages.nix` and all
@ -78,7 +87,6 @@ The following is an example:
, fetchPypi
# build-system
, setuptools
, setuptools-scm
# dependencies
@ -108,7 +116,6 @@ buildPythonPackage rec {
'';
build-system = [
setuptools
setuptools-scm
];
@ -136,13 +143,13 @@ buildPythonPackage rec {
The `buildPythonPackage` mainly does four things:
* In the [`buildPhase`](#build-phase), it calls `${python.pythonOnBuildForHost.interpreter} -m build --wheel` to
* In the [`buildPhase`](#build-phase), it calls `${python.pythonOnBuildForHost.interpreter} setup.py bdist_wheel` to
build a wheel binary zipfile.
* In the [`installPhase`](#ssec-install-phase), it installs the wheel file using `${python.pythonOnBuildForHost.interpreter} -m installer *.whl`.
* In the [`installPhase`](#ssec-install-phase), it installs the wheel file using `pip install *.whl`.
* In the [`postFixup`](#var-stdenv-postFixup) phase, the `wrapPythonPrograms` bash function is called to
wrap all programs in the `$out/bin/*` directory to include `$PATH`
environment variable and add dependent libraries to script's `sys.path`.
* In the [`installCheck`](#ssec-installCheck-phase) phase, `${python.interpreter} -m pytest` is run.
* In the [`installCheck`](#ssec-installCheck-phase) phase, `${python.interpreter} setup.py test` is run.
By default tests are run because [`doCheck = true`](#var-stdenv-doCheck). Test dependencies, like
e.g. the test runner, should be added to [`nativeCheckInputs`](#var-stdenv-nativeCheckInputs).
@ -179,6 +186,10 @@ following are specific to `buildPythonPackage`:
`makeWrapperArgs = ["--set FOO BAR" "--set BAZ QUX"]`.
* `namePrefix`: Prepends text to `${name}` parameter. In case of libraries, this
defaults to `"python3.8-"` for Python 3.8, etc., and in case of applications to `""`.
* `pipInstallFlags ? []`: A list of strings. Arguments to be passed to `pip
install`. To pass options to `python setup.py install`, use
`--install-option`. E.g., `pipInstallFlags=["--install-option='--cpp_implementation'"]`.
* `pipBuildFlags ? []`: A list of strings. Arguments to be passed to `pip wheel`.
* `pypaBuildFlags ? []`: A list of strings. Arguments to be passed to `python -m build --wheel`.
* `pythonPath ? []`: List of packages to be added into `$PYTHONPATH`. Packages
in `pythonPath` are not propagated (contrary to [`propagatedBuildInputs`](#var-stdenv-propagatedBuildInputs)).
@ -296,6 +307,7 @@ python3Packages.buildPythonApplication rec {
build-system = with python3Packages; [
setuptools
wheel
];
dependencies = with python3Packages; [
@ -462,11 +474,13 @@ are used in [`buildPythonPackage`](#buildpythonpackage-function).
with the `eggInstallHook`
- `eggBuildHook` to skip building for eggs.
- `eggInstallHook` to install eggs.
- `pipBuildHook` to build a wheel using `pip` and PEP 517. Note a build system
(e.g. `setuptools` or `flit`) should still be added as `build-system`.
- `pypaBuildHook` to build a wheel using
[`pypa/build`](https://pypa-build.readthedocs.io/en/latest/index.html) and
PEP 517/518. Note a build system (e.g. `setuptools` or `flit`) should still
be added as `build-system`.
- `pypaInstallHook` to install wheels.
- `pipInstallHook` to install wheels.
- `pytestCheckHook` to run tests with `pytest`. See [example usage](#using-pytestcheckhook).
- `pythonCatchConflictsHook` to fail if the package depends on two different versions of the same dependency.
- `pythonImportsCheckHook` to check whether importing the listed modules works.
@ -604,8 +618,7 @@ that sets up an interpreter pointing to them. This matters much more for "big"
modules like `pytorch` or `tensorflow`.
Module names usually match their names on [pypi.org](https://pypi.org/), but
normalized according to PEP 503/508. (e.g. Foo__Bar.baz -> foo-bar-baz)
You can use the [Nixpkgs search website](https://nixos.org/nixos/packages.html)
you can use the [Nixpkgs search website](https://nixos.org/nixos/packages.html)
to find them as well (along with non-python packages).
At this point we can create throwaway experimental Python environments with
@ -833,6 +846,7 @@ building Python libraries is [`buildPythonPackage`](#buildpythonpackage-function
, buildPythonPackage
, fetchPypi
, setuptools
, wheel
}:
buildPythonPackage rec {
@ -847,6 +861,7 @@ buildPythonPackage rec {
build-system = [
setuptools
wheel
];
# has no tests
@ -863,6 +878,7 @@ buildPythonPackage rec {
homepage = "https://github.com/pytoolz/toolz";
description = "List processing tools and functional utilities";
license = lib.licenses.bsd3;
maintainers = with lib.maintainers; [ fridh ];
};
}
```
@ -870,7 +886,7 @@ buildPythonPackage rec {
What happens here? The function [`buildPythonPackage`](#buildpythonpackage-function) is called and as argument
it accepts a set. In this case the set is a recursive set, `rec`. One of the
arguments is the name of the package, which consists of a basename (generally
following the name on PyPI) and a version. Another argument, `src` specifies the
following the name on PyPi) and a version. Another argument, `src` specifies the
source, which in this case is fetched from PyPI using the helper function
`fetchPypi`. The argument `doCheck` is used to set whether tests should be run
when building the package. Since there are no tests, we rely on [`pythonImportsCheck`](#using-pythonimportscheck)
@ -905,6 +921,7 @@ with import <nixpkgs> {};
build-system = [
python311.pkgs.setuptools
python311.pkgs.wheel
];
# has no tests
@ -957,13 +974,13 @@ order to build [`datashape`](https://github.com/blaze/datashape).
, fetchPypi
# build dependencies
, setuptools
, setuptools, wheel
# dependencies
, numpy, multipledispatch, python-dateutil
# tests
, pytestCheckHook
, pytest
}:
buildPythonPackage rec {
@ -978,6 +995,7 @@ buildPythonPackage rec {
build-system = [
setuptools
wheel
];
dependencies = [
@ -987,7 +1005,7 @@ buildPythonPackage rec {
];
nativeCheckInputs = [
pytestCheckHook
pytest
];
meta = {
@ -995,13 +1013,14 @@ buildPythonPackage rec {
homepage = "https://github.com/ContinuumIO/datashape";
description = "A data description language";
license = lib.licenses.bsd2;
maintainers = with lib.maintainers; [ fridh ];
};
}
```
We can see several runtime dependencies, `numpy`, `multipledispatch`, and
`python-dateutil`. Furthermore, we have [`nativeCheckInputs`](#var-stdenv-nativeCheckInputs) with `pytestCheckHook`.
`pytestCheckHook` is a test runner hook and is only used during the [`checkPhase`](#ssec-check-phase) and is
`python-dateutil`. Furthermore, we have [`nativeCheckInputs`](#var-stdenv-nativeCheckInputs) with `pytest`.
`pytest` is a test runner and is only used during the [`checkPhase`](#ssec-check-phase) and is
therefore not added to `dependencies`.
In the previous case we had only dependencies on other Python packages to consider.
@ -1014,6 +1033,7 @@ when building the bindings and are therefore added as [`buildInputs`](#var-stden
, buildPythonPackage
, fetchPypi
, setuptools
, wheel
, libxml2
, libxslt
}:
@ -1030,6 +1050,7 @@ buildPythonPackage rec {
build-system = [
setuptools
wheel
];
buildInputs = [
@ -1037,14 +1058,6 @@ buildPythonPackage rec {
libxslt
];
# tests are meant to be run "in-place" in the same directory as src
doCheck = false;
pythonImportsCheck = [
"lxml"
"lxml.etree"
];
meta = {
changelog = "https://github.com/lxml/lxml/releases/tag/lxml-${version}";
description = "Pythonic binding for the libxml2 and libxslt libraries";
@ -1072,6 +1085,7 @@ therefore we have to set `LDFLAGS` and `CFLAGS`.
# build dependencies
, setuptools
, wheel
# dependencies
, fftw
@ -1082,7 +1096,7 @@ therefore we have to set `LDFLAGS` and `CFLAGS`.
}:
buildPythonPackage rec {
pname = "pyfftw";
pname = "pyFFTW";
version = "0.9.2";
pyproject = true;
@ -1093,6 +1107,7 @@ buildPythonPackage rec {
build-system = [
setuptools
wheel
];
buildInputs = [
@ -1114,13 +1129,12 @@ buildPythonPackage rec {
# Tests cannot import pyfftw. pyfftw works fine though.
doCheck = false;
pythonImportsCheck = [ "pyfftw" ];
meta = {
changelog = "https://github.com/pyFFTW/pyFFTW/releases/tag/v${version}";
description = "A pythonic wrapper around FFTW, the FFT library, presenting a unified interface for all the supported transforms";
homepage = "http://hgomersall.github.com/pyFFTW";
license = with lib.licenses; [ bsd2 bsd3 ];
maintainers = with lib.maintainers; [ fridh ];
};
}
```
@ -1131,8 +1145,10 @@ Note also the line [`doCheck = false;`](#var-stdenv-doCheck), we explicitly disa
It is highly encouraged to have testing as part of the package build. This
helps to avoid situations where the package was able to build and install,
but is not usable at runtime.
Your package should provide its own [`checkPhase`](#ssec-check-phase).
but is not usable at runtime. Currently, all packages will use the `test`
command provided by the setup.py (i.e. `python setup.py test`). However,
this is deprecated (https://github.com/pypa/setuptools/pull/1878)
and your package should provide its own [`checkPhase`](#ssec-check-phase).
::: {.note}
The [`checkPhase`](#ssec-check-phase) for python maps to the `installCheckPhase` on a
@ -1203,11 +1219,9 @@ been removed, in this case, it's recommended to use `pytestCheckHook`.
#### Using pytestCheckHook {#using-pytestcheckhook}
`pytestCheckHook` is a convenient hook which will set up (or configure)
a [`checkPhase`](#ssec-check-phase) to run `pytest`. This is also beneficial
`pytestCheckHook` is a convenient hook which will substitute the setuptools
`test` command for a [`checkPhase`](#ssec-check-phase) which runs `pytest`. This is also beneficial
when a package may need many items disabled to run the test suite.
Most packages use `pytest` or `unittest`, which is compatible with `pytest`,
so you will most likely use `pytestCheckHook`.
Using the example above, the analogous `pytestCheckHook` usage would be:
@ -1362,12 +1376,10 @@ instead of a dev dependency).
Keep in mind that while the examples above are done with `requirements.txt`,
`pythonRelaxDepsHook` works by modifying the resulting wheel file, so it should
work with any of the [existing hooks](#setup-hooks).
Note that `pythonRelaxDepsHook` has no effect on build-time dependencies, such as those in `build-system`.
If a package requires incompatible build time dependencies, they should be removed in `postPatch` with `substituteInPlace` or something similar.
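For instance, a hypothetical package that pins `setuptools` too strictly in its `pyproject.toml` could drop the pin along these lines (a sketch; the file name and pin are illustrative):
```nix
{
  postPatch = ''
    substituteInPlace pyproject.toml \
      --replace-fail "setuptools==69.0" "setuptools"
  '';
}
```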
#### Using unittestCheckHook {#using-unittestcheckhook}
`unittestCheckHook` is a hook which will set up (or configure) a [`checkPhase`](#ssec-check-phase) to run `python -m unittest discover`:
`unittestCheckHook` is a hook which will substitute the setuptools `test` command for a [`checkPhase`](#ssec-check-phase) which runs `python -m unittest discover`:
```nix
{
@ -1381,8 +1393,6 @@ If a package requires incompatible build time dependencies, they should be remov
}
```
`pytest` is compatible with `unittest`, so in most cases you can use `pytestCheckHook` instead.
#### Using sphinxHook {#using-sphinxhook}
The `sphinxHook` is a helpful tool to build documentation and manpages
@ -1461,6 +1471,7 @@ We first create a function that builds `toolz` in `~/path/to/toolz/release.nix`
, buildPythonPackage
, fetchPypi
, setuptools
, wheel
}:
buildPythonPackage rec {
@ -1475,6 +1486,7 @@ buildPythonPackage rec {
build-system = [
setuptools
wheel
];
meta = {
@ -1482,6 +1494,7 @@ buildPythonPackage rec {
homepage = "https://github.com/pytoolz/toolz/";
description = "List processing tools and functional utilities";
license = lib.licenses.bsd3;
maintainers = with lib.maintainers; [ fridh ];
};
}
```
@ -1494,9 +1507,10 @@ with import <nixpkgs> {};
( let
toolz = callPackage /path/to/toolz/release.nix {
buildPythonPackage = python3Packages.buildPythonPackage;
buildPythonPackage = python310Packages.buildPythonPackage;
};
in python3.withPackages (ps: [
in python310.withPackages (ps: [
ps.numpy
toolz
])
@ -1917,8 +1931,6 @@ because we can only provide security support for non-vendored dependencies.
We recommend [nix-init](https://github.com/nix-community/nix-init) for creating new python packages within nixpkgs,
as it already prefetches the source, parses dependencies for common formats and prefills most things in `meta`.
See also [contributing section](#contributing).
### Are Python interpreters built deterministically? {#deterministic-builds}
The Python interpreters are now built deterministically. Minor modifications had
@ -1936,15 +1948,16 @@ Both are also exported in `nix-shell`.
It is recommended to test packages as part of the build process.
Source distributions (`sdist`) often include test files, but not always.
The best practice today is to pass a test hook (e.g. pytestCheckHook, unittestCheckHook) into nativeCheckInputs.
This will reconfigure the checkPhase to make use of that particular test framework.
Occasionally packages don't make use of a common test framework, which may then require a custom checkPhase.
By default the command `python setup.py test` is run as part of the
[`checkPhase`](#ssec-check-phase), but often it is necessary to pass a custom [`checkPhase`](#ssec-check-phase). An
example of such a situation is when `py.test` is used.
#### Common issues {#common-issues}
* Non-working tests can often be deselected. Most Python modules
do follow the standard test protocol where the pytest runner can be used.
`pytest` supports the `-k` and `--ignore` parameters to ignore test
* Non-working tests can often be deselected. By default [`buildPythonPackage`](#buildpythonpackage-function)
runs `python setup.py test`, which is deprecated. Most Python modules, however,
do follow the standard test protocol where the pytest runner can be used
instead. `pytest` supports the `-k` and `--ignore` parameters to ignore test
methods or classes as well as whole files. For `pytestCheckHook` these are
conveniently exposed as `disabledTests` and `disabledTestPaths` respectively.
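As an illustration, a sketch of deselecting tests with `pytestCheckHook` (test and path names are hypothetical):
```nix
{
  nativeCheckInputs = [ pytestCheckHook ];
  # Hypothetical names: skip tests that need network access
  disabledTests = [ "test_requires_network" ];
  disabledTestPaths = [ "tests/test_integration.py" ];
}
```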
@ -1985,25 +1998,14 @@ The following rules are desired to be respected:
* Python applications live outside of `python-packages.nix` and are packaged
with [`buildPythonApplication`](#buildpythonapplication-function).
* Make sure libraries build for all Python interpreters.
If a library fails to build on some Python versions, consider disabling those versions by setting `disabled = pythonAtLeast "3.x"` along with a comment.
* The two parameters, `pyproject` and `build-system` are set to avoid the legacy setuptools/distutils build.
* Only unversioned attributes (e.g. `pydantic`, but not `pydantic_1`) can be included in `dependencies`,
since due to `PYTHONPATH` limitations we can only ever support a single version for libraries
without running into duplicate module name conflicts.
* The version restrictions of `dependencies` can be relaxed by [`pythonRelaxDepsHook`](#using-pythonrelaxdepshook).
* Make sure the tests are enabled using for example [`pytestCheckHook`](#using-pytestcheckhook) and, in the case of
* By default we enable tests. Make sure the tests are found and, in the case of
libraries, are passing for all interpreters. If certain tests fail they can be
disabled individually. Try to avoid disabling the tests altogether. In any
case, when you disable tests, leave a comment explaining why.
* `pythonImportsCheck` is set. This is still a good smoke test even if `pytestCheckHook` is set.
* `meta.platforms` takes the default value in many cases.
It does not need to be set explicitly unless the package requires a specific platform.
* The file is formatted with `nixfmt-rfc-style`.
* Commit names of Python libraries should reflect that they are Python
libraries, so write for example `python311Packages.numpy: 1.11 -> 1.12`.
It is highly recommended to specify the current default version to enable
automatic build by ofborg.
Note that `pythonPackages` is an alias for `python27Packages`.
* Attribute names in `python-packages.nix` as well as `pname`s should match the
library's name on PyPI, but be normalized according to [PEP
0503](https://www.python.org/dev/peps/pep-0503/#normalized-names). This means
@ -2017,8 +2019,6 @@ The following rules are desired to be respected:
* Attribute names in `python-packages.nix` should be sorted alphanumerically to
avoid merge conflicts and ease locating attributes.
This list is useful for reviewers as well as for self-checking when submitting packages.
## Package set maintenance {#python-package-set-maintenance}
The whole Python package set has a lot of packages that do not see regular

View File

@ -40,29 +40,31 @@ pkgs.linux_latest.override {
## Manual kernel configuration {#sec-manual-kernel-configuration}
Sometimes it may not be desirable to use kernels built with `pkgs.buildLinux`, especially if most of the common configuration has to be altered or disabled to achieve a kernel as expected by the target use case.
An example of this is building a kernel for use in a VM or micro VM. You can use `pkgs.linuxPackages_custom` in these cases. It requires the `src`, `version`, and `configfile` attributes to be specified.
An example of this is building a kernel for use in a VM or micro VM. You can use `pkgs.linuxManualConfig` in these cases. It requires the `src`, `version`, and `configfile` attributes to be specified.
:::{.example #ex-using-linux-manual-config}
# Using `pkgs.linuxPackages_custom` with a specific source, version, and config file
# Using `pkgs.linuxManualConfig` with a specific source, version, and config file
```nix
{ pkgs, ... }:
pkgs.linuxPackages_custom {
{ pkgs, ... }: {
version = "6.1.55";
src = pkgs.fetchurl {
url = "https://cdn.kernel.org/pub/linux/kernel/v6.x/linux-${version}.tar.xz";
hash = "sha256:1h0mzx52q9pvdv7rhnvb8g68i7bnlc9rf8gy9qn4alsxq4g28zm8";
};
configfile = ./path_to_config_file;
linux = pkgs.linuxManualConfig {
inherit version src configfile;
allowImportFromDerivation = true;
};
}
```
If necessary, the version string can be slightly modified to explicitly mark it as a custom version. If you do so, ensure the `modDirVersion` attribute matches the source's version, otherwise the build will fail.
```nix
{ pkgs, ... }:
pkgs.linuxPackages_custom {
{ pkgs, ... }: {
version = "6.1.55-custom";
modDirVersion = "6.1.55";
src = pkgs.fetchurl {
@ -70,12 +72,16 @@ pkgs.linuxPackages_custom {
hash = "sha256:1h0mzx52q9pvdv7rhnvb8g68i7bnlc9rf8gy9qn4alsxq4g28zm8";
};
configfile = ./path_to_config_file;
linux = pkgs.linuxManualConfig {
inherit version modDirVersion src configfile;
allowImportFromDerivation = true;
};
}
```
:::
Additional attributes can be used with `linuxManualConfig` for further customisation instead of `linuxPackages_custom`. You're encouraged to read [the `pkgs.linuxManualConfig` source code](https://github.com/NixOS/nixpkgs/blob/d77bda728d5041c1294a68fb25c79e2d161f62b9/pkgs/os-specific/linux/kernel/manual-config.nix) to understand how to use them.
Additional attributes can be used with `linuxManualConfig` for further customisation. You're encouraged to read [the `pkgs.linuxManualConfig` source code](https://github.com/NixOS/nixpkgs/blob/d77bda728d5041c1294a68fb25c79e2d161f62b9/pkgs/os-specific/linux/kernel/manual-config.nix) to understand how to use them.
To edit the `.config` file for Linux X.Y from within Nix, proceed as follows:

View File

@ -3,7 +3,6 @@
```{=include=} chapters
stdenv/stdenv.chapter.md
stdenv/meta.chapter.md
stdenv/passthru.chapter.md
stdenv/multiple-output.chapter.md
stdenv/cross-compilation.chapter.md
stdenv/platform-notes.chapter.md

View File

@ -101,7 +101,7 @@ For example, a package which requires dynamic linking and cannot be linked stati
```nix
{
meta.platforms = lib.platforms.all;
meta.badPlatforms = [ lib.systems.inspect.platformPatterns.isStatic ];
meta.badPlatforms = [ lib.systems.inspect.patterns.isStatic ];
}
```
@ -110,6 +110,83 @@ Some packages use this to automatically detect the maximum set of features with
For example, `systemd` [requires dynamic linking](https://github.com/systemd/systemd/issues/20600#issuecomment-912338965), and [has a `meta.badPlatforms` setting](https://github.com/NixOS/nixpkgs/blob/b03ac42b0734da3e7be9bf8d94433a5195734b19/pkgs/os-specific/linux/systemd/default.nix#L752) similar to the one above.
Packages which can be built with or without `systemd` support will use `lib.meta.availableOn` to detect whether or not `systemd` is available on the [`hostPlatform`](#ssec-cross-platform-parameters) for which they are being built; if it is not available (e.g. due to a statically-linked host platform like `pkgsStatic`) this support will be disabled by default.
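A minimal sketch of that pattern (the package and argument names are illustrative):
```nix
{ lib, stdenv, systemd, ... }:
stdenv.mkDerivation {
  pname = "example";
  version = "1.0";
  # ...
  # Only pull in systemd where it is actually available, e.g. not in pkgsStatic
  buildInputs = lib.optionals (lib.meta.availableOn stdenv.hostPlatform systemd) [ systemd ];
}
```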
### `tests` {#var-meta-tests}
::: {.warning}
This attribute is special in that it is not actually under the `meta` attribute set but rather under the `passthru` attribute set. This is due to how `meta` attributes work, and the fact that they are supposed to contain only metadata, not derivations.
:::
An attribute set with tests as values. A test is a derivation that builds when the test passes and fails to build otherwise.
You can run these tests with:
```ShellSession
$ cd path/to/nixpkgs
$ nix-build -A your-package.tests
```
#### Package tests {#var-meta-tests-packages}
Tests that are part of the source package are often executed in the `installCheckPhase`.
Prefer `passthru.tests` for tests that are introduced in nixpkgs because:
* `passthru.tests` tests the 'real' package, independently from the environment in which it was built
* we can run `passthru.tests` independently
* `installCheckPhase` adds overhead to each build
For more on how to write and run package tests, see [](#sec-package-tests).
#### NixOS tests {#var-meta-tests-nixos}
The NixOS tests are available as `nixosTests` in parameters of derivations. For instance, the OpenSMTPD derivation includes lines similar to:
```nix
{ /* ... , */ nixosTests }:
{
# ...
passthru.tests = {
basic-functionality-and-dovecot-integration = nixosTests.opensmtpd;
};
}
```
NixOS tests run in a VM, so they are slower than regular package tests. For more information see [NixOS module tests](https://nixos.org/manual/nixos/stable/#sec-nixos-tests).
Alternatively, you can specify other derivations as tests. You can make use of
the optional parameter to inject the correct package without
relying on non-local definitions, even in the presence of `overrideAttrs`.
Here that's `finalAttrs.finalPackage`, but you could choose a different name if
`finalAttrs` already exists in your scope.
`(mypkg.overrideAttrs f).passthru.tests` will be as expected, as long as the
definition of `tests` does not rely on the original `mypkg` or overrides it in
all places.
```nix
# my-package/default.nix
{ stdenv, callPackage }:
stdenv.mkDerivation (finalAttrs: {
# ...
passthru.tests.example = callPackage ./example.nix { my-package = finalAttrs.finalPackage; };
})
```
```nix
# my-package/example.nix
{ runCommand, lib, my-package, ... }:
runCommand "my-package-test" {
nativeBuildInputs = [ my-package ];
src = lib.sources.sourcesByRegex ./. [ ".*.in" ".*.expected" ];
} ''
my-package --help
my-package <example.in >example.actual
diff -U3 --color=auto example.expected example.actual
mkdir $out
''
```
### `timeout` {#var-meta-timeout}
A timeout (in seconds) for building the derivation. If the derivation takes longer than this time to build, Hydra will fail it for exceeding the timeout. However, not all computers have the same computing power, so some builders may apply a multiplicative factor to this value. When filling this value in, try to keep it approximately consistent with other values already present in `nixpkgs`.
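For example, a one-hour limit (value illustrative) would be expressed as:
```nix
{
  meta.timeout = 3600; # seconds
}
```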

View File

@ -1,157 +0,0 @@
# Passthru-attributes {#chap-passthru}
[]{#var-stdenv-passthru} []{#special-variables} <!-- legacy anchors -->
As opposed to most other `mkDerivation` input attributes, `passthru` is not passed to the derivation's [`builder` executable](https://nixos.org/manual/nix/stable/expressions/derivations.html#attr-builder).
Changing it will not trigger a rebuild; it is "passed through".
Its value can be accessed as if it was set inside a derivation.
::: {.note}
`passthru` attributes follow no particular schema, but there are a few [conventional patterns](#sec-common-passthru-attributes).
:::
:::{.example #ex-accessing-passthru}
## Setting and accessing `passthru` attributes
```nix
{ stdenv, fetchGit }:
let
hello = stdenv.mkDerivation {
pname = "hello";
src = fetchGit { /* ... */ };
passthru = {
foo = "bar";
baz = {
value1 = 4;
value2 = 5;
};
};
};
in
hello.baz.value1
```
```
4
```
:::
## Common `passthru`-attributes {#sec-common-passthru-attributes}
Many `passthru` attributes are situational, so this section only lists recurring patterns.
They fall in one of these categories:
- Global conventions, which are applied almost universally in Nixpkgs.
Generally these don't entail any special support built into the derivation they belong to.
Common examples of this type are [`passthru.tests`](#var-passthru-tests) and [`passthru.updateScript`](#var-passthru-updateScript).
- Conventions for adding extra functionality to a derivation.
These tend to entail support from the derivation or the `passthru` attribute in question.
Common examples of this type are `passthru.optional-dependencies`, `passthru.withPlugins`, and `passthru.withPackages`.
All of those allow associating the package with a set of components built for that specific package, such as when building Python runtime environments using [`python.withPackages`](#python.withpackages-function).
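For instance, `python3.withPackages` composes such an environment from the package set built for that specific interpreter (package choices are illustrative):
```nix
# An environment containing Python plus two libraries from its package set
python3.withPackages (ps: [ ps.requests ps.numpy ])
```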
Attributes that apply only to particular [build helpers](#part-builders) or [language ecosystems](#chap-language-support) are documented there.
### `passthru.tests` {#var-passthru-tests}
[]{#var-meta-tests} <!-- legacy anchor -->
An attribute set with tests as values.
A test is a derivation that builds when the test passes and fails to build otherwise.
Run these tests with:
```ShellSession
$ cd path/to/nixpkgs
$ nix-build -A your-package.tests
```
:::{.note}
The Nixpkgs systems for continuous integration [Hydra](https://hydra.nixos.org/) and [`nixpkgs-review`](https://github.com/Mic92/nixpkgs-review) don't build these derivations by default, and [`@ofborg`](https://github.com/NixOS/ofborg) only builds them when evaluating pull requests for that particular package, or when manually instructed.
:::
#### Package tests {#var-passthru-tests-packages}
[]{#var-meta-tests-packages} <!-- legacy anchor -->
Tests that are part of the source package, if they run quickly, are typically executed in the [`installCheckPhase`](#var-stdenv-phases).
This phase is also suitable for performing a `--version` test for packages that support such a flag.
Most programs distributed by Nixpkgs support such a `--version` flag, and successfully calling the program with that flag indicates that the package at least got compiled properly.
:::{.example #ex-checking-build-installCheckPhase}
## Checking builds with `installCheckPhase`
When building `git`, a rudimentary test for successful compilation would be running `git --version`:
```nix
stdenv.mkDerivation (finalAttrs: {
pname = "git";
version = "1.2.3";
# ...
doInstallCheck = true;
installCheckPhase = ''
runHook preInstallCheck
echo checking if 'git --version' mentions ${finalAttrs.version}
$out/bin/git --version | grep ${finalAttrs.version}
runHook postInstallCheck
'';
# ...
})
```
:::
However, non-trivial tests are a better fit for `passthru.tests`, because they:
- Access the package as consumers would, independently from the environment in which it was built
- Can be run and debugged without rebuilding the package, which is useful if that takes a long time
- Don't add overhead to each build, as opposed to `installCheckPhase`
It is also possible to use `passthru.tests` to test the version with [`testVersion`](#tester-testVersion).
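A minimal sketch of such a version test, assuming the `finalAttrs` fixed-point style used elsewhere in this chapter and that `testers` is taken as a function argument:
```nix
{
  passthru.tests.version = testers.testVersion {
    package = finalAttrs.finalPackage;
  };
}
```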
<!-- NOTE(@fricklerhandwerk): one may argue whether that testing guide should rather be in the user's manual -->
For more on how to write and run package tests for Nixpkgs, see the [testing section in the package contributor guide](https://github.com/NixOS/nixpkgs/blob/master/pkgs/README.md#package-tests).
#### NixOS tests {#var-passthru-tests-nixos}
[]{#var-meta-tests-nixos} <!-- legacy anchor -->
Tests written for NixOS are available as the `nixosTests` argument to package recipes.
For instance, the [OpenSMTPD derivation](https://search.nixos.org/packages?show=opensmtpd) includes lines similar to:
```nix
{ nixosTests, ... }:
{
# ...
passthru.tests = {
basic-functionality-and-dovecot-integration = nixosTests.opensmtpd;
};
}
```
NixOS tests run in a virtual machine (VM), so they are slower than regular package tests.
For more information see the NixOS manual on [NixOS module tests](https://nixos.org/manual/nixos/stable/#sec-nixos-tests).
### `passthru.updateScript` {#var-passthru-updateScript}
<!-- legacy anchors -->
[]{#var-passthru-updateScript-command}
[]{#var-passthru-updateScript-set-command}
[]{#var-passthru-updateScript-set-attrPath}
[]{#var-passthru-updateScript-set-supportedFeatures}
[]{#var-passthru-updateScript-env-UPDATE_NIX_NAME}
[]{#var-passthru-updateScript-env-UPDATE_NIX_PNAME}
[]{#var-passthru-updateScript-env-UPDATE_NIX_OLD_VERSION}
[]{#var-passthru-updateScript-env-UPDATE_NIX_ATTR_PATH}
[]{#var-passthru-updateScript-execution}
[]{#var-passthru-updateScript-supported-features}
[]{#var-passthru-updateScript-commit}
[]{#var-passthru-updateScript-commit-attrPath}
[]{#var-passthru-updateScript-commit-oldVersion}
[]{#var-passthru-updateScript-commit-newVersion}
[]{#var-passthru-updateScript-commit-files}
[]{#var-passthru-updateScript-commit-commitBody}
[]{#var-passthru-updateScript-commit-commitMessage}
[]{#var-passthru-updateScript-example-commit}
Nixpkgs tries to automatically update all packages that have a `passthru.updateScript` attribute.
See the [section on automatic package updates in the package contributor guide](https://github.com/NixOS/nixpkgs/blob/master/pkgs/README.md#automatic-package-updates) for details.

View File

@ -442,6 +442,145 @@ If set to `true`, `stdenv` will pass specific flags to `make` and other build to
Unless set to `false`, some build systems with good support for parallel building including `cmake`, `meson`, and `qmake` will set it to `true`.
### Special variables {#special-variables}
#### `passthru` {#var-stdenv-passthru}
This is an attribute set which can be filled with arbitrary values. For example:
```nix
{
passthru = {
foo = "bar";
baz = {
value1 = 4;
value2 = 5;
};
};
}
```
Values inside it are not passed to the builder, so you can change them without triggering a rebuild. However, they can be accessed outside of a derivation directly, as if they were set inside a derivation itself, e.g. `hello.baz.value1`. We don't specify any usage or schema of `passthru` - it is meant for values that would be useful outside the derivation in other parts of a Nix expression (e.g. in other derivations). An example would be to convey some specific dependency of your derivation which contains a program with plugins support. Later, others who make derivations with plugins can use the passed-through dependency to ensure that their plugins are binary-compatible with the built program.
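A hypothetical sketch of that pattern, exposing the exact library a program was built against so that out-of-tree plugin derivations can stay binary-compatible:
```nix
{ stdenv, lua5_4 }:
stdenv.mkDerivation {
  pname = "editor"; # hypothetical program with plugin support
  version = "1.0";
  # ...
  buildInputs = [ lua5_4 ];
  # Plugins can refer to `editor.lua` to link against the same Lua version
  passthru.lua = lua5_4;
}
```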
#### `passthru.updateScript` {#var-passthru-updateScript}
A script to be run by `maintainers/scripts/update.nix` when the package is matched. The attribute can contain one of the following:
- []{#var-passthru-updateScript-command} an executable file, either on the file system:
```nix
{
passthru.updateScript = ./update.sh;
}
```
or inside the expression itself:
```nix
{
passthru.updateScript = writeScript "update-zoom-us" ''
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p curl pcre2 common-updater-scripts
set -eu -o pipefail
version="$(curl -sI https://zoom.us/client/latest/zoom_x86_64.tar.xz | grep -Fi 'Location:' | pcre2grep -o1 '/(([0-9]\.?)+)/')"
update-source-version zoom-us "$version"
'';
}
```
- a list, a script followed by arguments to be passed to it:
```nix
{
passthru.updateScript = [ ../../update.sh pname "--requested-release=unstable" ];
}
```
- an attribute set containing:
- [`command`]{#var-passthru-updateScript-set-command} a string or list in the [format expected by `passthru.updateScript`](#var-passthru-updateScript-command).
- [`attrPath`]{#var-passthru-updateScript-set-attrPath} (optional) a string containing the canonical attribute path for the package. If present, it will be passed to the update script instead of the attribute path on which the package was discovered during Nixpkgs traversal.
- [`supportedFeatures`]{#var-passthru-updateScript-set-supportedFeatures} (optional) a list of the [extra features](#var-passthru-updateScript-supported-features) the script supports.
```nix
{
passthru.updateScript = {
command = [ ../../update.sh pname ];
attrPath = pname;
supportedFeatures = [ /* ... */ ];
};
}
```
::: {.tip}
A common pattern is to use the [`nix-update-script`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/common-updater/nix-update.nix) attribute provided in Nixpkgs, which runs [`nix-update`](https://github.com/Mic92/nix-update):
```nix
{
passthru.updateScript = nix-update-script { };
}
```
For simple packages, this is often enough, and will ensure that the package is updated automatically by [`nixpkgs-update`](https://ryantm.github.io/nixpkgs-update) when a new version is released. The [update bot](https://nix-community.org/update-bot) runs periodically to attempt to automatically update packages, and will run `passthru.updateScript` if set. While not strictly necessary if the project is listed on [Repology](https://repology.org), using `nix-update-script` allows the package to update via many more sources (e.g. GitHub releases).
:::
##### How are update scripts executed? {#var-passthru-updateScript-execution}
Update scripts are to be invoked by `maintainers/scripts/update.nix` script. You can run `nix-shell maintainers/scripts/update.nix` in the root of Nixpkgs repository for information on how to use it. `update.nix` offers several modes for selecting packages to update (e.g. select by attribute path, traverse Nixpkgs and filter by maintainer, etc.), and it will execute update scripts for all matched packages that have an `updateScript` attribute.
Each update script will be passed the following environment variables:
- [`UPDATE_NIX_NAME`]{#var-passthru-updateScript-env-UPDATE_NIX_NAME} content of the `name` attribute of the updated package.
- [`UPDATE_NIX_PNAME`]{#var-passthru-updateScript-env-UPDATE_NIX_PNAME} content of the `pname` attribute of the updated package.
- [`UPDATE_NIX_OLD_VERSION`]{#var-passthru-updateScript-env-UPDATE_NIX_OLD_VERSION} content of the `version` attribute of the updated package.
- [`UPDATE_NIX_ATTR_PATH`]{#var-passthru-updateScript-env-UPDATE_NIX_ATTR_PATH} attribute path the `update.nix` discovered the package on (or the [canonical `attrPath`](#var-passthru-updateScript-set-attrPath) when available). Example: `pantheon.elementary-terminal`
::: {.note}
An update script will usually be run from the root of the Nixpkgs repository, but you should not rely on that. Also note that `update.nix` executes update scripts in parallel by default, so you should avoid running `git commit` or any other commands that cannot handle that.
:::
::: {.tip}
While update scripts should not create commits themselves, `maintainers/scripts/update.nix` supports automatically creating commits when running it with `--argstr commit true`. If you need to customize the commit message, you can have the update script implement the [`commit`](#var-passthru-updateScript-commit) feature.
:::
##### Supported features {#var-passthru-updateScript-supported-features}
###### `commit` {#var-passthru-updateScript-commit}
This feature allows update scripts to *ask* `update.nix` to create Git commits.
When support of this feature is declared, whenever the update script exits with `0` return status, it is expected to print a JSON list containing an object described below for each updated attribute to standard output.
When `update.nix` is run with `--argstr commit true` arguments, it will create a separate commit for each of the objects. An empty list can be returned when the script did not update any files, for example, when the package is already at the latest version.
The commit object contains the following values:
- [`attrPath`]{#var-passthru-updateScript-commit-attrPath} a string containing attribute path.
- [`oldVersion`]{#var-passthru-updateScript-commit-oldVersion} a string containing old version.
- [`newVersion`]{#var-passthru-updateScript-commit-newVersion} a string containing new version.
- [`files`]{#var-passthru-updateScript-commit-files} a non-empty list of file paths (as strings) to add to the commit.
- [`commitBody`]{#var-passthru-updateScript-commit-commitBody} (optional) a string with extra content to be appended to the default commit message (useful for adding changelog links).
- [`commitMessage`]{#var-passthru-updateScript-commit-commitMessage} (optional) a string to use instead of the default commit message.
If the returned array contains exactly one object (e.g. `[{}]`), all values are optional and will be determined automatically.
::: {.example #var-passthru-updateScript-example-commit}
# Standard output of an update script using commit feature
```json
[
{
"attrPath": "volume_key",
"oldVersion": "0.3.11",
"newVersion": "0.3.12",
"files": [
"/path/to/nixpkgs/pkgs/development/libraries/volume-key/default.nix"
]
}
]
```
:::
### Fixed-point arguments of `mkDerivation` {#mkderivation-recursive-attributes}
If you pass a function to `mkDerivation`, it will receive as its argument the final arguments, including the overrides when reinvoked via `overrideAttrs`. For example:
@ -494,7 +633,7 @@ in pkg
Unlike the `pkg` binding in the above example, the `finalAttrs` parameter always references the final attributes. For instance `(pkg.overrideAttrs(x)).finalAttrs.finalPackage` is identical to `pkg.overrideAttrs(x)`, whereas `(pkg.overrideAttrs(x)).original` is the same as the original `pkg`.
See also the section about [`passthru.tests`](#var-passthru-tests).
See also the section about [`passthru.tests`](#var-meta-tests).
## Phases {#sec-stdenv-phases}
@ -1006,7 +1145,7 @@ This setup works as follows:
The installCheck phase checks whether the package was installed correctly by running its test suite against the installed directories. The default `installCheck` calls `make installcheck`.
It is often better to add tests that are not part of the source distribution to `passthru.tests` (see
[](#var-passthru-tests)). This avoids adding overhead to every build and enables us to run them independently.
[](#var-meta-tests)). This avoids adding overhead to every build and enables us to run them independently.
#### Variables controlling the installCheck phase {#variables-controlling-the-installcheck-phase}
@ -1419,8 +1558,6 @@ Both parameters take a list of flags as strings. The special `"all"` flag can be
For more in-depth information on these hardening flags and hardening in general, refer to the [Debian Wiki](https://wiki.debian.org/Hardening), [Ubuntu Wiki](https://wiki.ubuntu.com/Security/Features), [Gentoo Wiki](https://wiki.gentoo.org/wiki/Project:Hardened), and the [Arch Wiki](https://wiki.archlinux.org/title/Security).
Note that support for some hardening flags varies by compiler, CPU architecture, target OS and libc. Combinations of these that don't support a particular hardening flag will silently ignore attempts to enable it. To see exactly which hardening flags are being employed in any invocation, the `NIX_DEBUG` environment variable can be used.
### Hardening flags enabled by default {#sec-hardening-flags-enabled-by-default}
The following flags are enabled by default and might require disabling with `hardeningDisable` if the program to package is incompatible.
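A sketch of opting a package out of one of these default flags (the package itself is hypothetical):
```nix
stdenv.mkDerivation {
  pname = "legacy-tool"; # hypothetical package whose code base trips the fortify checks
  version = "1.0";
  # ...
  hardeningDisable = [ "fortify" ];
}
```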
@ -1470,16 +1607,6 @@ installwatch.c:3751:5: error: conflicting types for '__open_2'
fcntl2.h:50:4: error: call to '__open_missing_mode' declared with attribute error: open with O_CREAT or O_TMPFILE in second argument needs 3 arguments
```
Disabling `fortify` implies disablement of `fortify3`
#### `fortify3` {#fortify3}
Adds the `-O2 -D_FORTIFY_SOURCE=3` compiler options. This expands the cases that can be protected by fortify-checks to include some situations with dynamic-length buffers whose length can be inferred at runtime using compiler hints.
Enabling this flag implies enablement of `fortify`. Disabling this flag does not imply disablement of `fortify`.
This flag can sometimes conflict with a build-system's own attempts at enabling fortify support and result in errors complaining about `redefinition of _FORTIFY_SOURCE`.
#### `pic` {#pic}
Adds the `-fPIC` compiler option. This adds support for position-independent code in shared libraries and thus makes ASLR possible.
@ -1528,16 +1655,6 @@ Adds the `-fPIE` compiler and `-pie` linker options. Position Independent Execut
Static libraries need to be compiled with `-fPIE` so that executables can link them in with the `-pie` linker option.
If the libraries lack `-fPIE`, you will get the error `recompile with -fPIE`.
#### `zerocallusedregs` {#zerocallusedregs}
Adds the `-fzero-call-used-regs=used-gpr` compiler option. This causes the general-purpose registers that an architecture's calling convention considers "call-used" to be zeroed on return from the function. This can make it harder for attackers to construct useful ROP gadgets and also reduces the chance of data leakage from a function call.
#### `trivialautovarinit` {#trivialautovarinit}
Adds the `-ftrivial-auto-var-init=pattern` compiler option. This causes "trivially-initializable" uninitialized stack variables to be forcibly initialized with a nonzero value that is likely to cause a crash (and therefore be noticed). Uninitialized variables generally take on their values based on fragments of previous program state, and attackers can carefully manipulate that state to craft malicious initial values for these variables.
Use of this flag is controversial as it can prevent tools that detect uninitialized variable use (such as valgrind) from operating correctly.
[^footnote-stdenv-ignored-build-platform]: The build platform is ignored because it is a mere implementation detail of the package satisfying the dependency: As a general programming principle, dependencies are always *specified* as interfaces, not concrete implementation.
[^footnote-stdenv-native-dependencies-in-path]: Currently, this means for native builds all dependencies are put on the `PATH`. But in the future that may not be the case for sake of matching cross: the platforms would be assumed to be unique for native and cross builds alike, so only the `depsBuild*` and `nativeBuildInputs` would be added to the `PATH`.
[^footnote-stdenv-propagated-dependencies]: Nix itself already takes a packages transitive dependencies into account, but this propagation ensures nixpkgs-specific infrastructure like [setup hooks](#ssec-setup-hooks) also are run as if it were a propagated dependency.

View File

@ -431,11 +431,6 @@ div.appendix .informaltable td {
padding: 0.5rem;
}
div.book .variablelist .term,
div.appendix .variablelist .term {
font-weight: 500;
}
/*
This relies on highlight.js applying certain classes on the prompts.
For more details, see https://highlightjs.readthedocs.io/en/latest/css-classes-reference.html#stylable-scopes

View File

@ -5,15 +5,14 @@
outputs = { self }:
let
jobs = import ./pkgs/top-level/release.nix {
nixpkgs = self;
};
libVersionInfoOverlay = import ./lib/flake-version-info.nix self;
lib = (import ./lib).extend libVersionInfoOverlay;
forAllSystems = lib.genAttrs lib.systems.flakeExposed;
jobs = forAllSystems (system: import ./pkgs/top-level/release.nix {
nixpkgs = self;
inherit system;
});
in
{
lib = lib.extend (final: prev: {
@ -44,14 +43,12 @@
);
});
checks = forAllSystems (system: {
tarball = jobs.${system}.tarball;
} // lib.optionalAttrs (self.legacyPackages.${system}.stdenv.isLinux) {
checks.x86_64-linux = {
tarball = jobs.tarball;
# Test that ensures that the nixosSystem function can accept a lib argument
# Note: prefer not to extend or modify `lib`, especially if you want to share reusable modules
# alternatives include: `import` a file, or put a custom library in an option or in `_module.args.<libname>`
nixosSystemAcceptsLib = (self.lib.nixosSystem {
pkgs = self.legacyPackages.${system};
lib = self.lib.extend (final: prev: {
ifThisFunctionIsMissingTheTestFails = final.id;
});
@ -67,13 +64,13 @@
})
];
}).config.system.build.toplevel;
});
};
htmlDocs = {
nixpkgsManual = builtins.mapAttrs (_: jobSet: jobSet.manual) jobs;
nixpkgsManual = jobs.manual;
nixosManual = (import ./nixos/release-small.nix {
nixpkgs = self;
}).nixos.manual;
}).nixos.manual.x86_64-linux;
};
# The "legacy" in `legacyPackages` doesn't imply that the packages exposed

View File

@ -1 +1 @@
24.11
24.05

View File

@ -90,16 +90,7 @@ rec {
mkOption ?
k: v: if v == null
then []
else if optionValueSeparator == null then
[ (mkOptionName k) (lib.generators.mkValueStringDefault {} v) ]
else
[ "${mkOptionName k}${optionValueSeparator}${lib.generators.mkValueStringDefault {} v}" ],
# how to separate an option from its flag;
# by default, there is no separator, so option `-c` and value `5`
# would become ["-c" "5"].
# This is useful if the command requires equals, for example, `-c=5`.
optionValueSeparator ? null
else [ (mkOptionName k) (lib.generators.mkValueStringDefault {} v) ]
}:
options:
let

View File

@ -1,17 +1,16 @@
/**
Collection of functions useful for debugging
broken nix expressions.
/* Collection of functions useful for debugging
broken nix expressions.
* `trace`-like functions take two values, print
the first to stderr and return the second.
* `traceVal`-like functions take one argument
which is both printed and returned.
* `traceSeq`-like functions fully evaluate their
traced value before printing (not just to weak
head normal form like trace does by default).
* Functions that end in `-Fn` take an additional
function as their first argument, which is applied
to the traced value before it is printed.
* `trace`-like functions take two values, print
the first to stderr and return the second.
* `traceVal`-like functions take one argument
which is both printed and returned.
* `traceSeq`-like functions fully evaluate their
traced value before printing (not just to weak
head normal form like trace does by default).
* Functions that end in `-Fn` take an additional
function as their first argument, which is applied
to the traced value before it is printed.
*/
{ lib }:
let
@ -33,190 +32,79 @@ rec {
# -- TRACING --
/**
Conditionally trace the supplied message, based on a predicate.
/* Conditionally trace the supplied message, based on a predicate.
Type: traceIf :: bool -> string -> a -> a
# Inputs
`pred`
: Predicate to check
`msg`
: Message that should be traced
`x`
: Value to return
# Type
```
traceIf :: bool -> string -> a -> a
```
# Examples
:::{.example}
## `lib.debug.traceIf` usage example
```nix
traceIf true "hello" 3
trace: hello
=> 3
```
:::
Example:
traceIf true "hello" 3
trace: hello
=> 3
*/
traceIf =
# Predicate to check
pred:
# Message that should be traced
msg:
# Value to return
x: if pred then trace msg x else x;
/**
Trace the supplied value after applying a function to it, and
return the original value.
/* Trace the supplied value after applying a function to it, and
return the original value.
Type: traceValFn :: (a -> b) -> a -> a
# Inputs
`f`
: Function to apply
`x`
: Value to trace and return
# Type
```
traceValFn :: (a -> b) -> a -> a
```
# Examples
:::{.example}
## `lib.debug.traceValFn` usage example
```nix
traceValFn (v: "mystring ${v}") "foo"
trace: mystring foo
=> "foo"
```
:::
Example:
traceValFn (v: "mystring ${v}") "foo"
trace: mystring foo
=> "foo"
*/
traceValFn =
# Function to apply
f:
# Value to trace and return
x: trace (f x) x;
/**
Trace the supplied value and return it.
/* Trace the supplied value and return it.
# Inputs
Type: traceVal :: a -> a
`x`
: Value to trace and return
# Type
```
traceVal :: a -> a
```
# Examples
:::{.example}
## `lib.debug.traceVal` usage example
```nix
traceVal 42
# trace: 42
=> 42
```
:::
Example:
traceVal 42
# trace: 42
=> 42
*/
traceVal = traceValFn id;
/**
`builtins.trace`, but the value is `builtins.deepSeq`ed first.
/* `builtins.trace`, but the value is `builtins.deepSeq`ed first.
Type: traceSeq :: a -> b -> b
# Inputs
`x`
: The value to trace
`y`
: The value to return
# Type
```
traceSeq :: a -> b -> b
```
# Examples
:::{.example}
## `lib.debug.traceSeq` usage example
```nix
trace { a.b.c = 3; } null
trace: { a = <CODE>; }
=> null
traceSeq { a.b.c = 3; } null
trace: { a = { b = { c = 3; }; }; }
=> null
```
:::
Example:
trace { a.b.c = 3; } null
trace: { a = <CODE>; }
=> null
traceSeq { a.b.c = 3; } null
trace: { a = { b = { c = 3; }; }; }
=> null
*/
traceSeq =
# The value to trace
x:
# The value to return
y: trace (builtins.deepSeq x x) y;
/**
Like `traceSeq`, but only evaluate down to depth n.
This is very useful because lots of `traceSeq` usages
lead to an infinite recursion.
/* Like `traceSeq`, but only evaluate down to depth n.
This is very useful because lots of `traceSeq` usages
lead to an infinite recursion.
Example:
traceSeqN 2 { a.b.c = 3; } null
trace: { a = { b = {}; }; }
=> null
# Inputs
`depth`
: 1\. Function argument
`x`
: 2\. Function argument
`y`
: 3\. Function argument
# Type
```
traceSeqN :: Int -> a -> b -> b
```
# Examples
:::{.example}
## `lib.debug.traceSeqN` usage example
```nix
traceSeqN 2 { a.b.c = 3; } null
trace: { a = { b = {}; }; }
=> null
```
:::
*/
Type: traceSeqN :: Int -> a -> b -> b
*/
traceSeqN = depth: x: y:
let snip = v: if isList v then noQuotes "[]" v
else if isAttrs v then noQuotes "{}" v
@ -230,115 +118,41 @@ rec {
in trace (generators.toPretty { allowPrettyValues = true; }
(modify depth snip x)) y;
/**
A combination of `traceVal` and `traceSeq` that applies a
provided function to the value to be traced after `deepSeq`ing
it.
# Inputs
`f`
: Function to apply
`v`
: Value to trace
/* A combination of `traceVal` and `traceSeq` that applies a
provided function to the value to be traced after `deepSeq`ing
it.
*/
traceValSeqFn =
# Function to apply
f:
# Value to trace
v: traceValFn f (builtins.deepSeq v v);
/**
A combination of `traceVal` and `traceSeq`.
# Inputs
`v`
: Value to trace
*/
/* A combination of `traceVal` and `traceSeq`. */
traceValSeq = traceValSeqFn id;
/**
A combination of `traceVal` and `traceSeqN` that applies a
provided function to the value to be traced.
# Inputs
`f`
: Function to apply
`depth`
: 2\. Function argument
`v`
: Value to trace
*/
/* A combination of `traceVal` and `traceSeqN` that applies a
provided function to the value to be traced. */
traceValSeqNFn =
# Function to apply
f:
depth:
# Value to trace
v: traceSeqN depth (f v) v;
/**
A combination of `traceVal` and `traceSeqN`.
# Inputs
`depth`
: 1\. Function argument
`v`
: Value to trace
*/
/* A combination of `traceVal` and `traceSeqN`. */
traceValSeqN = traceValSeqNFn id;
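A minimal sketch of `traceValSeqN`, following the `traceSeqN` example above: it prints the value down to the given depth and returns it unchanged.

```nix
traceValSeqN 2 { a.b.c = 3; }
# trace: { a = { b = {}; }; }
# => { a.b.c = 3; }
```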
/**
Trace the input and output of a function `f` named `name`,
both down to `depth`.
/* Trace the input and output of a function `f` named `name`,
both down to `depth`.
This is useful for adding around a function call,
to see the before/after of values as they are transformed.
This is useful for adding around a function call,
to see the before/after of values as they are transformed.
# Inputs
`depth`
: 1\. Function argument
`name`
: 2\. Function argument
`f`
: 3\. Function argument
`v`
: 4\. Function argument
# Examples
:::{.example}
## `lib.debug.traceFnSeqN` usage example
```nix
traceFnSeqN 2 "id" (x: x) { a.b.c = 3; }
trace: { fn = "id"; from = { a.b = {}; }; to = { a.b = {}; }; }
=> { a.b.c = 3; }
```
:::
Example:
traceFnSeqN 2 "id" (x: x) { a.b.c = 3; }
trace: { fn = "id"; from = { a.b = {}; }; to = { a.b = {}; }; }
=> { a.b.c = 3; }
*/
traceFnSeqN = depth: name: f: v:
let res = f v;
@ -354,82 +168,66 @@ rec {
# -- TESTING --
/**
Evaluates a set of tests.
/* Evaluates a set of tests.
A test is an attribute set `{expr, expected}`,
denoting an expression and its expected result.
A test is an attribute set `{expr, expected}`,
denoting an expression and its expected result.
The result is a `list` of __failed tests__, each represented as
`{name, expected, result}`,
The result is a `list` of __failed tests__, each represented as
`{name, expected, result}`,
- expected
- What was passed as `expected`
- result
- The actual `result` of the test
- expected
- What was passed as `expected`
- result
- The actual `result` of the test
Used for regression testing of the functions in lib; see
tests.nix for more examples.
Used for regression testing of the functions in lib; see
tests.nix for more examples.
Important: Only attributes that start with `test` are executed.
Important: Only attributes that start with `test` are executed.
- If you want to run only a subset of the tests, add the attribute `tests = ["testName"];`
- If you want to run only a subset of the tests, add the attribute `tests = ["testName"];`
Example:
# Inputs
runTests {
testAndOk = {
expr = lib.and true false;
expected = false;
};
testAndFail = {
expr = lib.and true false;
expected = true;
};
}
->
[
{
name = "testAndFail";
expected = true;
result = false;
}
]
`tests`
: Tests to run
# Type
```
runTests :: {
tests = [ String ];
${testName} :: {
expr :: a;
expected :: a;
};
}
->
[
{
name :: String;
expected :: a;
result :: a;
Type:
runTests :: {
tests = [ String ];
${testName} :: {
expr :: a;
expected :: a;
};
}
]
```
# Examples
:::{.example}
## `lib.debug.runTests` usage example
```nix
runTests {
testAndOk = {
expr = lib.and true false;
expected = false;
};
testAndFail = {
expr = lib.and true false;
expected = true;
};
}
->
[
{
name = "testAndFail";
expected = true;
result = false;
}
]
```
:::
->
[
{
name :: String;
expected :: a;
result :: a;
}
]
*/
runTests =
# Tests to run
tests: concatLists (attrValues (mapAttrs (name: test:
let testsToRun = if tests ? tests then tests.tests else [];
in if (substring 0 4 name == "test" || elem name testsToRun)
@ -439,26 +237,10 @@ rec {
then [ { inherit name; expected = test.expected; result = test.expr; } ]
else [] ) tests));
/**
Create a test assuming that list elements are `true`.
/* Create a test assuming that list elements are `true`.
# Inputs
`expr`
: 1\. Function argument
# Examples
:::{.example}
## `lib.debug.testAllTrue` usage example
```nix
{ testX = allTrue [ true ]; }
```
:::
Example:
{ testX = allTrue [ true ]; }
*/
testAllTrue = expr: { inherit expr; expected = map (x: true) expr; };
}

View File

@ -47,7 +47,7 @@ let
# misc
asserts = callLibs ./asserts.nix;
debug = callLibs ./debug.nix;
misc = callLibs ./deprecated/misc.nix;
misc = callLibs ./deprecated.nix;
# domain-specific
fetchers = callLibs ./fetchers.nix;
@ -89,7 +89,7 @@ let
recurseIntoAttrs dontRecurseIntoAttrs cartesianProduct cartesianProductOfSets
mapCartesianProduct updateManyAttrsByPath;
inherit (self.lists) singleton forEach foldr fold foldl foldl' imap0 imap1
ifilter0 concatMap flatten remove findSingle findFirst any all count
concatMap flatten remove findSingle findFirst any all count
optional optionals toList range replicate partition zipListsWith zipLists
reverseList listDfs toposort sort sortOn naturalSort compareLists take
drop sublist last init crossLists unique allUnique intersectLists

View File

@ -1,11 +0,0 @@
# lib/deprecated
Do not add any new functions to this directory.
This directory contains the `lib.misc` sublibrary, which - as a location - is deprecated.
Furthermore, some of the functions inside are of *dubious* utility, and should perhaps be avoided,
while some functions *may still be needed*.
This directory does not play a role in the deprecation process for library functions.
They should be deprecated in place, by putting a `lib.warn` or `lib.warnIf` call around the function.
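A minimal sketch of the in-place deprecation pattern described above; the function name and the suggested replacement are hypothetical:

```nix
# Inside the sublibrary that still exports the old function:
myOldFunction = lib.warn
  "lib.misc.myOldFunction is deprecated, use lib.myNewFunction instead"
  (x: x);  # the original implementation stays in place
```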

View File

@ -8,7 +8,6 @@
# derivation like fetchurl is allowed to do so since its result is
# by definition pure.
"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
"HTTP_PROXY" "HTTPS_PROXY" "FTP_PROXY" "ALL_PROXY" "NO_PROXY"
];
}
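Only fixed-output derivations, such as a `fetchurl` call with a declared hash, may read these variables; a hedged sketch with a placeholder URL and `lib.fakeHash` standing in for the real hash:

```nix
# Fixed-output: the declared hash keeps the result pure, so the
# builder may consult http_proxy/https_proxy while downloading.
pkgs.fetchurl {
  url = "https://example.org/foo-1.0.tar.gz";
  hash = lib.fakeHash;  # replace with the real SRI hash
}
```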

View File

@ -1,4 +1,4 @@
/**
/*
<!-- This anchor is here for backwards compatibility -->
[]{#sec-fileset}
@ -6,7 +6,7 @@
A file set is a (mathematical) set of local files that can be added to the Nix store for use in Nix derivations.
File sets are easy and safe to use, providing obvious and composable semantics with good error messages to prevent mistakes.
# Overview {#sec-fileset-overview}
## Overview {#sec-fileset-overview}
Basics:
- [Implicit coercion from paths to file sets](#sec-fileset-path-coercion)
@ -58,7 +58,7 @@
see [this issue](https://github.com/NixOS/nixpkgs/issues/266356) to request it.
# Implicit coercion from paths to file sets {#sec-fileset-path-coercion}
## Implicit coercion from paths to file sets {#sec-fileset-path-coercion}
All functions accepting file sets as arguments can also accept [paths](https://nixos.org/manual/nix/stable/language/values.html#type-path) as arguments.
Such path arguments are implicitly coerced to file sets containing all files under that path:
@ -78,7 +78,7 @@
This is in contrast to using [paths in string interpolation](https://nixos.org/manual/nix/stable/language/values.html#type-path), which does add the entire referenced path to the store.
:::
## Example {#sec-fileset-path-coercion-example}
### Example {#sec-fileset-path-coercion-example}
Assume we are in a local directory with a file hierarchy like this:
```
@ -157,34 +157,17 @@ let
in {
/**
/*
Create a file set from a path that may or may not exist:
- If the path does exist, the path is [coerced to a file set](#sec-fileset-path-coercion).
- If the path does not exist, a file set containing no files is returned.
Type:
maybeMissing :: Path -> FileSet
# Inputs
`path`
: 1\. Function argument
# Type
```
maybeMissing :: Path -> FileSet
```
# Examples
:::{.example}
## `lib.fileset.maybeMissing` usage example
```nix
# All files in the current directory, but excluding main.o if it exists
difference ./. (maybeMissing ./main.o)
```
:::
Example:
# All files in the current directory, but excluding main.o if it exists
difference ./. (maybeMissing ./main.o)
*/
maybeMissing =
path:
@ -200,7 +183,7 @@ in {
else
_singleton path;
/**
/*
Incrementally evaluate and trace a file set in a pretty way.
This function is only intended for debugging purposes.
The exact tracing format is unspecified and may change.
@ -211,44 +194,27 @@ in {
This variant is useful for tracing file sets in the Nix repl.
Type:
trace :: FileSet -> Any -> Any
# Inputs
`fileset`
: The file set to trace.
This argument can also be a path,
which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
`val`
: The value to return.
# Type
```
trace :: FileSet -> Any -> Any
```
# Examples
:::{.example}
## `lib.fileset.trace` usage example
```nix
trace (unions [ ./Makefile ./src ./tests/run.sh ]) null
=>
trace: /home/user/src/myProject
trace: - Makefile (regular)
trace: - src (all files in directory)
trace: - tests
trace: - run.sh (regular)
null
```
:::
Example:
trace (unions [ ./Makefile ./src ./tests/run.sh ]) null
=>
trace: /home/user/src/myProject
trace: - Makefile (regular)
trace: - src (all files in directory)
trace: - tests
trace: - run.sh (regular)
null
*/
trace = fileset:
trace =
/*
The file set to trace.
This argument can also be a path,
which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
*/
fileset:
let
# "fileset" would be a better name, but that would clash with the argument name,
# and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
@ -258,7 +224,7 @@ in {
(_printFileset actualFileset)
(x: x);
/**
/*
Incrementally evaluate and trace a file set in a pretty way.
This function is only intended for debugging purposes.
The exact tracing format is unspecified and may change.
@ -268,47 +234,34 @@ in {
This variant is useful for tracing file sets passed as arguments to other functions.
Type:
traceVal :: FileSet -> FileSet
# Inputs
`fileset`
: The file set to trace and return.
This argument can also be a path,
which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
# Type
```
traceVal :: FileSet -> FileSet
```
# Examples
:::{.example}
## `lib.fileset.traceVal` usage example
```nix
toSource {
root = ./.;
fileset = traceVal (unions [
./Makefile
./src
./tests/run.sh
]);
}
=>
trace: /home/user/src/myProject
trace: - Makefile (regular)
trace: - src (all files in directory)
trace: - tests
trace: - run.sh (regular)
"/nix/store/...-source"
```
:::
Example:
toSource {
root = ./.;
fileset = traceVal (unions [
./Makefile
./src
./tests/run.sh
]);
}
=>
trace: /home/user/src/myProject
trace: - Makefile (regular)
trace: - src (all files in directory)
trace: - tests
trace: - run.sh (regular)
"/nix/store/...-source"
*/
traceVal = fileset:
traceVal =
/*
The file set to trace and return.
This argument can also be a path,
which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
*/
fileset:
let
# "fileset" would be a better name, but that would clash with the argument name,
# and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
@ -320,7 +273,7 @@ in {
# but that would then duplicate work for consumers of the fileset, because then they have to coerce it again
actualFileset;
/**
/*
Add the local files contained in `fileset` to the store as a single [store path](https://nixos.org/manual/nix/stable/glossary#gloss-store-path) rooted at `root`.
The result is the store path as a string-like value, making it usable e.g. as the `src` of a derivation, or in string interpolation:
@ -333,13 +286,63 @@ in {
The name of the store path is always `source`.
# Inputs
Type:
toSource :: {
root :: Path,
fileset :: FileSet,
} -> SourceLike
Takes an attribute set with the following attributes
Example:
# Import the current directory into the store
# but only include files under ./src
toSource {
root = ./.;
fileset = ./src;
}
=> "/nix/store/...-source"
`root` (Path; _required_)
# Import the current directory into the store
# but only include ./Makefile and all files under ./src
toSource {
root = ./.;
fileset = union
./Makefile
./src;
}
=> "/nix/store/...-source"
: The local directory [path](https://nixos.org/manual/nix/stable/language/values.html#type-path) that will correspond to the root of the resulting store path.
# Trying to include a file outside the root will fail
toSource {
root = ./.;
fileset = unions [
./Makefile
./src
../LICENSE
];
}
=> <error>
# The root needs to point to a directory that contains all the files
toSource {
root = ../.;
fileset = unions [
./Makefile
./src
../LICENSE
];
}
=> "/nix/store/...-source"
# The root has to be a local filesystem path
toSource {
root = "/nix/store/...-source";
fileset = ./.;
}
=> <error>
*/
toSource = {
/*
(required) The local directory [path](https://nixos.org/manual/nix/stable/language/values.html#type-path) that will correspond to the root of the resulting store path.
Paths in [strings](https://nixos.org/manual/nix/stable/language/values.html#type-string), including Nix store paths, cannot be passed as `root`.
`root` has to be a directory.
@ -347,10 +350,10 @@ in {
Changing `root` only affects the directory structure of the resulting store path, it does not change which files are added to the store.
The only way to change which files get added to the store is by changing the `fileset` attribute.
:::
`fileset` (FileSet; _required_)
: The file set whose files to import into the store.
*/
root,
/*
(required) The file set whose files to import into the store.
File sets can be created using other functions in this library.
This argument can also be a path,
which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
@ -359,72 +362,7 @@ in {
If a directory does not recursively contain any file, it is omitted from the store path contents.
:::
# Type
```
toSource :: {
root :: Path,
fileset :: FileSet,
} -> SourceLike
```
# Examples
:::{.example}
## `lib.fileset.toSource` usage example
```nix
# Import the current directory into the store
# but only include files under ./src
toSource {
root = ./.;
fileset = ./src;
}
=> "/nix/store/...-source"
# Import the current directory into the store
# but only include ./Makefile and all files under ./src
toSource {
root = ./.;
fileset = union
./Makefile
./src;
}
=> "/nix/store/...-source"
# Trying to include a file outside the root will fail
toSource {
root = ./.;
fileset = unions [
./Makefile
./src
../LICENSE
];
}
=> <error>
# The root needs to point to a directory that contains all the files
toSource {
root = ../.;
fileset = unions [
./Makefile
./src
../LICENSE
];
}
=> "/nix/store/...-source"
# The root has to be a local filesystem path
toSource {
root = "/nix/store/...-source";
fileset = ./.;
}
=> <error>
```
:::
*/
toSource = {
root,
*/
fileset,
}:
let
@ -480,7 +418,7 @@ in {
};
/**
/*
The list of file paths contained in the given file set.
:::{.note}
@ -494,37 +432,24 @@ in {
The resulting list of files can be turned back into a file set using [`lib.fileset.unions`](#function-library-lib.fileset.unions).
Type:
toList :: FileSet -> [ Path ]
# Inputs
Example:
toList ./.
[ ./README.md ./Makefile ./src/main.c ./src/main.h ]
`fileset`
: The file set whose file paths to return. This argument can also be a path, which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
# Type
```
toList :: FileSet -> [ Path ]
```
# Examples
:::{.example}
## `lib.fileset.toList` usage example
```nix
toList ./.
[ ./README.md ./Makefile ./src/main.c ./src/main.h ]
toList (difference ./. ./src)
[ ./README.md ./Makefile ]
```
:::
toList (difference ./. ./src)
[ ./README.md ./Makefile ]
*/
toList = fileset:
toList =
# The file set whose file paths to return.
# This argument can also be a path,
# which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
fileset:
_toList (_coerce "lib.fileset.toList: Argument" fileset);
/**
/*
The file set containing all files that are in either of two given file sets.
This is the same as [`unions`](#function-library-lib.fileset.unions),
but takes just two file sets instead of a list.
@ -533,41 +458,26 @@ in {
The given file sets are evaluated as lazily as possible,
with the first argument being evaluated first if needed.
Type:
union :: FileSet -> FileSet -> FileSet
# Inputs
Example:
# Create a file set containing the file `Makefile`
# and all files recursively in the `src` directory
union ./Makefile ./src
`fileset1`
: The first file set. This argument can also be a path, which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
`fileset2`
: The second file set. This argument can also be a path, which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
# Type
```
union :: FileSet -> FileSet -> FileSet
```
# Examples
:::{.example}
## `lib.fileset.union` usage example
```nix
# Create a file set containing the file `Makefile`
# and all files recursively in the `src` directory
union ./Makefile ./src
# Create a file set containing the file `Makefile`
# and the LICENSE file from the parent directory
union ./Makefile ../LICENSE
```
:::
# Create a file set containing the file `Makefile`
# and the LICENSE file from the parent directory
union ./Makefile ../LICENSE
*/
union =
# The first file set.
# This argument can also be a path,
# which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
fileset1:
# The second file set.
# This argument can also be a path,
# which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
fileset2:
_unionMany
(_coerceMany "lib.fileset.union" [
@ -581,7 +491,7 @@ in {
}
]);
/**
/*
The file set containing all files that are in any of the given file sets.
This is the same as [`union`](#function-library-lib.fileset.union),
but takes a list of file sets instead of just two.
@ -590,46 +500,32 @@ in {
The given file sets are evaluated as lazily as possible,
with earlier elements being evaluated first if needed.
Type:
unions :: [ FileSet ] -> FileSet
# Inputs
Example:
# Create a file set containing selected files
unions [
# Include the single file `Makefile` in the current directory
# This errors if the file doesn't exist
./Makefile
`filesets`
# Recursively include all files in the `src/code` directory
# If this directory is empty this has no effect
./src/code
: A list of file sets. The elements can also be paths, which get [implicitly coerced to file sets](#sec-fileset-path-coercion).
# Include the files `run.sh` and `unit.c` from the `tests` directory
./tests/run.sh
./tests/unit.c
# Type
```
unions :: [ FileSet ] -> FileSet
```
# Examples
:::{.example}
## `lib.fileset.unions` usage example
```nix
# Create a file set containing selected files
unions [
# Include the single file `Makefile` in the current directory
# This errors if the file doesn't exist
./Makefile
# Recursively include all files in the `src/code` directory
# If this directory is empty this has no effect
./src/code
# Include the files `run.sh` and `unit.c` from the `tests` directory
./tests/run.sh
./tests/unit.c
# Include the `LICENSE` file from the parent directory
../LICENSE
]
```
:::
# Include the `LICENSE` file from the parent directory
../LICENSE
]
*/
unions =
# A list of file sets.
# The elements can also be paths,
# which get [implicitly coerced to file sets](#sec-fileset-path-coercion).
filesets:
if ! isList filesets then
throw ''
@ -645,43 +541,28 @@ in {
_unionMany
];
/**
/*
The file set containing all files that are in both of two given file sets.
See also [Intersection (set theory)](https://en.wikipedia.org/wiki/Intersection_(set_theory)).
The given file sets are evaluated as lazily as possible,
with the first argument being evaluated first if needed.
Type:
intersection :: FileSet -> FileSet -> FileSet
# Inputs
`fileset1`
: The first file set. This argument can also be a path, which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
`fileset2`
: The second file set. This argument can also be a path, which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
# Type
```
intersection :: FileSet -> FileSet -> FileSet
```
# Examples
:::{.example}
## `lib.fileset.intersection` usage example
```nix
# Limit the selected files to the ones in ./., so only ./src and ./Makefile
intersection ./. (unions [ ../LICENSE ./src ./Makefile ])
```
:::
Example:
# Limit the selected files to the ones in ./., so only ./src and ./Makefile
intersection ./. (unions [ ../LICENSE ./src ./Makefile ])
*/
intersection =
# The first file set.
# This argument can also be a path,
# which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
fileset1:
# The second file set.
# This argument can also be a path,
# which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
fileset2:
let
filesets = _coerceMany "lib.fileset.intersection" [
@ -699,52 +580,41 @@ in {
(elemAt filesets 0)
(elemAt filesets 1);
/**
/*
The file set containing all files from the first file set that are not in the second file set.
See also [Difference (set theory)](https://en.wikipedia.org/wiki/Complement_(set_theory)#Relative_complement).
The given file sets are evaluated as lazily as possible,
with the first argument being evaluated first if needed.
Type:
difference :: FileSet -> FileSet -> FileSet
# Inputs
Example:
# Create a file set containing all files from the current directory,
# except ones under ./tests
difference ./. ./tests
`positive`
: The positive file set. The result can only contain files that are also in this file set. This argument can also be a path, which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
`negative`
: The negative file set. The result will never contain files that are also in this file set. This argument can also be a path, which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
# Type
```
difference :: FileSet -> FileSet -> FileSet
```
# Examples
:::{.example}
## `lib.fileset.difference` usage example
```nix
# Create a file set containing all files from the current directory,
# except ones under ./tests
difference ./. ./tests
let
# A set of Nix-related files
nixFiles = unions [ ./default.nix ./nix ./tests/default.nix ];
in
# Create a file set containing all files under ./tests, except ones in `nixFiles`,
# meaning only without ./tests/default.nix
difference ./tests nixFiles
```
:::
let
# A set of Nix-related files
nixFiles = unions [ ./default.nix ./nix ./tests/default.nix ];
in
# Create a file set containing all files under ./tests, except ones in `nixFiles`,
# meaning only without ./tests/default.nix
difference ./tests nixFiles
*/
difference =
# The positive file set.
# The result can only contain files that are also in this file set.
#
# This argument can also be a path,
# which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
positive:
# The negative file set.
# The result will never contain files that are also in this file set.
#
# This argument can also be a path,
# which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
negative:
let
filesets = _coerceMany "lib.fileset.difference" [
@ -762,15 +632,36 @@ in {
(elemAt filesets 0)
(elemAt filesets 1);
/**
/*
Filter a file set to only contain files matching some predicate.
Type:
fileFilter ::
({
name :: String,
type :: String,
hasExt :: String -> Bool,
...
} -> Bool)
-> Path
-> FileSet
# Inputs
Example:
# Include all regular `default.nix` files in the current directory
fileFilter (file: file.name == "default.nix") ./.
`predicate`
# Include all non-Nix files from the current directory
fileFilter (file: ! file.hasExt "nix") ./.
: The predicate function to call on all files contained in the given file set.
# Include all files that start with a "." in the current directory
fileFilter (file: hasPrefix "." file.name) ./.
# Include all regular files (not symlinks or others) in the current directory
fileFilter (file: file.type == "regular") ./.
*/
fileFilter =
/*
The predicate function to call on all files contained in the given file set.
A file is included in the resulting file set if this function returns true for it.
This function is called with an attribute set containing these attributes:
@ -787,47 +678,9 @@ in {
`hasExt "gitignore"` is true.
Other attributes may be added in the future.
`path`
: The path whose files to filter
# Type
```
fileFilter ::
({
name :: String,
type :: String,
hasExt :: String -> Bool,
...
} -> Bool)
-> Path
-> FileSet
```
# Examples
:::{.example}
## `lib.fileset.fileFilter` usage example
```nix
# Include all regular `default.nix` files in the current directory
fileFilter (file: file.name == "default.nix") ./.
# Include all non-Nix files from the current directory
fileFilter (file: ! file.hasExt "nix") ./.
# Include all files that start with a "." in the current directory
fileFilter (file: hasPrefix "." file.name) ./.
# Include all regular files (not symlinks or others) in the current directory
fileFilter (file: file.type == "regular") ./.
```
:::
*/
fileFilter =
*/
predicate:
# The path whose files to filter
path:
if ! isFunction predicate then
throw ''
@ -846,37 +699,23 @@ in {
else
_fileFilter predicate path;
/**
Create a file set with the same files as a `lib.sources`-based value.
This does not import any of the files into the store.
/*
Create a file set with the same files as a `lib.sources`-based value.
This does not import any of the files into the store.
This can be used to gradually migrate from `lib.sources`-based filtering to `lib.fileset`.
This can be used to gradually migrate from `lib.sources`-based filtering to `lib.fileset`.
A file set can be turned back into a source using [`toSource`](#function-library-lib.fileset.toSource).
A file set can be turned back into a source using [`toSource`](#function-library-lib.fileset.toSource).
:::{.note}
File sets cannot represent empty directories.
Turning the result of this function back into a source using `toSource` will therefore not preserve empty directories.
:::
:::{.note}
File sets cannot represent empty directories.
Turning the result of this function back into a source using `toSource` will therefore not preserve empty directories.
:::
# Inputs
`source`
: 1\. Function argument
# Type
```
Type:
fromSource :: SourceLike -> FileSet
```
# Examples
:::{.example}
## `lib.fileset.fromSource` usage example
```nix
Example:
# There's no cleanSource-like function for file sets yet,
# but we can just convert cleanSource to a file set and use it that way
toSource {
@ -901,9 +740,6 @@ in {
./Makefile
./src
]);
```
:::
*/
fromSource = source:
let
@ -932,41 +768,27 @@ in {
# If there's no filter, no need to run the expensive conversion, all subpaths will be included
_singleton path;
/**
/*
Create a file set containing all [Git-tracked files](https://git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository) in a repository.
This function behaves like [`gitTrackedWith { }`](#function-library-lib.fileset.gitTrackedWith) - using the defaults.
Type:
gitTracked :: Path -> FileSet
# Inputs
Example:
# Include all files tracked by the Git repository in the current directory
gitTracked ./.
`path`
: The [path](https://nixos.org/manual/nix/stable/language/values#type-path) to the working directory of a local Git repository.
This directory must contain a `.git` file or subdirectory.
# Type
```
gitTracked :: Path -> FileSet
```
# Examples
:::{.example}
## `lib.fileset.gitTracked` usage example
```nix
# Include all files tracked by the Git repository in the current directory
gitTracked ./.
# Include only files tracked by the Git repository in the parent directory
# that are also in the current directory
intersection ./. (gitTracked ../.)
```
:::
# Include only files tracked by the Git repository in the parent directory
# that are also in the current directory
intersection ./. (gitTracked ../.)
*/
gitTracked =
/*
The [path](https://nixos.org/manual/nix/stable/language/values#type-path) to the working directory of a local Git repository.
This directory must contain a `.git` file or subdirectory.
*/
path:
_fromFetchGit
"gitTracked"
@ -974,7 +796,7 @@ in {
path
{};
/**
/*
Create a file set containing all [Git-tracked files](https://git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository) in a repository.
The first argument allows configuration with an attribute set,
while the second argument is the path to the Git working tree.
@ -998,40 +820,27 @@ in {
This may change in the future.
:::
Type:
gitTrackedWith :: { recurseSubmodules :: Bool ? false } -> Path -> FileSet
# Inputs
`options` (attribute set)
: `recurseSubmodules` (optional, default: `false`)
: Whether to recurse into [Git submodules](https://git-scm.com/book/en/v2/Git-Tools-Submodules) to also include their tracked files.
If `true`, this is equivalent to passing the [--recurse-submodules](https://git-scm.com/docs/git-ls-files#Documentation/git-ls-files.txt---recurse-submodules) flag to `git ls-files`.
`path`
: The [path](https://nixos.org/manual/nix/stable/language/values#type-path) to the working directory of a local Git repository.
This directory must contain a `.git` file or subdirectory.
# Type
```
gitTrackedWith :: { recurseSubmodules :: Bool ? false } -> Path -> FileSet
```
# Examples
:::{.example}
## `lib.fileset.gitTrackedWith` usage example
```nix
# Include all files tracked by the Git repository in the current directory
# and any submodules under it
gitTrackedWith { recurseSubmodules = true; } ./.
```
:::
Example:
# Include all files tracked by the Git repository in the current directory
# and any submodules under it
gitTrackedWith { recurseSubmodules = true; } ./.
*/
gitTrackedWith =
{
/*
(optional, default: `false`) Whether to recurse into [Git submodules](https://git-scm.com/book/en/v2/Git-Tools-Submodules) to also include their tracked files.
If `true`, this is equivalent to passing the [--recurse-submodules](https://git-scm.com/docs/git-ls-files#Documentation/git-ls-files.txt---recurse-submodules) flag to `git ls-files`.
*/
recurseSubmodules ? false,
}:
/*
The [path](https://nixos.org/manual/nix/stable/language/values#type-path) to the working directory of a local Git repository.
This directory must contain a `.git` file or subdirectory.
*/
path:
if ! isBool recurseSubmodules then
throw "lib.fileset.gitTrackedWith: Expected the attribute `recurseSubmodules` of the first argument to be a boolean, but it's a ${typeOf recurseSubmodules} instead."

View File

@ -1,6 +1,6 @@
{ lib, ... }:
rec {
/**
/*
`fix f` computes the fixed point of the given function `f`. In other words, the return value is `x` in `x = f x`.
`f` must be a lazy function.
@ -63,52 +63,27 @@ rec {
See [`extends`](#function-library-lib.fixedPoints.extends) for an example use case.
There `self` is also often called `final`.
Type: fix :: (a -> a) -> a
# Inputs
Example:
fix (self: { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; })
=> { bar = "bar"; foo = "foo"; foobar = "foobar"; }
`f`
: 1\. Function argument
# Type
```
fix :: (a -> a) -> a
```
# Examples
:::{.example}
## `lib.fixedPoints.fix` usage example
```nix
fix (self: { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; })
=> { bar = "bar"; foo = "foo"; foobar = "foobar"; }
fix (self: [ 1 2 (elemAt self 0 + elemAt self 1) ])
=> [ 1 2 3 ]
```
:::
fix (self: [ 1 2 (elemAt self 0 + elemAt self 1) ])
=> [ 1 2 3 ]
*/
fix = f: let x = f x; in x;
/**
/*
A variant of `fix` that records the original recursive attribute set in the
result, in an attribute named `__unfix__`.
This is useful in combination with the `extends` function to
implement deep overriding.
# Inputs
`f`
: 1\. Function argument
*/
fix' = f: let x = f x // { __unfix__ = f; }; in x;
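A minimal sketch of `fix'`, based on the definition above:

```nix
fix' (self: { a = 1; b = self.a + 1; })
# => { a = 1; b = 2; __unfix__ = <the original function>; }
```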
/**
/*
Return the fixpoint that `f` converges to when called iteratively, starting
with the input `x`.
@ -117,22 +92,7 @@ rec {
0
```
# Inputs
`f`
: 1\. Function argument
`x`
: 2\. Function argument
# Type
```
(a -> a) -> a -> a
```
Type: (a -> a) -> a -> a
*/
converge = f: x:
let
@ -142,7 +102,7 @@ rec {
then x
else converge f x';
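The usage example for `converge` is cut off by the hunk boundary above (only the trailing `0` survives); a sketch that yields that result, relying on Nix integer division:

```nix
converge (x: x / 2) 16
# 16 -> 8 -> 4 -> 2 -> 1 -> 0 -> 0: a fixed point is reached
# => 0
```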
/**
/*
Extend a function using an overlay.
Overlays allow modifying and extending fixed-point functions, specifically ones returning attribute sets.
@ -257,50 +217,32 @@ rec {
```
:::
Type:
extends :: (Attrs -> Attrs -> Attrs) # The overlay to apply to the fixed-point function
-> (Attrs -> Attrs) # A fixed-point function
-> (Attrs -> Attrs) # The resulting fixed-point function
# Inputs
Example:
f = final: { a = 1; b = final.a + 2; }
`overlay`
fix f
=> { a = 1; b = 3; }
: The overlay to apply to the fixed-point function
fix (extends (final: prev: { a = prev.a + 10; }) f)
=> { a = 11; b = 13; }
`f`
fix (extends (final: prev: { b = final.a + 5; }) f)
=> { a = 1; b = 6; }
: The fixed-point function
# Type
```
extends :: (Attrs -> Attrs -> Attrs) # The overlay to apply to the fixed-point function
-> (Attrs -> Attrs) # A fixed-point function
-> (Attrs -> Attrs) # The resulting fixed-point function
```
# Examples
:::{.example}
## `lib.fixedPoints.extends` usage example
```nix
f = final: { a = 1; b = final.a + 2; }
fix f
=> { a = 1; b = 3; }
fix (extends (final: prev: { a = prev.a + 10; }) f)
=> { a = 11; b = 13; }
fix (extends (final: prev: { b = final.a + 5; }) f)
=> { a = 1; b = 6; }
fix (extends (final: prev: { c = final.a + final.b; }) f)
=> { a = 1; b = 3; c = 4; }
```
:::
fix (extends (final: prev: { c = final.a + final.b; }) f)
=> { a = 1; b = 3; c = 4; }
*/
extends =
# The overlay to apply to the fixed-point function
overlay:
# The fixed-point function
f:
# Wrap with parenthesis to prevent nixdoc from rendering the `final` argument in the documentation
# The result should be thought of as a function, the argument of that function is not an argument to `extends` itself
(
final:
@ -310,29 +252,10 @@ rec {
prev // overlay final prev
);
/**
/*
Compose two extending functions of the type expected by 'extends'
into one where changes made in the first are available in the
'super' of the second.
# Inputs
`f`
: 1\. Function argument
`g`
: 2\. Function argument
`final`
: 3\. Function argument
`prev`
: 4\. Function argument
*/
composeExtensions =
f: g: final: prev:
@ -340,7 +263,7 @@ rec {
prev' = prev // fApplied;
in fApplied // g final prev';
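A minimal sketch of `composeExtensions`, following the definition above; the overlay names are illustrative:

```nix
let
  addA = final: prev: { a = 1; };
  useA = final: prev: { b = prev.a + 1; };  # sees `a` from addA via `prev`
in
  fix (extends (composeExtensions addA useA) (final: { }))
# => { a = 1; b = 2; }
```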
/**
/*
Compose several extending functions of the type expected by 'extends' into
one where changes made in preceding functions are made available to
subsequent ones.
@ -353,7 +276,7 @@ rec {
composeManyExtensions =
lib.foldr (x: y: composeExtensions x y) (final: prev: {});
/**
/*
Create an overridable, recursive attribute set. For example:
```
@ -375,20 +298,9 @@ rec {
*/
makeExtensible = makeExtensibleWithCustomName "extend";
/**
/*
Same as `makeExtensible` but the name of the extending attribute is
customized.
# Inputs
`extenderName`
: 1\. Function argument
`rattrs`
: 2\. Function argument
*/
makeExtensibleWithCustomName = extenderName: rattrs:
fix' (self: (rattrs self) // {
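A hedged usage sketch for `makeExtensibleWithCustomName`; the attribute name `extendPkgs` is arbitrary:

```nix
(makeExtensibleWithCustomName "extendPkgs" (self: { a = 1; }))
  .extendPkgs (final: prev: { b = prev.a + 1; })
# => { a = 1; b = 2; extendPkgs = <lambda>; __unfix__ = <lambda>; }
```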

View File

@ -53,53 +53,6 @@ rec {
inherit type isGVariant;
intConstructors = [
{
name = "mkInt32";
type = type.int32;
min = -2147483648;
max = 2147483647;
}
{
name = "mkUint32";
type = type.uint32;
min = 0;
max = 4294967295;
}
{
name = "mkInt64";
type = type.int64;
# Nix does not support such large numbers.
min = null;
max = null;
}
{
name = "mkUint64";
type = type.uint64;
min = 0;
# Nix does not support such large numbers.
max = null;
}
{
name = "mkInt16";
type = type.int16;
min = -32768;
max = 32767;
}
{
name = "mkUint16";
type = type.uint16;
min = 0;
max = 65535;
}
{
name = "mkUchar";
type = type.uchar;
min = 0;
max = 255;
}
];
/* Returns the GVariant value that most closely matches the given Nix value.
If no GVariant value can be found unambiguously then error is thrown.
@ -117,20 +70,8 @@ rec {
mkArray v
else if isGVariant v then
v
else if builtins.isInt v then
let
validConstructors = builtins.filter ({ min, max, ... }: (min == null || min <= v) && (max == null || v <= max)) intConstructors;
in
throw ''
The GVariant type for number ${builtins.toString v} is unclear.
Please wrap the value with one of the following, depending on the value type in GSettings schema:
${lib.concatMapStringsSep "\n" ({ name, type, ...}: "- `lib.gvariant.${name}` for `${type}`") validConstructors}
''
else if builtins.isAttrs v then
throw "Cannot construct GVariant value from an attribute set. If you want to construct a dictionary, you will need to create an array containing items constructed with `lib.gvariant.mkDictionaryEntry`."
else
throw "The GVariant type of ${builtins.typeOf v} can't be inferred.";
throw "The GVariant type of ${v} can't be inferred.";
/* Returns the GVariant array from the given type of the elements and a Nix list.

View File

@ -632,11 +632,6 @@ in mkLicense lset) ({
url = "https://old.calculate-linux.org/packages/licenses/iASL";
};
icu = {
spdxId = "ICU";
fullName = "ICU";
};
ijg = {
spdxId = "IJG";
fullName = "Independent JPEG Group License";
@ -902,17 +897,6 @@ in mkLicense lset) ({
free = false;
};
ncbiPd = {
spdxId = "NCBI-PD";
fullName = "NCBI Public Domain Notice";
# Due to United States copyright law, anything with this "license" does not have a copyright in the
# jurisdiction of the United States. However, other jurisdictions may assign the United States
# government copyright to the work, and the license explicitly states that in such a case, no license
# is granted. This is nonfree and nonredistributable in most jurisdictions other than the United States.
free = false;
redistributable = false;
};
ncsa = {
spdxId = "NCSA";
fullName = "University of Illinois/NCSA Open Source License";
@ -1184,11 +1168,6 @@ in mkLicense lset) ({
# channel and NixOS images.
};
unicode-30 = {
spdxId = "Unicode-3.0";
fullName = "Unicode License v3";
};
unicode-dfs-2015 = {
spdxId = "Unicode-DFS-2015";
fullName = "Unicode License Agreement - Data Files and Software (2015)";

View File

@ -4,7 +4,7 @@
{ lib }:
let
inherit (lib.strings) toInt;
inherit (lib.trivial) compare min id warn pipe;
inherit (lib.trivial) compare min id warn;
inherit (lib.attrsets) mapAttrs;
in
rec {
@ -333,54 +333,6 @@ rec {
*/
imap1 = f: list: genList (n: f (n + 1) (elemAt list n)) (length list);
/**
Filter a list for elements that satisfy a predicate function.
The predicate function is called with both the index and value for each element.
It must return `true`/`false` to include/exclude a given element in the result.
This function is strict in the result of the predicate function for each element.
This function has O(n) complexity.
Also see [`builtins.filter`](https://nixos.org/manual/nix/stable/language/builtins.html#builtins-filter) (available as `lib.lists.filter`),
which can be used instead when the index isn't needed.
# Inputs
`ipred`
: The predicate function, it takes two arguments:
- 1. (int): the index of the element.
- 2. (a): the value of the element.
It must return `true`/`false` to include/exclude a given element from the result.
`list`
: The list to filter using the predicate.
# Type
```
ifilter0 :: (int -> a -> bool) -> [a] -> [a]
```
# Examples
:::{.example}
## `lib.lists.ifilter0` usage example
```nix
ifilter0 (i: v: i == 0 || v > 2) [ 1 2 3 ]
=> [ 1 3 ]
```
:::
*/
ifilter0 =
ipred:
input:
map (idx: elemAt input idx) (
filter (idx: ipred idx (elemAt input idx)) (
genList (x: x) (length input)
)
);
/**
Map and concatenate the result.

View File

@ -26,12 +26,8 @@ rec {
dontDistribute = drv: addMetaAttrs { hydraPlatforms = []; } drv;
/*
Change the [symbolic name of a derivation](https://nixos.org/manual/nix/stable/language/derivations.html#attr-name).
:::{.warning}
Dependent derivations will be rebuilt when the symbolic name is changed.
:::
/* Change the symbolic name of a package for presentation purposes
(i.e., so that nix-env users can tell them apart).
*/
setName = name: drv: drv // {inherit name;};
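A small usage sketch for `setName`; `pkgs.hello` is just an illustrative derivation:

```nix
setName "hello-renamed" pkgs.hello
# => the hello derivation with `name` set to "hello-renamed";
#    dependents will rebuild, as the warning above notes
```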

View File

@ -81,8 +81,6 @@ let
&& final.parsed.kernel == platform.parsed.kernel;
isCompatible = _: throw "2022-05-23: isCompatible has been removed in favor of canExecute, refer to the 22.11 changelog for details";
# Derived meta-data
useLLVM = final.isFreeBSD;
libc =
/**/ if final.isDarwin then "libSystem"
else if final.isMinGW then "msvcrt"
@ -93,7 +91,6 @@ let
else if final.isAndroid then "bionic"
else if final.isLinux /* default */ then "glibc"
else if final.isFreeBSD then "fblibc"
else if final.isOpenBSD then "oblibc"
else if final.isNetBSD then "nblibc"
else if final.isAvr then "avrlibc"
else if final.isGhcjs then null

View File

@ -13,7 +13,7 @@ let
"x86_64-darwin" "i686-darwin" "aarch64-darwin" "armv7a-darwin"
# FreeBSD
"i686-freebsd" "x86_64-freebsd"
"i686-freebsd13" "x86_64-freebsd13"
# Genode
"aarch64-genode" "i686-genode" "x86_64-genode"

View File

@ -235,11 +235,6 @@ rec {
libc = "newlib";
};
microblaze-embedded = {
config = "microblazeel-none-elf";
libc = "newlib";
};
#
# Redox
#
@ -328,7 +323,7 @@ rec {
# BSDs
x86_64-freebsd = {
config = "x86_64-unknown-freebsd";
config = "x86_64-unknown-freebsd13";
useLLVM = true;
};
@ -342,11 +337,6 @@ rec {
useLLVM = true;
};
x86_64-openbsd = {
config = "x86_64-unknown-openbsd";
useLLVM = true;
};
#
# WASM
#

View File

@ -326,7 +326,11 @@ rec {
# the normalized name for macOS.
macos = { execFormat = macho; families = { inherit darwin; }; name = "darwin"; };
ios = { execFormat = macho; families = { inherit darwin; }; };
freebsd = { execFormat = elf; families = { inherit bsd; }; name = "freebsd"; };
# A tricky thing about FreeBSD is that there is no stable ABI across
# versions. That means that including the version in the config
# string is paramount.
freebsd12 = { execFormat = elf; families = { inherit bsd; }; name = "freebsd"; version = 12; };
freebsd13 = { execFormat = elf; families = { inherit bsd; }; name = "freebsd"; version = 13; };
linux = { execFormat = elf; families = { }; };
netbsd = { execFormat = elf; families = { inherit bsd; }; };
none = { execFormat = unknown; families = { }; };
@ -469,7 +473,6 @@ rec {
elem (elemAt l 2) [ "wasi" "redox" "mmixware" "ghcjs" "mingw32" ] ||
hasPrefix "freebsd" (elemAt l 2) ||
hasPrefix "netbsd" (elemAt l 2) ||
hasPrefix "openbsd" (elemAt l 2) ||
hasPrefix "genode" (elemAt l 2)
then {
cpu = elemAt l 0;

View File

@ -63,10 +63,8 @@ let
hasAttrByPath
hasInfix
id
ifilter0
isStorePath
lazyDerivation
length
lists
listToAttrs
makeExtensible
@ -653,31 +651,6 @@ runTests {
expected = ["b" "c"];
};
testIfilter0Example = {
expr = ifilter0 (i: v: i == 0 || v > 2) [ 1 2 3 ];
expected = [ 1 3 ];
};
testIfilter0Empty = {
expr = ifilter0 (i: v: abort "shouldn't be evaluated!") [ ];
expected = [ ];
};
testIfilter0IndexOnly = {
expr = length (ifilter0 (i: v: mod i 2 == 0) [ (throw "0") (throw "1") (throw "2") (throw "3")]);
expected = 2;
};
testIfilter0All = {
expr = ifilter0 (i: v: true) [ 10 11 12 13 14 15 ];
expected = [ 10 11 12 13 14 15 ];
};
testIfilter0First = {
expr = ifilter0 (i: v: i == 0) [ 10 11 12 13 14 15 ];
expected = [ 10 ];
};
testIfilter0Last = {
expr = ifilter0 (i: v: i == 5) [ 10 11 12 13 14 15 ];
expected = [ 15 ];
};
testFold =
let
f = op: fold: fold op 0 (range 0 100);
@ -1639,27 +1612,6 @@ runTests {
];
};
testToGNUCommandLineSeparator = {
expr = cli.toGNUCommandLine { optionValueSeparator = "="; } {
data = builtins.toJSON { id = 0; };
X = "PUT";
retry = 3;
retry-delay = null;
url = [ "https://example.com/foo" "https://example.com/bar" ];
silent = false;
verbose = true;
};
expected = [
"-X=PUT"
"--data={\"id\":0}"
"--retry=3"
"--url=https://example.com/foo"
"--url=https://example.com/bar"
"--verbose"
];
};
testToGNUCommandLineShell = {
expr = cli.toGNUCommandLineShell {} {
data = builtins.toJSON { id = 0; };

View File

@ -2,7 +2,7 @@
# Don't test properties of pkgs.lib, but rather the lib in the parent directory
pkgs ? import ../.. {} // { lib = throw "pkgs.lib accessed, but the lib tests should use nixpkgs' lib path directly!"; },
nix ? pkgs-nixVersions.stable,
nixVersions ? [ pkgs-nixVersions.minimum nix pkgs-nixVersions.latest ],
nixVersions ? [ pkgs-nixVersions.minimum nix pkgs-nixVersions.unstable ],
pkgs-nixVersions ? import ./nix-for-tests.nix { inherit pkgs; },
}:
@ -24,9 +24,7 @@ in
#
# https://github.com/NixOS/nixpkgs/issues/272591
#
[(import ../../pkgs/test/release {
inherit pkgs lib nix;
})]
[(import ../../pkgs/test/release {})]
;
}

View File

@ -40,7 +40,7 @@ lib.runTests (
testarm = mseteq arm [ "armv5tel-linux" "armv6l-linux" "armv6l-netbsd" "armv6l-none" "armv7a-linux" "armv7a-netbsd" "armv7l-linux" "armv7l-netbsd" "arm-none" "armv7a-darwin" ];
testarmv7 = mseteq armv7 [ "armv7a-darwin" "armv7a-linux" "armv7l-linux" "armv7a-netbsd" "armv7l-netbsd" ];
testi686 = mseteq i686 [ "i686-linux" "i686-freebsd" "i686-genode" "i686-netbsd" "i686-openbsd" "i686-cygwin" "i686-windows" "i686-none" "i686-darwin" ];
testi686 = mseteq i686 [ "i686-linux" "i686-freebsd13" "i686-genode" "i686-netbsd" "i686-openbsd" "i686-cygwin" "i686-windows" "i686-none" "i686-darwin" ];
testmips = mseteq mips [ "mips-none" "mips64-none" "mips-linux" "mips64-linux" "mips64el-linux" "mipsel-linux" "mipsel-netbsd" ];
testmmix = mseteq mmix [ "mmix-mmixware" ];
testpower = mseteq power [ "powerpc-netbsd" "powerpc-none" "powerpc64-linux" "powerpc64le-linux" "powerpcle-none" ];
@ -48,11 +48,11 @@ lib.runTests (
testriscv32 = mseteq riscv32 [ "riscv32-linux" "riscv32-netbsd" "riscv32-none" ];
testriscv64 = mseteq riscv64 [ "riscv64-linux" "riscv64-netbsd" "riscv64-none" ];
tests390x = mseteq s390x [ "s390x-linux" "s390x-none" ];
testx86_64 = mseteq x86_64 [ "x86_64-linux" "x86_64-darwin" "x86_64-freebsd" "x86_64-genode" "x86_64-redox" "x86_64-openbsd" "x86_64-netbsd" "x86_64-cygwin" "x86_64-solaris" "x86_64-windows" "x86_64-none" ];
testx86_64 = mseteq x86_64 [ "x86_64-linux" "x86_64-darwin" "x86_64-freebsd13" "x86_64-genode" "x86_64-redox" "x86_64-openbsd" "x86_64-netbsd" "x86_64-cygwin" "x86_64-solaris" "x86_64-windows" "x86_64-none" ];
testcygwin = mseteq cygwin [ "i686-cygwin" "x86_64-cygwin" ];
testdarwin = mseteq darwin [ "x86_64-darwin" "i686-darwin" "aarch64-darwin" "armv7a-darwin" ];
testfreebsd = mseteq freebsd [ "i686-freebsd" "x86_64-freebsd" ];
testfreebsd = mseteq freebsd [ "i686-freebsd13" "x86_64-freebsd13" ];
testgenode = mseteq genode [ "aarch64-genode" "i686-genode" "x86_64-genode" ];
testredox = mseteq redox [ "x86_64-redox" ];
testgnu = mseteq gnu (linux /* ++ kfreebsd ++ ... */);

View File

@ -403,7 +403,7 @@ in {
On each release the first letter is bumped and a new animal is chosen
starting with that new letter.
*/
codeName = "Vicuna";
codeName = "Uakari";
/**
Returns the current nixpkgs version suffix as string.
@ -623,37 +623,6 @@ in {
/**
Reads a JSON file.
# Examples
:::{.example}
## `lib.trivial.importJSON` usage example
example.json
```json
{
"title": "Example JSON",
"hello": {
"world": "foo",
"bar": {
"foobar": true
}
}
}
```
```nix
importJSON ./example.json
=> {
title = "Example JSON";
hello = {
world = "foo";
bar = {
foobar = true;
};
};
}
```
:::
# Inputs
@ -673,35 +642,6 @@ in {
/**
Reads a TOML file.
# Examples
:::{.example}
## `lib.trivial.importTOML` usage example
example.toml
```toml
title = "TOML Example"
[hello]
world = "foo"
[hello.bar]
foobar = true
```
```nix
importTOML ./example.toml
=> {
title = "TOML Example";
hello = {
world = "foo";
bar = {
foobar = true;
};
};
}
```
:::
# Inputs

File diff suppressed because it is too large

View File

@ -5,7 +5,7 @@ let
trace = if builtins.getEnv "VERBOSE" == "1" then builtins.trace else (x: y: y);
rel = removeAttrs (import ../../pkgs/top-level/release.nix { }) [ "tarball" "unstable" ];
rel = removeAttrs (import ../../pkgs/top-level/release.nix { }) [ "tarball" "unstable" "xbursttools" ];
# Add the recurseForDerivations attribute to ensure that
# nix-instantiate recurses into nested attribute sets.

View File

@ -28,7 +28,6 @@ OK_MISSING_BY_PACKAGE = {
"discover": {
"rpm-ostree-1", # we don't have rpm-ostree (duh)
"Snapd", # we don't have snaps and probably never will
"packagekitqt6", # intentionally disabled
},
"elisa": {
"UPNPQT", # upstream says it's broken
@ -37,9 +36,6 @@ OK_MISSING_BY_PACKAGE = {
"Sphinx", # only used for docs, bloats closure size
"QCollectionGenerator"
},
"gwenview": {
"Tiff", # duplicate?
},
"kio-extras-kf5": {
"KDSoapWSDiscoveryClient", # actually vendored on KF5 version
},
@ -80,11 +76,6 @@ OK_MISSING_BY_PACKAGE = {
"plasma-desktop": {
"scim", # upstream is dead, not packaged in Nixpkgs
},
"poppler-qt6": {
"gobject-introspection-1.0", # we don't actually want to build the GTK variant
"gdk-pixbuf-2.0",
"gtk+-3.0",
},
"powerdevil": {
"DDCUtil", # cursed, intentionally disabled
},
@ -92,9 +83,6 @@ OK_MISSING_BY_PACKAGE = {
"Qt6Qml", # tests only
"Qt6Quick",
},
"skladnik": {
"POVRay", # too expensive to rerender all the assets
},
"syntax-highlighting": {
"XercesC", # only used for extra validation at build time
}

View File

@ -1,20 +1,19 @@
name,rockspec,ref,server,version,luaversion,maintainers
name,src,ref,server,version,luaversion,maintainers
alt-getopt,,,,,,arobyn
ansicolors,,,,,,Freed-Wu
bit32,,,,5.3.0-1,5.1,lblasc
argparse,,,,,,
basexx,,,,,,
binaryheap,,,,,,vcunat
busted,,,,,,
cassowary,,,,,,alerque
cassowary,,,,,,marsam alerque
cldr,,,,,,alerque
compat53,,,,,,vcunat
commons.nvim,,,,,,mrcjkb
cosmo,,,,,,
cosmo,,,,,,marsam
coxpcall,,,,1.17.0-1,,
cqueues,,,,,,vcunat
cyan,,,,,,
digestif,,,,,5.3,
digestif,https://github.com/astoff/digestif.git,,,,5.3,
dkjson,,,,,,
fennel,,,,,,misterio77
fidget.nvim,,,,,,mrcjkb
@ -23,8 +22,8 @@ fluent,,,,,,alerque
funnyfiles.nvim,,,,,,mrcjkb
fzf-lua,,,,,,mrcjkb
fzy,,,,,,mrcjkb
gitsigns.nvim,https://raw.githubusercontent.com/lewis6991/gitsigns.nvim/main/gitsigns.nvim-scm-1.rockspec,,,,5.1,
haskell-tools.nvim,,,,,,mrcjkb
gitsigns.nvim,https://github.com/lewis6991/gitsigns.nvim.git,,,,5.1,
haskell-tools.nvim,,,,,,
http,,,,0.3-0,,vcunat
image.nvim,,,,,,teto
inspect,,,,,,
@ -32,7 +31,7 @@ jsregexp,,,,,,
ldbus,,,http://luarocks.org/dev,,,
ldoc,,,,,,
lgi,,,,,,
linenoise,https://raw.githubusercontent.com/hoelzro/lua-linenoise/master/linenoise-0.9-1.rockspec,,,,,
linenoise,https://github.com/hoelzro/lua-linenoise.git,,,,,
ljsyscall,,,,,5.1,lblasc
lmathx,,,,,5.3,alexshpilkin
lmpfrlib,,,,,5.3,alexshpilkin
@ -55,13 +54,13 @@ lua-resty-jwt,,,,,,
lua-resty-openidc,,,,,,
lua-resty-openssl,,,,,,
lua-resty-session,,,,,,
lua-rtoml,https://raw.githubusercontent.com/lblasc/lua-rtoml/main/lua-rtoml-0.2-0.rockspec,,,,,lblasc
lua-subprocess,https://raw.githubusercontent.com/0x0ade/lua-subprocess/master/subprocess-scm-1.rockspec,,,,5.1,scoder12
lua-rtoml,https://github.com/lblasc/lua-rtoml,,,,,lblasc
lua-subprocess,https://github.com/0x0ade/lua-subprocess,,,,5.1,scoder12
lua-term,,,,,,
lua-toml,,,,,,
lua-zlib,,,,,,koral
lua_cliargs,,,,,,
luabitop,https://raw.githubusercontent.com/teto/luabitop/master/luabitop-1.0.2-3.rockspec,,,,,
luabitop,https://github.com/teto/luabitop.git,,,,,
luacheck,,,,,,
luacov,,,,,,
luadbi,,,,,,
@ -77,18 +76,14 @@ lualdap,,,,,,aanderse
lualogging,,,,,,
luaossl,,,,,5.1,
luaposix,,,,34.1.1-1,,vyp lblasc
luaprompt,,,,,,Freed-Wu
luarepl,,,,,,
luarocks,,,,,,mrcjkb teto
luarocks-build-rust-mlua,,,,,,mrcjkb
luarocks-build-treesitter-parser,,,,,,mrcjkb
luasec,,,,,,flosse
luasnip,,,,,,
luasocket,,,,,,
luasql-sqlite3,,,,,,vyp
luassert,,,,,,
luasystem,,,,,,
luatext,,,,,,
luaunbound,,,,,,
luaunit,,,,,,lockejan
luautf8,,,,,,pstn
@ -98,7 +93,7 @@ lua-yajl,,,,,,pstn
lua-iconv,,,,7.0.0,,
luuid,,,,20120509-2,,
luv,,,,1.44.2-1,,
lush.nvim,,,https://luarocks.org/dev,,,teto
lush.nvim,https://github.com/rktjmp/lush.nvim,,,,,teto
lyaml,,,,,,lblasc
magick,,,,,5.1,donovanglover
markdown,,,,,,
@ -106,28 +101,27 @@ mediator_lua,,,,,,
middleclass,,,,,,
mimetypes,,,,,,
mpack,,,,,,
moonscript,https://raw.githubusercontent.com/leafo/moonscript/master/moonscript-dev-1.rockspec,,,,,arobyn
moonscript,https://github.com/leafo/moonscript.git,dev-1,,,,arobyn
neotest,,,,,,mrcjkb
nlua,,,,,,teto
nui.nvim,,,,,,mrcjkb
nvim-cmp,https://raw.githubusercontent.com/hrsh7th/nvim-cmp/main/nvim-cmp-scm-1.rockspec,,,,,
nvim-cmp,https://github.com/hrsh7th/nvim-cmp,,,,,
nvim-nio,,,,,,mrcjkb
pathlib.nvim,,,,,,
penlight,,,,,,alerque
plenary.nvim,https://raw.githubusercontent.com/nvim-lua/plenary.nvim/master/plenary.nvim-scm-1.rockspec,,,,5.1,
psl,,,,0.3,,
rapidjson,,,,,,
penlight,https://github.com/lunarmodules/Penlight.git,,,,,alerque
plenary.nvim,https://github.com/nvim-lua/plenary.nvim.git,,,,5.1,
rapidjson,https://github.com/xpol/lua-rapidjson.git,,,,,
rocks.nvim,,,,,5.1,teto mrcjkb
rest.nvim,,,,,5.1,teto
rocks.nvim,,,,,,mrcjkb
rocks-git.nvim,,,,,,mrcjkb
rocks-config.nvim,,,,,,mrcjkb
rocks-dev.nvim,,,,,,mrcjkb
rtp.nvim,,,,,,mrcjkb
rustaceanvim,,,,,,mrcjkb
say,,,,,,
say,https://github.com/Olivine-Labs/say.git,,,,,
serpent,,,,,,lockejan
sqlite,,,,,,
std._debug,,,,,,
std._debug,https://github.com/lua-stdlib/_debug.git,,,,,
std.normalize,,,,,,
stdlib,,,,41.2.2,,vyp
teal-language-server,,,http://luarocks.org/dev,,,
@ -137,7 +131,6 @@ tiktoken_core,,,,,,natsukium
tl,,,,,,mephistophiles
toml,,,,,,mrcjkb
toml-edit,,,,,5.1,mrcjkb
tree-sitter-norg,,,,,5.1,mrcjkb
vstruct,,,,,,
vstruct,https://github.com/ToxicFrog/vstruct.git,,,,,
vusted,,,,,,figsoda
xml2lua,,,,,,teto


View File

@ -1,22 +1,18 @@
{ stdenv, lib, makeWrapper, perl, perlPackages }:
stdenv.mkDerivation {
pname = "nixpkgs-lint";
version = "1";
name = "nixpkgs-lint-1";
nativeBuildInputs = [ makeWrapper ];
buildInputs = [ perl perlPackages.XMLSimple ];
dontUnpack = true;
dontBuild = true;
buildPhase = "true";
installPhase =
''
mkdir -p $out/bin
cp ${./nixpkgs-lint.pl} $out/bin/nixpkgs-lint
# make the built version hermetic
substituteInPlace $out/bin/nixpkgs-lint \
--replace-fail "#! /usr/bin/env nix-shell" "#! ${lib.getExe perl}"
wrapProgram $out/bin/nixpkgs-lint --set PERL5LIB $PERL5LIB
'';

View File

@ -108,7 +108,7 @@ class Repo:
@property
def name(self):
return self.uri.strip("/").split("/")[-1]
return self.uri.split("/")[-1]
@property
def branch(self):

View File

@ -32,6 +32,7 @@ with lib.maintainers; {
acme = {
members = [
aanderse
andrew-d
arianvp
emily
flokli
@ -45,6 +46,7 @@ with lib.maintainers; {
bazel = {
members = [
mboes
marsam
uri-canva
cbley
olebedev
@ -97,6 +99,7 @@ with lib.maintainers; {
budgie = {
members = [
bobby285271
federicoschonborn
];
scope = "Maintain Budgie desktop environment";
shortName = "Budgie";
@ -236,6 +239,7 @@ with lib.maintainers; {
members = [
cole-h
grahamc
hoverbear
];
scope = "Group registration for packages maintained by Determinate Systems.";
shortName = "Determinate Systems employees";
@ -261,9 +265,8 @@ with lib.maintainers; {
};
docs = {
members = [ ];
githubTeams = [
"documentation-team"
members = [
ryantm
];
scope = "Maintain nixpkgs/NixOS documentation and tools for building it.";
shortName = "Docs";
@ -344,16 +347,6 @@ with lib.maintainers; {
shortName = "freedesktop.org packaging";
};
fslabs = {
# Verify additions to this team with at least one already existing member of the team.
members = [
greaka
lpostula
];
scope = "Group registration for packages maintained by Foresight Spatial Labs.";
shortName = "Foresight Spatial Labs employees";
};
gcc = {
members = [
synthetica
@ -387,6 +380,7 @@ with lib.maintainers; {
krav
talyz
yayayayaka
yuka
];
scope = "Maintain gitlab packages.";
shortName = "gitlab";
@ -414,6 +408,7 @@ with lib.maintainers; {
hedning
jtojnar
dasj19
amaxine
];
githubTeams = [
"gnome"
@ -428,6 +423,7 @@ with lib.maintainers; {
bandresen
hlolli
glittershark
babariviere
ericdallo
thiagokokada
];
@ -471,14 +467,6 @@ with lib.maintainers; {
shortName = "Home Assistant";
};
infisical = {
members = [
akhilmhdh
];
scope = "Maintain Infisical";
shortName = "Infisical";
};
iog = {
members = [
cleverca22
@ -543,6 +531,7 @@ with lib.maintainers; {
members = [
aanderse
edwtjo
MP2E
thiagokokada
];
scope = "Maintain Libretro, RetroArch and related packages.";
@ -688,7 +677,6 @@ with lib.maintainers; {
dandellion
sumnerevans
nickcao
teutat3s
];
scope = "Maintain the ecosystem around Matrix, a decentralized messenger.";
shortName = "Matrix";
@ -735,16 +723,6 @@ with lib.maintainers; {
enableFeatureFreezePing = true;
};
lix = {
members = [
raitobezarius
qyriad
];
scope = "Maintain the Lix package manager inside of Nixpkgs.";
shortName = "Lix ecosystem";
enableFeatureFreezePing = true;
};
module-system = {
members = [
infinisil
@ -758,6 +736,7 @@ with lib.maintainers; {
node = {
members = [
lilyinstarlight
marsam
winter
];
scope = "Maintain Node.js runtimes and build tooling.";
@ -823,7 +802,6 @@ with lib.maintainers; {
aanderse
drupol
ma27
patka
talyz
];
githubTeams = [
@ -856,6 +834,7 @@ with lib.maintainers; {
python = {
members = [
fridh
hexa
jonringer
tjni
@ -885,10 +864,8 @@ with lib.maintainers; {
r = {
members = [
b-rodrigues
bcdarwin
jbedo
kupac
];
scope = "Maintain the R programming language and related packages.";
shortName = "R";
@ -929,6 +906,7 @@ with lib.maintainers; {
ruby = {
members = [
marsam
];
scope = "Maintain the Ruby interpreter and related packages.";
shortName = "Ruby";
@ -962,12 +940,6 @@ with lib.maintainers; {
shortName = "SageMath";
};
sdl = {
members = [ ];
scope = "Maintain SDL libraries.";
shortName = "SDL";
};
sphinx = {
members = [ ];
scope = "Maintain Sphinx related packages.";

View File

@ -48,7 +48,7 @@ Reviewing process:
- Description, default and example should be provided.
- Ensure that option changes are backward compatible.
- `mkRenamedOptionModuleWith` provides a way to make a renamed option backward compatible (see the sketch after this list).
- Use `lib.versionAtLeast config.system.stateVersion "24.05"` on backward incompatible changes which may corrupt, change or update the state stored on existing setups.
- Use `lib.versionAtLeast config.system.stateVersion "23.11"` on backward incompatible changes which may corrupt, change or update the state stored on existing setups.
- Ensure that removed options are declared with `mkRemovedOptionModule`.
- Ensure that changes that are not backward compatible are mentioned in release notes.
- Ensure that documentation affected by the change is updated.
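
A minimal sketch of the `mkRenamedOptionModuleWith` helper mentioned above, assuming a hypothetical module that renamed `services.foo.enableBar` to `services.foo.bar.enable` in release 24.05 (all option names here are illustrative):

```nix
{ lib, ... }:
{
  imports = [
    # Keeps the old option name working and warns users to migrate,
    # starting with the given release.
    (lib.mkRenamedOptionModuleWith {
      sinceRelease = 2405;
      from = [ "services" "foo" "enableBar" ];
      to = [ "services" "foo" "bar" "enable" ];
    })
  ];
}
```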

View File

@ -10,12 +10,14 @@ If you find yourself repeating yourself over and over, it's time to abstract.
adminAddr = "alice@example.org";
forceSSL = true;
enableACME = true;
enablePHP = true;
};
"wiki.example.org" = {
documentRoot = "/webroot/wiki.example.org";
adminAddr = "alice@example.org";
forceSSL = true;
enableACME = true;
enablePHP = true;
};
};
}
@ -33,7 +35,7 @@ in
{
services.httpd.virtualHosts =
{ "blog.example.org" = (commonConfig // { documentRoot = "/webroot/blog.example.org"; });
"wiki.example.org" = (commonConfig // { documentRoot = "/webroot/wiki.example.org"; });
"wiki.example.org" = (commonConfig // { documentRoot = "/webroot/wiki.example.com"; });
};
}
```

View File

@ -6,5 +6,5 @@ graphical installation CD.
It sets [](#opt-services.xserver.enable),
[](#opt-services.displayManager.sddm.enable),
[](#opt-services.xserver.desktopManager.plasma5.enable),
and [](#opt-services.libinput.enable) to true. It also
and [](#opt-services.xserver.libinput.enable) to true. It also
includes glxinfo and firefox in the system packages list.

View File

@ -207,7 +207,7 @@ Latitude series) can be enabled as follows:
```nix
{
services.libinput.enable = true;
services.xserver.libinput.enable = true;
}
```
@ -216,7 +216,7 @@ For instance, the following disables tap-to-click behavior:
```nix
{
services.libinput.touchpad.tapping = false;
services.xserver.libinput.touchpad.tapping = false;
}
```

View File

@ -80,17 +80,17 @@ let
cp -r --no-preserve=all $inputs/* .
substituteInPlace ./manual.md \
--replace-fail '@NIXOS_VERSION@' "${version}"
--replace '@NIXOS_VERSION@' "${version}"
substituteInPlace ./configuration/configuration.md \
--replace-fail \
--replace \
'@MODULE_CHAPTERS@' \
${escapeShellArg (concatMapStringsSep "\n" (p: "${p.value}") config.meta.doc)}
substituteInPlace ./nixos-options.md \
--replace-fail \
--replace \
'@NIXOS_OPTIONS_JSON@' \
${optionsDoc.optionsJSON}/${common.outputPath}/options.json
substituteInPlace ./development/writing-nixos-tests.section.md \
--replace-fail \
--replace \
'@NIXOS_TEST_OPTIONS_JSON@' \
${testOptionsDoc.optionsJSON}/${common.outputPath}/options.json
sed -e '/@PYTHON_MACHINE_METHODS@/ {' -e 'r ${testDriverMachineDocstrings}/machine-methods.md' -e 'd' -e '}' \

View File

@ -1,17 +1,17 @@
# Bootspec {#sec-bootspec}
# Experimental feature: Bootspec {#sec-experimental-bootspec}
Bootspec is a feature introduced in [RFC-0125](https://github.com/NixOS/rfcs/pull/125) in order to standardize bootloader support and advanced boot workflows such as SecureBoot and potentially more.
The reference implementation can be found [here](https://github.com/NixOS/nixpkgs/pull/172237).
Bootspec is a experimental feature, introduced in the [RFC-0125 proposal](https://github.com/NixOS/rfcs/pull/125), the reference implementation can be found [there](https://github.com/NixOS/nixpkgs/pull/172237) in order to standardize bootloader support
and advanced boot workflows such as SecureBoot and potentially more.
The creation of bootspec documents is enabled by default.
You can enable the creation of bootspec documents through [`boot.bootspec.enable = true`](options.html#opt-boot.bootspec.enable), which will prompt a warning until [RFC-0125](https://github.com/NixOS/rfcs/pull/125) is officially merged.
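
For illustration, a minimal sketch of turning the option named above on explicitly in a NixOS configuration (on recent NixOS this is already the default):

```nix
{
  # Generate bootspec documents for each system generation.
  boot.bootspec.enable = true;
}
```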
## Schema {#sec-bootspec-schema}
## Schema {#sec-experimental-bootspec-schema}
The bootspec schema is versioned and validated against [a CUE schema file](https://cuelang.org/) which should be considered the source of truth for your applications.
You will find the current version [here](../../../modules/system/activation/bootspec.cue).
## Extensions mechanism {#sec-bootspec-extensions}
## Extensions mechanism {#sec-experimental-bootspec-extensions}
Bootspec cannot account for all use cases.
@ -29,9 +29,8 @@ An example for SecureBoot is to get the Nix store path to `/etc/os-release` in o
To reduce incompatibility and prevent names from clashing between applications, it is **highly recommended** to use a unique namespace for your extensions.
## External bootloaders {#sec-bootspec-external-bootloaders}
## External bootloaders {#sec-experimental-bootspec-external-bootloaders}
It is possible to enable your own bootloader through [`boot.loader.external.installHook`](options.html#opt-boot.loader.external.installHook) which can wrap an existing bootloader.
Currently, there is no good story to compose existing bootloaders to enrich their features, e.g. SecureBoot, etc.
It will be necessary to reimplement or reuse existing parts.
Currently, there is no good story to compose existing bootloaders to enrich their features, e.g. SecureBoot, etc. It will be necessary to reimplement or reuse existing parts.
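
A hedged sketch of wiring up an external bootloader via the option named above; the wrapper script and its behaviour are purely illustrative, only the option path comes from the text:

```nix
{ pkgs, ... }:
{
  boot.loader.external = {
    enable = true;
    # Hypothetical install script; NixOS invokes it to install your
    # bootloader whenever the system configuration is switched.
    installHook = pkgs.writeShellScript "install-my-bootloader" ''
      exec my-bootloader-install "$@"
    '';
  };
}
```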

View File

@ -173,7 +173,7 @@ lib.mkOption {
## Extensible Option Types {#sec-option-declarations-eot}
Extensible option types is a feature that allows to extend certain types
Extensible option types is a feature that allow to extend certain types
declarations through multiple module files. This feature only works with a
restricted set of types, namely `enum` and `submodules` and any composed
forms of them.
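
A minimal sketch of the merging behaviour, using two inline modules that both declare the same `enum` option (the option name is made up for the example):

```nix
{ lib, ... }:
{
  imports = [
    # First declaration: only "sqlite" is allowed.
    { options.services.example.backend = lib.mkOption { type = lib.types.enum [ "sqlite" ]; }; }
    # Second declaration: the enum is merged, adding "postgresql" as an allowed value.
    { options.services.example.backend = lib.mkOption { type = lib.types.enum [ "postgresql" ]; }; }
  ];

  # With both declarations merged, either value type-checks.
  config.services.example.backend = "postgresql";
}
```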

View File

@ -44,7 +44,7 @@ As an alternative, you can proxy the guest shell to a local TCP server by first
starting a TCP server in a terminal using the command:
```ShellSession
$ socat 'READLINE,PROMPT=$ ' tcp-listen:4444,reuseaddr
$ socat 'READLINE,PROMPT=$ ' tcp-listen:4444,reuseaddr`
```
In the terminal where the test driver is running, connect to this server by

View File

@ -146,27 +146,6 @@ have a predefined type and string generator already declared under
: Outputs the given attribute set as an Elixir map, instead of the
default Elixir keyword list
`pkgs.formats.php { finalVariable }` []{#pkgs-formats-php}
: A function taking an attribute set with values
`finalVariable`
: The variable that will store the generated expression (usually `config`). If set to `null`, the generated expression will contain `return`.
It returns a set with PHP-Config-specific attributes `type`, `lib`, and
`generate` as specified [below](#pkgs-formats-result).
The `lib` attribute contains functions to be used in settings, for
generating special PHP values:
`mkRaw phpCode`
: Outputs the given string as raw PHP code
`mkMixedArray list set`
: Creates a PHP array that contains both indexed and associative values. For example, `lib.mkMixedArray [ "hello" "world" ] { "nix" = "is-great"; }` returns `['hello', 'world', 'nix' => 'is-great']`
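
A short usage sketch, assuming a hypothetical NixOS configuration that writes a PHP config file with this format; everything except `pkgs.formats.php`, `generate`, `lib.mkRaw` and `lib.mkMixedArray` is illustrative:

```nix
{ pkgs, ... }:
let
  phpFormat = pkgs.formats.php { finalVariable = "config"; };
in
{
  # Roughly renders to:
  #   $config = ['debug' => false, 'now' => time(), 'paths' => ['/srv', 'cache' => '/tmp']];
  environment.etc."example/config.php".source = phpFormat.generate "config.php" {
    debug = false;
    now = phpFormat.lib.mkRaw "time()";
    paths = phpFormat.lib.mkMixedArray [ "/srv" ] { cache = "/tmp"; };
  };
}
```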
[]{#pkgs-formats-result}
These functions all return an attribute set with these values:

View File

@ -42,11 +42,9 @@ The first steps to all these are the same:
will be safer to use the `nixos-*` channels instead:
```ShellSession
$ nix-channel --add https://nixos.org/channels/nixos-<version> nixpkgs
$ nix-channel --add https://nixos.org/channels/nixos-version nixpkgs
```
Where `<version>` corresponds to the latest version available on [channels.nixos.org](https://channels.nixos.org/).
You may want to throw in a `nix-channel --update` for good measure.
1. Install the NixOS installation tools:

View File

@ -3,8 +3,8 @@
Installing NixOS into a VirtualBox guest is convenient for users who
want to try NixOS without installing it on bare metal. If you want to
use a pre-made VirtualBox appliance, it is available at [the downloads
page](https://nixos.org/download/#nixos-virtualbox). If you want to set
up a VirtualBox guest manually, follow these instructions:
page](https://nixos.org/nixos/download.html). If you want to set up a
VirtualBox guest manually, follow these instructions:
1. Add a New Machine in VirtualBox with OS Type "Linux / Other Linux"

View File

@ -6,7 +6,7 @@ expressions and associated binaries. The NixOS channels are updated
automatically from NixOS's Git repository after certain tests have
passed and all packages have been built. These channels are:
- *Stable channels*, such as [`nixos-24.05`](https://channels.nixos.org/nixos-24.05).
- *Stable channels*, such as [`nixos-23.11`](https://channels.nixos.org/nixos-23.11).
These only get conservative bug fixes and package upgrades. For
instance, a channel update may cause the Linux kernel on your system
to be upgraded from 4.19.34 to 4.19.38 (a minor bug fix), but not
@ -19,7 +19,7 @@ passed and all packages have been built. These channels are:
radical changes between channel updates. It's not recommended for
production systems.
- *Small channels*, such as [`nixos-24.05-small`](https://channels.nixos.org/nixos-24.05-small)
- *Small channels*, such as [`nixos-23.11-small`](https://channels.nixos.org/nixos-23.11-small)
or [`nixos-unstable-small`](https://channels.nixos.org/nixos-unstable-small).
These are identical to the stable and unstable channels described above,
except that they contain fewer binary packages. This means they get updated
@ -33,13 +33,13 @@ To see what channels are available, go to <https://channels.nixos.org>.
contains the channel's latest version and includes ISO images and
VirtualBox appliances.) Please note that during the release process,
channels that are not yet released will be present here as well. See the
Getting NixOS page <https://nixos.org/download/> to find the newest
supported stable release.
Getting NixOS page <https://nixos.org/nixos/download.html> to find the
newest supported stable release.
When you first install NixOS, you're automatically subscribed to the
NixOS channel that corresponds to your installation source. For
instance, if you installed from a 24.05 ISO, you will be subscribed to
the `nixos-24.05` channel. To see which NixOS channel you're subscribed
instance, if you installed from a 23.11 ISO, you will be subscribed to
the `nixos-23.11` channel. To see which NixOS channel you're subscribed
to, run the following as root:
```ShellSession
@ -54,16 +54,16 @@ To switch to a different NixOS channel, do
```
(Be sure to include the `nixos` parameter at the end.) For instance, to
use the NixOS 24.05 stable channel:
use the NixOS 23.11 stable channel:
```ShellSession
# nix-channel --add https://channels.nixos.org/nixos-24.05 nixos
# nix-channel --add https://channels.nixos.org/nixos-23.11 nixos
```
If you have a server, you may want to use the "small" channel instead:
```ShellSession
# nix-channel --add https://channels.nixos.org/nixos-24.05-small nixos
# nix-channel --add https://channels.nixos.org/nixos-23.11-small nixos
```
And if you want to live on the bleeding edge:
@ -117,6 +117,6 @@ modules. You can also specify a channel explicitly, e.g.
```nix
{
system.autoUpgrade.channel = "https://channels.nixos.org/nixos-24.05";
system.autoUpgrade.channel = "https://channels.nixos.org/nixos-23.11";
}
```

View File

@ -3,7 +3,6 @@
This section lists the release notes for each stable version of NixOS and current unstable revision.
```{=include=} sections
rl-2411.section.md
rl-2405.section.md
rl-2311.section.md
rl-2305.section.md

View File

@ -146,7 +146,7 @@ In addition to numerous new and upgraded packages, this release has the followin
- [touchegg](https://github.com/JoseExposito/touchegg), a multi-touch gesture recognizer. Available as [services.touchegg](#opt-services.touchegg.enable).
- [pantheon-tweaks](https://github.com/pantheon-tweaks/pantheon-tweaks), an unofficial system settings panel for Pantheon. Available as `programs.pantheon-tweaks`.
- [pantheon-tweaks](https://github.com/pantheon-tweaks/pantheon-tweaks), an unofficial system settings panel for Pantheon. Available as [programs.pantheon-tweaks](#opt-programs.pantheon-tweaks.enable).
- [joycond](https://github.com/DanielOgorchock/joycond), a service that uses `hid-nintendo` to provide Nintendo Joy-Con pairing and better Nintendo Switch Pro controller support.

View File

@ -366,7 +366,7 @@ In addition to numerous new and upgraded packages, this release includes the fol
__Note:__ secrets from these files will be leaked into the store unless you use a
[**file**-provider or env-var](https://grafana.com/docs/grafana/latest/setup-grafana/configure-grafana/#file-provider) for secrets!
- `services.grafana.provision.notifiers` is not affected by this change because
- [services.grafana.provision.notifiers](#opt-services.grafana.provision.notifiers) is not affected by this change because
this feature is deprecated by Grafana and will probably be removed in Grafana 10.
It's recommended to use `services.grafana.provision.alerting.contactPoints` instead.

File diff suppressed because it is too large

View File

@ -1,80 +0,0 @@
# Release 24.11 (“Vicuña”, 2024.11/??) {#sec-release-24.11}
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
## Highlights {#sec-release-24.11-highlights}
- [AMDVLK](https://github.com/GPUOpen-Drivers/AMDVLK), AMD's open source Vulkan driver, is now available to be configured as `hardware.amdgpu.amdvlk` option.
This also allows configuring runtime settings of AMDVLK and enabling experimental features.
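
A minimal sketch, assuming the usual `enable` flag under the option path named above (the exact sub-options are not spelled out in this note):

```nix
{
  # Make AMD's AMDVLK Vulkan driver available in addition to the default driver.
  hardware.amdgpu.amdvlk.enable = true;
}
```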
## New Services {#sec-release-24.11-new-services}
- [Open-WebUI](https://github.com/open-webui/open-webui), a user-friendly WebUI
for LLMs. Available as [services.open-webui](#opt-services.open-webui.enable)
service.
- [Quickwit](https://quickwit.io), sub-second search & analytics engine on cloud storage. Available as [services.quickwit](options.html#opt-services.quickwit).
- [Renovate](https://github.com/renovatebot/renovate), a dependency updating tool for various git forges and language ecosystems. Available as [services.renovate](#opt-services.renovate.enable).
## Backward Incompatibilities {#sec-release-24.11-incompatibilities}
- The `nginx` package no longer includes the `gd` and `geoip` dependencies. To enable them, override the `nginx` package with the optionals `withImageFilter` and `withGeoIP`.
- `openssh` and `openssh_hpn` are now compiled without Kerberos 5 / GSSAPI support in an effort to reduce the attack surface of the components for the majority of users. Users needing this support can
use the new `opensshWithKerberos` and `openssh_hpnWithKerberos` flavors (e.g. `programs.ssh.package = pkgs.openssh_gssapi`).
- `nvimpager` was updated to version 0.13.0, which changes the order of user and
nvimpager settings: user commands in `-c` and `--cmd` now override the
respective default settings because they are executed later.
- `services.forgejo.mailerPasswordFile` has been deprecated by the drop-in replacement `services.forgejo.secrets.mailer.PASSWD`,
which is part of the new free-form `services.forgejo.secrets` option.
`services.forgejo.secrets` is a small wrapper over systemd's `LoadCredential=`. It has the same structure (sections/keys) as
`services.forgejo.settings` but takes file paths that will be read before service startup instead of some plaintext value.
- `services.ddclient.use` has been deprecated: `ddclient` now supports separate IPv4 and IPv6 configuration. Use `services.ddclient.usev4` and `services.ddclient.usev6` instead.
- The Invoiceplane module now only accepts the structured `settings` option.
`extraConfig` is now removed.
- Legacy package `stalwart-mail_0_6` was dropped, please note the
[manual upgrade process](https://github.com/stalwartlabs/mail-server/blob/main/UPGRADING.md)
before changing the package to `pkgs.stalwart-mail` in
[`services.stalwart-mail.package`](#opt-services.stalwart-mail.package).
- `haskell.lib.compose.justStaticExecutables` now disallows references to GHC in the
output by default, to alert users to closure size issues caused by
[#164630](https://github.com/NixOS/nixpkgs/issues/164630). See ["Packaging
Helpers" in the Haskell section of the Nixpkgs
manual](https://nixos.org/manual/nixpkgs/unstable/#haskell-packaging-helpers)
for information on working around `output '...' is not allowed to refer to
the following paths` errors caused by this change.
- The `stalwart-mail` module now uses RocksDB as the default storage backend
for `stateVersion` ≥ 24.11. (It was previously using SQLite for structured
data and the filesystem for blobs).
- `zx` was updated to v8, which introduces several breaking changes.
See the [v8 changelog](https://github.com/google/zx/releases/tag/8.0.0) for more information.
- The `portunus` package and service do not support weak password hashes anymore.
If you installed Portunus on NixOS 23.11 or earlier, upgrade to NixOS 24.05 first to get support for strong password hashing.
Then, follow the instructions on the [upstream release notes](https://github.com/majewsky/portunus/releases/tag/v2.0.0) to upgrade all existing user accounts to strong password hashes.
If you need to upgrade to 24.11 without having completed the migration, consider the security implications of weak password hashes on your user accounts, and add the following to your configuration:
```nix
services.portunus.package = pkgs.portunus.override { libxcrypt = pkgs.libxcrypt-legacy; };
services.portunus.ldap.package = pkgs.openldap.override { libxcrypt = pkgs.libxcrypt-legacy; };
```
## Other Notable Changes {#sec-release-24.11-notable-changes}
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
- `hareHook` has been added as the language framework for Hare. From now on, it,
not the `hare` package, should be added to `nativeBuildInputs` when building
Hare programs.
- To facilitate dependency injection, the `imgui` package now builds a static archive using vcpkg's CMake rules.
The derivation now installs "impl" headers selectively instead of by a wildcard.
Use `imgui.src` if you just want to access the unpacked sources.

View File

@ -35,8 +35,6 @@ rec {
aarch64-linux = "${qemuPkg}/bin/qemu-system-aarch64 -machine virt,gic-version=max,accel=kvm:tcg -cpu max";
powerpc64le-linux = "${qemuPkg}/bin/qemu-system-ppc64 -machine powernv";
powerpc64-linux = "${qemuPkg}/bin/qemu-system-ppc64 -machine powernv";
riscv32-linux = "${qemuPkg}/bin/qemu-system-riscv32 -machine virt";
riscv64-linux = "${qemuPkg}/bin/qemu-system-riscv64 -machine virt";
x86_64-darwin = "${qemuPkg}/bin/qemu-kvm -cpu max";
};
otherHostGuestMatrix = {

View File

@ -1,4 +1,4 @@
{ config, lib, pkgs, utils }:
{ config, lib, pkgs }:
let
inherit (lib)
@ -18,7 +18,6 @@ let
flip
head
isInt
isFloat
isList
isPath
length
@ -153,7 +152,7 @@ in rec {
"Systemd ${group} field `${name}' is outside the range [${toString min},${toString max}]";
assertRangeOrOneOf = name: min: max: values: group: attr:
optional (attr ? ${name} && !(((isInt attr.${name} || isFloat attr.${name}) && min <= attr.${name} && max >= attr.${name}) || elem attr.${name} values))
optional (attr ? ${name} && !((min <= attr.${name} && max >= attr.${name}) || elem attr.${name} values))
"Systemd ${group} field `${name}' is not a value in range [${toString min},${toString max}], or one of ${toString values}";
assertMinimum = name: min: group: attr:
@ -182,30 +181,6 @@ in rec {
in if errors == [] then true
else trace (concatStringsSep "\n" errors) false;
checkUnitConfigWithLegacyKey = legacyKey: group: checks: attrs:
let
dump = lib.generators.toPretty { }
(lib.generators.withRecursion { depthLimit = 2; throwOnDepthLimit = false; } attrs);
attrs' =
if legacyKey == null
then attrs
else if ! attrs?${legacyKey}
then attrs
else if removeAttrs attrs [ legacyKey ] == {}
then attrs.${legacyKey}
else throw ''
The declaration
${dump}
must not mix unit options with the legacy key '${legacyKey}'.
This can be fixed by moving all settings from within ${legacyKey}
one level up.
'';
in
checkUnitConfig group checks attrs';
toOption = x:
if x == true then "true"
else if x == false then "false"
@ -421,41 +396,8 @@ in rec {
};
};
serviceConfig = { name, config, ... }: {
config = {
name = "${name}.service";
environment.PATH = mkIf (config.path != []) "${makeBinPath config.path}:${makeSearchPathOutput "bin" "sbin" config.path}";
};
};
pathConfig = { name, config, ... }: {
config = {
name = "${name}.path";
};
};
socketConfig = { name, config, ... }: {
config = {
name = "${name}.socket";
};
};
sliceConfig = { name, config, ... }: {
config = {
name = "${name}.slice";
};
};
targetConfig = { name, config, ... }: {
config = {
name = "${name}.target";
};
};
timerConfig = { name, config, ... }: {
config = {
name = "${name}.timer";
};
serviceConfig = { config, ... }: {
config.environment.PATH = mkIf (config.path != []) "${makeBinPath config.path}:${makeSearchPathOutput "bin" "sbin" config.path}";
};
stage2ServiceConfig = {
@ -474,7 +416,6 @@ in rec {
mountConfig = { config, ... }: {
config = {
name = "${utils.escapeSystemdPath config.where}.mount";
mountConfig =
{ What = config.what;
Where = config.where;
@ -488,7 +429,6 @@ in rec {
automountConfig = { config, ... }: {
config = {
name = "${utils.escapeSystemdPath config.where}.automount";
automountConfig =
{ Where = config.where;
};
@ -504,8 +444,8 @@ in rec {
WantedBy=${concatStringsSep " " def.wantedBy}
'';
targetToUnit = def:
{ inherit (def) name aliases wantedBy requiredBy upheldBy enable overrideStrategy;
targetToUnit = name: def:
{ inherit (def) aliases wantedBy requiredBy upheldBy enable overrideStrategy;
text =
''
[Unit]
@ -513,8 +453,8 @@ in rec {
'';
};
serviceToUnit = def:
{ inherit (def) name aliases wantedBy requiredBy upheldBy enable overrideStrategy;
serviceToUnit = name: def:
{ inherit (def) aliases wantedBy requiredBy upheldBy enable overrideStrategy;
text = commonUnitText def (''
[Service]
'' + (let env = cfg.globalEnvironment // def.environment;
@ -523,7 +463,7 @@ in rec {
"Environment=${toJSON "${n}=${env.${n}}"}\n";
# systemd max line length is now 1MiB
# https://github.com/systemd/systemd/commit/e6dde451a51dc5aaa7f4d98d39b8fe735f73d2af
in if stringLength s >= 1048576 then throw "The value of the environment variable ${n} in systemd service ${def.name}.service is too long." else s) (attrNames env))
in if stringLength s >= 1048576 then throw "The value of the environment variable ${n} in systemd service ${name}.service is too long." else s) (attrNames env))
+ (if def ? reloadIfChanged && def.reloadIfChanged then ''
X-ReloadIfChanged=true
'' else if (def ? restartIfChanged && !def.restartIfChanged) then ''
@ -534,8 +474,8 @@ in rec {
'' + attrsToSection def.serviceConfig);
};
socketToUnit = def:
{ inherit (def) name aliases wantedBy requiredBy upheldBy enable overrideStrategy;
socketToUnit = name: def:
{ inherit (def) aliases wantedBy requiredBy upheldBy enable overrideStrategy;
text = commonUnitText def ''
[Socket]
${attrsToSection def.socketConfig}
@ -544,40 +484,40 @@ in rec {
'';
};
timerToUnit = def:
{ inherit (def) name aliases wantedBy requiredBy upheldBy enable overrideStrategy;
timerToUnit = name: def:
{ inherit (def) aliases wantedBy requiredBy upheldBy enable overrideStrategy;
text = commonUnitText def ''
[Timer]
${attrsToSection def.timerConfig}
'';
};
pathToUnit = def:
{ inherit (def) name aliases wantedBy requiredBy upheldBy enable overrideStrategy;
pathToUnit = name: def:
{ inherit (def) aliases wantedBy requiredBy upheldBy enable overrideStrategy;
text = commonUnitText def ''
[Path]
${attrsToSection def.pathConfig}
'';
};
mountToUnit = def:
{ inherit (def) name aliases wantedBy requiredBy upheldBy enable overrideStrategy;
mountToUnit = name: def:
{ inherit (def) aliases wantedBy requiredBy upheldBy enable overrideStrategy;
text = commonUnitText def ''
[Mount]
${attrsToSection def.mountConfig}
'';
};
automountToUnit = def:
{ inherit (def) name aliases wantedBy requiredBy upheldBy enable overrideStrategy;
automountToUnit = name: def:
{ inherit (def) aliases wantedBy requiredBy upheldBy enable overrideStrategy;
text = commonUnitText def ''
[Automount]
${attrsToSection def.automountConfig}
'';
};
sliceToUnit = def:
{ inherit (def) name aliases wantedBy requiredBy upheldBy enable overrideStrategy;
sliceToUnit = name: def:
{ inherit (def) aliases wantedBy requiredBy upheldBy enable overrideStrategy;
text = commonUnitText def ''
[Slice]
${attrsToSection def.sliceConfig}

View File

@ -63,13 +63,13 @@ in {
${attrsToSection def.l2tpConfig}
'' + flip concatMapStrings def.l2tpSessions (x: ''
[L2TPSession]
${attrsToSection x}
${attrsToSection x.l2tpSessionConfig}
'') + optionalString (def.wireguardConfig != { }) ''
[WireGuard]
${attrsToSection def.wireguardConfig}
'' + flip concatMapStrings def.wireguardPeers (x: ''
[WireGuardPeer]
${attrsToSection x}
${attrsToSection x.wireguardPeerConfig}
'') + optionalString (def.bondConfig != { }) ''
[Bond]
${attrsToSection def.bondConfig}
@ -122,13 +122,13 @@ in {
${concatStringsSep "\n" (map (s: "Xfrm=${s}") def.xfrm)}
'' + "\n" + flip concatMapStrings def.addresses (x: ''
[Address]
${attrsToSection x}
${attrsToSection x.addressConfig}
'') + flip concatMapStrings def.routingPolicyRules (x: ''
[RoutingPolicyRule]
${attrsToSection x}
${attrsToSection x.routingPolicyRuleConfig}
'') + flip concatMapStrings def.routes (x: ''
[Route]
${attrsToSection x}
${attrsToSection x.routeConfig}
'') + optionalString (def.dhcpV4Config != { }) ''
[DHCPv4]
${attrsToSection def.dhcpV4Config}
@ -147,27 +147,24 @@ in {
'' + optionalString (def.ipv6SendRAConfig != { }) ''
[IPv6SendRA]
${attrsToSection def.ipv6SendRAConfig}
'' + flip concatMapStrings def.ipv6PREF64Prefixes (x: ''
[IPv6PREF64Prefix]
${attrsToSection x}
'') + flip concatMapStrings def.ipv6Prefixes (x: ''
'' + flip concatMapStrings def.ipv6Prefixes (x: ''
[IPv6Prefix]
${attrsToSection x}
${attrsToSection x.ipv6PrefixConfig}
'') + flip concatMapStrings def.ipv6RoutePrefixes (x: ''
[IPv6RoutePrefix]
${attrsToSection x}
${attrsToSection x.ipv6RoutePrefixConfig}
'') + flip concatMapStrings def.dhcpServerStaticLeases (x: ''
[DHCPServerStaticLease]
${attrsToSection x}
${attrsToSection x.dhcpServerStaticLeaseConfig}
'') + optionalString (def.bridgeConfig != { }) ''
[Bridge]
${attrsToSection def.bridgeConfig}
'' + flip concatMapStrings def.bridgeFDBs (x: ''
[BridgeFDB]
${attrsToSection x}
${attrsToSection x.bridgeFDBConfig}
'') + flip concatMapStrings def.bridgeMDBs (x: ''
[BridgeMDB]
${attrsToSection x}
${attrsToSection x.bridgeMDBConfig}
'') + optionalString (def.lldpConfig != { }) ''
[LLDP]
${attrsToSection def.lldpConfig}
@ -254,7 +251,7 @@ in {
${attrsToSection def.quickFairQueueingConfigClass}
'' + flip concatMapStrings def.bridgeVLANs (x: ''
[BridgeVLAN]
${attrsToSection x}
${attrsToSection x.bridgeVLANConfig}
'') + def.extraConfig;
}

View File

@ -5,13 +5,8 @@ let
automountConfig
makeUnit
mountConfig
pathConfig
sliceConfig
socketConfig
stage1ServiceConfig
stage2ServiceConfig
targetConfig
timerConfig
unitConfig
;
@ -53,32 +48,29 @@ let
;
in
{
rec {
units = attrsOf (submodule ({ name, config, ... }: {
options = concreteUnitOptions;
config = {
name = mkDefault name;
unit = mkDefault (makeUnit name config);
};
config = { unit = mkDefault (makeUnit name config); };
}));
services = attrsOf (submodule [ stage2ServiceOptions unitConfig stage2ServiceConfig ]);
initrdServices = attrsOf (submodule [ stage1ServiceOptions unitConfig stage1ServiceConfig ]);
targets = attrsOf (submodule [ stage2CommonUnitOptions unitConfig targetConfig ]);
initrdTargets = attrsOf (submodule [ stage1CommonUnitOptions unitConfig targetConfig ]);
targets = attrsOf (submodule [ stage2CommonUnitOptions unitConfig ]);
initrdTargets = attrsOf (submodule [ stage1CommonUnitOptions unitConfig ]);
sockets = attrsOf (submodule [ stage2SocketOptions unitConfig socketConfig]);
initrdSockets = attrsOf (submodule [ stage1SocketOptions unitConfig socketConfig ]);
sockets = attrsOf (submodule [ stage2SocketOptions unitConfig ]);
initrdSockets = attrsOf (submodule [ stage1SocketOptions unitConfig ]);
timers = attrsOf (submodule [ stage2TimerOptions unitConfig timerConfig ]);
initrdTimers = attrsOf (submodule [ stage1TimerOptions unitConfig timerConfig ]);
timers = attrsOf (submodule [ stage2TimerOptions unitConfig ]);
initrdTimers = attrsOf (submodule [ stage1TimerOptions unitConfig ]);
paths = attrsOf (submodule [ stage2PathOptions unitConfig pathConfig ]);
initrdPaths = attrsOf (submodule [ stage1PathOptions unitConfig pathConfig ]);
paths = attrsOf (submodule [ stage2PathOptions unitConfig ]);
initrdPaths = attrsOf (submodule [ stage1PathOptions unitConfig ]);
slices = attrsOf (submodule [ stage2SliceOptions unitConfig sliceConfig ]);
initrdSlices = attrsOf (submodule [ stage1SliceOptions unitConfig sliceConfig ]);
slices = attrsOf (submodule [ stage2SliceOptions unitConfig ]);
initrdSlices = attrsOf (submodule [ stage1SliceOptions unitConfig ]);
mounts = listOf (submodule [ stage2MountOptions unitConfig mountConfig ]);
initrdMounts = listOf (submodule [ stage1MountOptions unitConfig mountConfig ]);

View File

@ -65,14 +65,6 @@ in rec {
'';
};
name = lib.mkOption {
type = lib.types.str;
description = ''
The name of this systemd unit, including its extension.
This can be used to refer to this unit from other systemd units.
'';
};
overrideStrategy = mkOption {
default = "asDropinIfExists";
type = types.enum [ "asDropinIfExists" "asDropin" ];

View File

@ -24,7 +24,6 @@ python3Packages.buildPythonApplication {
coreutils
netpbm
python3Packages.colorama
python3Packages.junit-xml
python3Packages.ptpython
qemu_pkg
socat
@ -47,7 +46,7 @@ python3Packages.buildPythonApplication {
echo -e "\x1b[32m## run mypy\x1b[0m"
mypy test_driver extract-docstrings.py
echo -e "\x1b[32m## run ruff\x1b[0m"
ruff check .
ruff .
echo -e "\x1b[32m## run black\x1b[0m"
black --check --diff .
'';

Some files were not shown because too many files have changed in this diff