Merge staging-next into staging

This commit is contained in:
github-actions[bot] 2023-11-11 00:02:47 +00:00 committed by GitHub
commit 5f5513224f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
108 changed files with 2081 additions and 1144 deletions

View File

@ -285,11 +285,11 @@ specifying an interpreter version), like this:
```nix ```nix
{ lib { lib
, python3 , python3Packages
, fetchPypi , fetchPypi
}: }:
python3.pkgs.buildPythonApplication rec { python3Packages.buildPythonApplication rec {
pname = "luigi"; pname = "luigi";
version = "2.7.9"; version = "2.7.9";
pyproject = true; pyproject = true;
@ -300,13 +300,13 @@ python3.pkgs.buildPythonApplication rec {
}; };
nativeBuildInputs = [ nativeBuildInputs = [
python3.pkgs.setuptools python3Packages.setuptools
python3.pkgs.wheel python3Packages.wheel
]; ];
propagatedBuildInputs = with python3.pkgs; [ propagatedBuildInputs = [
tornado python3Packages.tornado
python-daemon python3Packages.python-daemon
]; ];
meta = with lib; { meta = with lib; {

View File

@ -244,5 +244,4 @@ Here's a list of places in the library that need to be updated in the future:
- > The file set library is currently somewhat limited but is being expanded to include more functions over time. - > The file set library is currently somewhat limited but is being expanded to include more functions over time.
in [the manual](../../doc/functions/fileset.section.md) in [the manual](../../doc/functions/fileset.section.md)
- If/Once a function to convert `lib.sources` values into file sets exists, the `_coerce` and `toSource` functions should be updated to mention that function in the error when such a value is passed
- If/Once a function exists that can optionally include a path depending on whether it exists, the error message for the path not existing in `_coerce` should mention the new function - If/Once a function exists that can optionally include a path depending on whether it exists, the error message for the path not existing in `_coerce` should mention the new function

View File

@ -3,8 +3,10 @@ let
inherit (import ./internal.nix { inherit lib; }) inherit (import ./internal.nix { inherit lib; })
_coerce _coerce
_singleton
_coerceMany _coerceMany
_toSourceFilter _toSourceFilter
_fromSourceFilter
_unionMany _unionMany
_fileFilter _fileFilter
_printFileset _printFileset
@ -152,7 +154,12 @@ in {
sourceFilter = _toSourceFilter fileset; sourceFilter = _toSourceFilter fileset;
in in
if ! isPath root then if ! isPath root then
if isStringLike root then if root ? _isLibCleanSourceWith then
throw ''
lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
Note that this only works for sources created from paths.''
else if isStringLike root then
throw '' throw ''
lib.fileset.toSource: `root` (${toString root}) is a string-like value, but it should be a path instead. lib.fileset.toSource: `root` (${toString root}) is a string-like value, but it should be a path instead.
Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'' Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
@ -188,6 +195,75 @@ in {
filter = sourceFilter; filter = sourceFilter;
}; };
/*
Create a file set with the same files as a `lib.sources`-based value.
This does not import any of the files into the store.
This can be used to gradually migrate from `lib.sources`-based filtering to `lib.fileset`.
A file set can be turned back into a source using [`toSource`](#function-library-lib.fileset.toSource).
:::{.note}
File sets cannot represent empty directories.
Turning the result of this function back into a source using `toSource` will therefore not preserve empty directories.
:::
Type:
fromSource :: SourceLike -> FileSet
Example:
# There's no cleanSource-like function for file sets yet,
# but we can just convert cleanSource to a file set and use it that way
toSource {
root = ./.;
fileset = fromSource (lib.sources.cleanSource ./.);
}
# Keeping a previous sourceByRegex (which could be migrated to `lib.fileset.unions`),
# but removing a subdirectory using file set functions
difference
(fromSource (lib.sources.sourceByRegex ./. [
"^README\.md$"
# This regex includes everything in ./doc
"^doc(/.*)?$"
]))
./doc/generated
# Use cleanSource, but limit it to only include ./Makefile and files under ./src
intersection
(fromSource (lib.sources.cleanSource ./.))
(unions [
./Makefile
./src
])
*/
fromSource = source:
let
# This function uses `._isLibCleanSourceWith`, `.origSrc` and `.filter`,
# which are technically internal to lib.sources,
# but we'll allow this since both libraries are in the same code base
# and this function is a bridge between them.
isFiltered = source ? _isLibCleanSourceWith;
path = if isFiltered then source.origSrc else source;
in
# We can only support sources created from paths
if ! isPath path then
if isStringLike path then
throw ''
lib.fileset.fromSource: The source origin of the argument is a string-like value ("${toString path}"), but it should be a path instead.
Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.''
else
throw ''
lib.fileset.fromSource: The source origin of the argument is of type ${typeOf path}, but it should be a path instead.''
else if ! pathExists path then
throw ''
lib.fileset.fromSource: The source origin (${toString path}) of the argument does not exist.''
else if isFiltered then
_fromSourceFilter path source.filter
else
# If there's no filter, no need to run the expensive conversion, all subpaths will be included
_singleton path;
/* /*
The file set containing all files that are in either of two given file sets. The file set containing all files that are in either of two given file sets.
This is the same as [`unions`](#function-library-lib.fileset.unions), This is the same as [`unions`](#function-library-lib.fileset.unions),

View File

@ -167,7 +167,12 @@ rec {
else else
value value
else if ! isPath value then else if ! isPath value then
if isStringLike value then if value ? _isLibCleanSourceWith then
throw ''
${context} is a `lib.sources`-based value, but it should be a file set or a path instead.
To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
Note that this only works for sources created from paths.''
else if isStringLike value then
throw '' throw ''
${context} ("${toString value}") is a string-like value, but it should be a file set or a path instead. ${context} ("${toString value}") is a string-like value, but it should be a file set or a path instead.
Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'' Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
@ -470,6 +475,59 @@ rec {
else else
nonEmpty; nonEmpty;
# Turn a builtins.filterSource-based source filter on a root path into a file set
# containing only files included by the filter.
# The filter is lazily called as necessary to determine whether paths are included
# Type: Path -> (String -> String -> Bool) -> fileset
_fromSourceFilter = root: sourceFilter:
let
# During the recursion we need to track both:
# - The path value such that we can safely call `readDir` on it
# - The path string value such that we can correctly call the `filter` with it
#
# While we could just recurse with the path value,
# this would then require converting it to a path string for every path,
# which is a fairly expensive operation
# Create a file set from a directory entry
fromDirEntry = path: pathString: type:
# The filter needs to run on the path as a string
if ! sourceFilter pathString type then
null
else if type == "directory" then
fromDir path pathString
else
type;
# Create a file set from a directory
fromDir = path: pathString:
mapAttrs
# This looks a bit funny, but we need both the path-based and the path string-based values
(name: fromDirEntry (path + "/${name}") (pathString + "/${name}"))
# We need to readDir on the path value, because reading on a path string
# would be unspecified if there are multiple filesystem roots
(readDir path);
rootPathType = pathType root;
# We need to convert the path to a string to imitate what builtins.path calls the filter function with.
# We don't want to rely on `toString` for this though because it's not very well defined, see ../path/README.md
# So instead we use `lib.path.splitRoot` to safely deconstruct the path into its filesystem root and subpath
# We don't need the filesystem root though, builtins.path doesn't expose that in any way to the filter.
# So we only need the components, which we then turn into a string as one would expect.
rootString = "/" + concatStringsSep "/" (components (splitRoot root).subpath);
in
if rootPathType == "directory" then
# We imitate builtins.path not calling the filter on the root path
_create root (fromDir root rootString)
else
# Direct files are always included by builtins.path without calling the filter
# But we need to lift up the base path to its parent to satisfy the base path invariant
_create (dirOf root)
{
${baseNameOf root} = rootPathType;
};
# Transforms the filesetTree of a file set to a shorter base path, e.g. # Transforms the filesetTree of a file set to a shorter base path, e.g.
# _shortenTreeBase [ "foo" ] (_create /foo/bar null) # _shortenTreeBase [ "foo" ] (_create /foo/bar null)
# => { bar = null; } # => { bar = null; }

View File

@ -1,5 +1,7 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# shellcheck disable=SC2016 # shellcheck disable=SC2016
# shellcheck disable=SC2317
# shellcheck disable=SC2192
# Tests lib.fileset # Tests lib.fileset
# Run: # Run:
@ -224,23 +226,17 @@ withFileMonitor() {
fi fi
} }
# Check whether a file set includes/excludes declared paths as expected, usage:
# Create the tree structure declared in the tree variable, usage:
# #
# tree=( # tree=(
# [a/b] =1 # Declare that file a/b should exist and expect it to be included in the store path # [a/b] = # Declare that file a/b should exist
# [c/a] = # Declare that file c/a should exist and expect it to be excluded in the store path # [c/a] = # Declare that file c/a should exist
# [c/d/]= # Declare that directory c/d/ should exist and expect it to be excluded in the store path # [c/d/]= # Declare that directory c/d/ should exist
# ) # )
# checkFileset './a' # Pass the fileset as the argument # createTree
declare -A tree declare -A tree
checkFileset() { createTree() {
# New subshell so that we can have a separate trap handler, see `trap` below
local fileset=$1
# Process the tree into separate arrays for included paths, excluded paths and excluded files.
local -a included=()
local -a excluded=()
local -a excludedFiles=()
# Track which paths need to be created # Track which paths need to be created
local -a dirsToCreate=() local -a dirsToCreate=()
local -a filesToCreate=() local -a filesToCreate=()
@ -248,24 +244,9 @@ checkFileset() {
# If keys end with a `/` we treat them as directories, otherwise files # If keys end with a `/` we treat them as directories, otherwise files
if [[ "$p" =~ /$ ]]; then if [[ "$p" =~ /$ ]]; then
dirsToCreate+=("$p") dirsToCreate+=("$p")
isFile=
else else
filesToCreate+=("$p") filesToCreate+=("$p")
isFile=1
fi fi
case "${tree[$p]}" in
1)
included+=("$p")
;;
0)
excluded+=("$p")
if [[ -n "$isFile" ]]; then
excludedFiles+=("$p")
fi
;;
*)
die "Unsupported tree value: ${tree[$p]}"
esac
done done
# Create all the necessary paths. # Create all the necessary paths.
@ -280,6 +261,43 @@ checkFileset() {
mkdir -p "${parentsToCreate[@]}" mkdir -p "${parentsToCreate[@]}"
touch "${filesToCreate[@]}" touch "${filesToCreate[@]}"
fi fi
}
# Check whether a file set includes/excludes declared paths as expected, usage:
#
# tree=(
# [a/b] =1 # Declare that file a/b should exist and expect it to be included in the store path
# [c/a] = # Declare that file c/a should exist and expect it to be excluded in the store path
# [c/d/]= # Declare that directory c/d/ should exist and expect it to be excluded in the store path
# )
# checkFileset './a' # Pass the fileset as the argument
checkFileset() {
# New subshell so that we can have a separate trap handler, see `trap` below
local fileset=$1
# Create the tree
createTree
# Process the tree into separate arrays for included paths, excluded paths and excluded files.
local -a included=()
local -a excluded=()
local -a excludedFiles=()
for p in "${!tree[@]}"; do
case "${tree[$p]}" in
1)
included+=("$p")
;;
0)
excluded+=("$p")
# If keys end with a `/` we treat them as directories, otherwise files
if [[ ! "$p" =~ /$ ]]; then
excludedFiles+=("$p")
fi
;;
*)
die "Unsupported tree value: ${tree[$p]}"
esac
done
expression="toSource { root = ./.; fileset = $fileset; }" expression="toSource { root = ./.; fileset = $fileset; }"
@ -321,6 +339,10 @@ checkFileset() {
expectFailure 'toSource { root = "/nix/store/foobar"; fileset = ./.; }' 'lib.fileset.toSource: `root` \(/nix/store/foobar\) is a string-like value, but it should be a path instead. expectFailure 'toSource { root = "/nix/store/foobar"; fileset = ./.; }' 'lib.fileset.toSource: `root` \(/nix/store/foobar\) is a string-like value, but it should be a path instead.
\s*Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.' \s*Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
expectFailure 'toSource { root = cleanSourceWith { src = ./.; }; fileset = ./.; }' 'lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
\s*To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
\s*Note that this only works for sources created from paths.'
# Only paths are accepted as `root` # Only paths are accepted as `root`
expectFailure 'toSource { root = 10; fileset = ./.; }' 'lib.fileset.toSource: `root` is of type int, but it should be a path instead.' expectFailure 'toSource { root = 10; fileset = ./.; }' 'lib.fileset.toSource: `root` is of type int, but it should be a path instead.'
@ -365,6 +387,9 @@ rm -rf -- *
expectFailure 'toSource { root = ./.; fileset = 10; }' 'lib.fileset.toSource: `fileset` is of type int, but it should be a file set or a path instead.' expectFailure 'toSource { root = ./.; fileset = 10; }' 'lib.fileset.toSource: `fileset` is of type int, but it should be a file set or a path instead.'
expectFailure 'toSource { root = ./.; fileset = "/some/path"; }' 'lib.fileset.toSource: `fileset` \("/some/path"\) is a string-like value, but it should be a file set or a path instead. expectFailure 'toSource { root = ./.; fileset = "/some/path"; }' 'lib.fileset.toSource: `fileset` \("/some/path"\) is a string-like value, but it should be a file set or a path instead.
\s*Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.' \s*Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
expectFailure 'toSource { root = ./.; fileset = cleanSourceWith { src = ./.; }; }' 'lib.fileset.toSource: `fileset` is a `lib.sources`-based value, but it should be a file set or a path instead.
\s*To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
\s*Note that this only works for sources created from paths.'
# Path coercion errors for non-existent paths # Path coercion errors for non-existent paths
expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` \('"$work"'/a\) is a path that does not exist.' expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` \('"$work"'/a\) is a path that does not exist.'
@ -995,6 +1020,217 @@ touch 0 "${filesToCreate[@]}"
expectTrace 'unions (mapAttrsToList (n: _: ./. + "/${n}") (removeAttrs (builtins.readDir ./.) [ "0" ]))' "$expectedTrace" expectTrace 'unions (mapAttrsToList (n: _: ./. + "/${n}") (removeAttrs (builtins.readDir ./.) [ "0" ]))' "$expectedTrace"
rm -rf -- * rm -rf -- *
## lib.fileset.fromSource
# Check error messages
expectFailure 'fromSource null' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
expectFailure 'fromSource (lib.cleanSource "")' 'lib.fileset.fromSource: The source origin of the argument is a string-like value \(""\), but it should be a path instead.
\s*Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.'
expectFailure 'fromSource (lib.cleanSource null)' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
# fromSource on a path works and is the same as coercing that path
mkdir a
touch a/b c
expectEqual 'trace (fromSource ./.) null' 'trace ./. null'
rm -rf -- *
# Check that converting to a file set doesn't read the included files
mkdir a
touch a/b
run() {
expectEqual "trace (fromSource (lib.cleanSourceWith { src = ./a; })) null" "builtins.trace \"$work/a (all files in directory)\" null"
rm a/b
}
withFileMonitor run a/b
rm -rf -- *
# Check that converting to a file set doesn't read entries for directories that are filtered out
mkdir -p a/b
touch a/b/c
run() {
expectEqual "trace (fromSource (lib.cleanSourceWith {
src = ./a;
filter = pathString: type: false;
})) null" "builtins.trace \"(empty)\" null"
rm a/b/c
rmdir a/b
}
withFileMonitor run a/b
rm -rf -- *
# The filter is not needed on empty directories
expectEqual 'trace (fromSource (lib.cleanSourceWith {
src = ./.;
filter = abort "filter should not be needed";
})) null' 'trace _emptyWithoutBase null'
# Single files also work
touch a b
expectEqual 'trace (fromSource (cleanSourceWith { src = ./a; })) null' 'trace ./a null'
rm -rf -- *
# For a tree assigning each subpath true/false,
# check whether a source filter with those results includes the same files
# as a file set created using fromSource. Usage:
#
# tree=(
# [a]=1 # ./a is a file and the filter should return true for it
# [b/]=0 # ./b is a directory and the filter should return false for it
# )
# checkSource
checkSource() {
createTree
# Serialise the tree as JSON (there's only minimal savings with jq,
# and we don't need to handle escapes)
{
echo "{"
first=1
for p in "${!tree[@]}"; do
if [[ -z "$first" ]]; then
echo ","
else
first=
fi
echo "\"$p\":"
case "${tree[$p]}" in
1)
echo "true"
;;
0)
echo "false"
;;
*)
die "Unsupported tree value: ${tree[$p]}"
esac
done
echo "}"
} > "$tmp/tree.json"
# An expression to create a source value with a filter matching the tree
sourceExpr='
let
tree = importJSON '"$tmp"'/tree.json;
in
cleanSourceWith {
src = ./.;
filter =
pathString: type:
let
stripped = removePrefix (toString ./. + "/") pathString;
key = stripped + optionalString (type == "directory") "/";
in
tree.${key} or
(throw "tree key ${key} missing");
}
'
filesetExpr='
toSource {
root = ./.;
fileset = fromSource ('"$sourceExpr"');
}
'
# Turn both into store paths
sourceStorePath=$(expectStorePath "$sourceExpr")
filesetStorePath=$(expectStorePath "$filesetExpr")
# Loop through each path in the tree
while IFS= read -r -d $'\0' subpath; do
if [[ ! -e "$sourceStorePath"/"$subpath" ]]; then
# If it's not in the source store path, it's also not in the file set store path
if [[ -e "$filesetStorePath"/"$subpath" ]]; then
die "The store path $sourceStorePath created by $expr doesn't contain $subpath, but the corresponding store path $filesetStorePath created via fromSource does contain $subpath"
fi
elif [[ -z "$(find "$sourceStorePath"/"$subpath" -type f)" ]]; then
# If it's an empty directory in the source store path, it shouldn't be in the file set store path
if [[ -e "$filesetStorePath"/"$subpath" ]]; then
die "The store path $sourceStorePath created by $expr contains the path $subpath without any files, but the corresponding store path $filesetStorePath created via fromSource didn't omit it"
fi
else
# If it's non-empty directory or a file, it should be in the file set store path
if [[ ! -e "$filesetStorePath"/"$subpath" ]]; then
die "The store path $sourceStorePath created by $expr contains the non-empty path $subpath, but the corresponding store path $filesetStorePath created via fromSource doesn't include it"
fi
fi
done < <(find . -mindepth 1 -print0)
rm -rf -- *
}
# Check whether the filter is evaluated correctly
tree=(
[a]=
[b/]=
[b/c]=
[b/d]=
[e/]=
[e/e/]=
)
# We fill out the above tree values with all possible combinations of 0 and 1
# Then check whether a filter based on those return values gets turned into the corresponding file set
for i in $(seq 0 $((2 ** ${#tree[@]} - 1 ))); do
for p in "${!tree[@]}"; do
tree[$p]=$(( i % 2 ))
(( i /= 2 )) || true
done
checkSource
done
# The filter is called with the same arguments in the same order
mkdir a e
touch a/b a/c d e
expectEqual '
trace (fromSource (cleanSourceWith {
src = ./.;
filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
})) null
' '
builtins.seq (cleanSourceWith {
src = ./.;
filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
}).outPath
builtins.trace "'"$work"' (all files in directory)"
null
'
rm -rf -- *
# Test that if a directory is not included, the filter isn't called on its contents
mkdir a b
touch a/c b/d
expectEqual 'trace (fromSource (cleanSourceWith {
src = ./.;
filter = pathString: type:
if pathString == toString ./a then
false
else if pathString == toString ./b then
true
else if pathString == toString ./b/d then
true
else
abort "This filter should not be called with path ${pathString}";
})) null' 'trace (_create ./. { b = "directory"; }) null'
rm -rf -- *
# The filter is called lazily:
# If a later say intersection removes a part of the tree, the filter won't run on it
mkdir a d
touch a/{b,c} d/e
expectEqual 'trace (intersection ./a (fromSource (lib.cleanSourceWith {
src = ./.;
filter = pathString: type:
if pathString == toString ./a || pathString == toString ./a/b then
true
else if pathString == toString ./a/c then
false
else
abort "filter should not be called on ${pathString}";
}))) null' 'trace ./a/b null'
rm -rf -- *
# TODO: Once we have combinators and a property testing library, derive property tests from https://en.wikipedia.org/wiki/Algebra_of_sets # TODO: Once we have combinators and a property testing library, derive property tests from https://en.wikipedia.org/wiki/Algebra_of_sets
echo >&2 tests ok echo >&2 tests ok

View File

@ -57,6 +57,22 @@ using:
Once the connection is established, you can enter commands in the socat terminal Once the connection is established, you can enter commands in the socat terminal
where socat is running. where socat is running.
## Port forwarding to NixOS test VMs {#sec-nixos-test-port-forwarding}
If your test has only a single VM, you may use e.g.
```ShellSession
$ QEMU_NET_OPTS="hostfwd=tcp:127.0.0.1:2222-127.0.0.1:22" ./result/bin/nixos-test-driver
```
to port-forward a port in the VM (here `22`) to the host machine (here port `2222`).
This naturally does not work when multiple machines are involved,
since a single port on the host cannot forward to multiple VMs.
If the test defines multiple machines, you may opt to _temporarily_ set
`virtualisation.forwardPorts` in the test definition for debugging.
## Reuse VM state {#sec-nixos-test-reuse-vm-state} ## Reuse VM state {#sec-nixos-test-reuse-vm-state}
You can re-use the VM states coming from a previous run by setting the You can re-use the VM states coming from a previous run by setting the

View File

@ -89,7 +89,7 @@ guest. For instance, the following will forward host port 2222 to guest
port 22 (SSH): port 22 (SSH):
```ShellSession ```ShellSession
$ QEMU_NET_OPTS="hostfwd=tcp::2222-:22" ./result/bin/run-*-vm $ QEMU_NET_OPTS="hostfwd=tcp:127.0.0.1:2222-127.0.0.1:22" ./result/bin/run-*-vm
``` ```
allowing you to log in via SSH (assuming you have set the appropriate allowing you to log in via SSH (assuming you have set the appropriate

View File

@ -33,6 +33,8 @@
- All [ROCm](https://rocm.docs.amd.com/en/latest/) packages have been updated to 5.7.0. - All [ROCm](https://rocm.docs.amd.com/en/latest/) packages have been updated to 5.7.0.
- [ROCm](https://rocm.docs.amd.com/en/latest/) package attribute sets are versioned: `rocmPackages` -> `rocmPackages_5`. - [ROCm](https://rocm.docs.amd.com/en/latest/) package attribute sets are versioned: `rocmPackages` -> `rocmPackages_5`.
- `yarn-berry` has been updated to 4.0.1. This means that NodeJS versions less than `18.12` are no longer supported by it. More details at the [upstream changelog](https://github.com/yarnpkg/berry/blob/master/CHANGELOG.md).
- If the user has a custom shell enabled via `users.users.${USERNAME}.shell = ${CUSTOMSHELL}`, the - If the user has a custom shell enabled via `users.users.${USERNAME}.shell = ${CUSTOMSHELL}`, the
assertion will require them to also set `programs.${CUSTOMSHELL}.enable = assertion will require them to also set `programs.${CUSTOMSHELL}.enable =
true`. This is generally safe behavior, but for anyone needing to opt out from true`. This is generally safe behavior, but for anyone needing to opt out from
@ -373,6 +375,8 @@
- The `junicode` font package has been updated to [major version 2](https://github.com/psb1558/Junicode-font/releases/tag/v2.001), which is now a font family. In particular, plain `Junicode.ttf` no longer exists. In addition, TrueType font files are now placed in `font/truetype` instead of `font/junicode-ttf`; this change does not affect use via `fonts.packages` NixOS option. - The `junicode` font package has been updated to [major version 2](https://github.com/psb1558/Junicode-font/releases/tag/v2.001), which is now a font family. In particular, plain `Junicode.ttf` no longer exists. In addition, TrueType font files are now placed in `font/truetype` instead of `font/junicode-ttf`; this change does not affect use via `fonts.packages` NixOS option.
- The `prayer` package as well as `services.prayer` have been removed because it's been unmaintained for several years and the author's website has vanished.
## Other Notable Changes {#sec-release-23.11-notable-changes} ## Other Notable Changes {#sec-release-23.11-notable-changes}
- A new option `system.switch.enable` was added. By default, this option is - A new option `system.switch.enable` was added. By default, this option is
@ -525,6 +529,8 @@ The module update takes care of the new config syntax and the data itself (user
- `services.bitcoind` now properly respects the `enable` option. - `services.bitcoind` now properly respects the `enable` option.
- The Home Assistant module now offers support for installing custom components and lovelace modules. Available at [`services.home-assistant.customComponents`](#opt-services.home-assistant.customComponents) and [`services.home-assistant.customLovelaceModules`](#opt-services.home-assistant.customLovelaceModules).
## Nixpkgs internals {#sec-release-23.11-nixpkgs-internals} ## Nixpkgs internals {#sec-release-23.11-nixpkgs-internals}
- The use of `sourceRoot = "source";`, `sourceRoot = "source/subdir";`, and similar lines in package derivations using the default `unpackPhase` is deprecated as it requires `unpackPhase` to always produce a directory named "source". Use `sourceRoot = src.name`, `sourceRoot = "${src.name}/subdir";`, or `setSourceRoot = "sourceRoot=$(echo */subdir)";` or similar instead. - The use of `sourceRoot = "source";`, `sourceRoot = "source/subdir";`, and similar lines in package derivations using the default `unpackPhase` is deprecated as it requires `unpackPhase` to always produce a directory named "source". Use `sourceRoot = src.name`, `sourceRoot = "${src.name}/subdir";`, or `setSourceRoot = "sourceRoot=$(echo */subdir)";` or similar instead.

View File

@ -86,7 +86,7 @@ in
#rtkit = 45; # dynamically allocated 2021-09-03 #rtkit = 45; # dynamically allocated 2021-09-03
dovecot2 = 46; dovecot2 = 46;
dovenull2 = 47; dovenull2 = 47;
prayer = 49; # prayer = 49; # dropped in 23.11
mpd = 50; mpd = 50;
clamav = 51; clamav = 51;
#fprot = 52; # unused #fprot = 52; # unused
@ -411,7 +411,7 @@ in
#rtkit = 45; # unused #rtkit = 45; # unused
dovecot2 = 46; dovecot2 = 46;
dovenull2 = 47; dovenull2 = 47;
prayer = 49; # prayer = 49; # dropped in 23.11
mpd = 50; mpd = 50;
clamav = 51; clamav = 51;
#fprot = 52; # unused #fprot = 52; # unused

View File

@ -1041,7 +1041,6 @@
./services/networking/powerdns.nix ./services/networking/powerdns.nix
./services/networking/pppd.nix ./services/networking/pppd.nix
./services/networking/pptpd.nix ./services/networking/pptpd.nix
./services/networking/prayer.nix
./services/networking/privoxy.nix ./services/networking/privoxy.nix
./services/networking/prosody.nix ./services/networking/prosody.nix
./services/networking/quassel.nix ./services/networking/quassel.nix

View File

@ -111,6 +111,7 @@ in
(mkRemovedOptionModule [ "services" "riak" ] "The corresponding package was removed from nixpkgs.") (mkRemovedOptionModule [ "services" "riak" ] "The corresponding package was removed from nixpkgs.")
(mkRemovedOptionModule [ "services" "cryptpad" ] "The corresponding package was removed from nixpkgs.") (mkRemovedOptionModule [ "services" "cryptpad" ] "The corresponding package was removed from nixpkgs.")
(mkRemovedOptionModule [ "services" "rtsp-simple-server" ] "Package has been completely rebranded by upstream as mediamtx, and thus the service and the package were renamed in NixOS as well.") (mkRemovedOptionModule [ "services" "rtsp-simple-server" ] "Package has been completely rebranded by upstream as mediamtx, and thus the service and the package were renamed in NixOS as well.")
(mkRemovedOptionModule [ "services" "prayer" ] "The corresponding package was removed from nixpkgs.")
(mkRemovedOptionModule [ "i18n" "inputMethod" "fcitx" ] "The fcitx module has been removed. Please use fcitx5 instead") (mkRemovedOptionModule [ "i18n" "inputMethod" "fcitx" ] "The fcitx module has been removed. Please use fcitx5 instead")
(mkRemovedOptionModule [ "services" "dhcpd4" ] '' (mkRemovedOptionModule [ "services" "dhcpd4" ] ''

View File

@ -51,7 +51,7 @@ with lib;
}) })
(mkIf (!config.services.gnome.at-spi2-core.enable) { (mkIf (!config.services.gnome.at-spi2-core.enable) {
environment.variables = { environment.sessionVariables = {
NO_AT_BRIDGE = "1"; NO_AT_BRIDGE = "1";
GTK_A11Y = "none"; GTK_A11Y = "none";
}; };

View File

@ -16,7 +16,8 @@ let
cp ${format.generate "configuration.yaml" filteredConfig} $out cp ${format.generate "configuration.yaml" filteredConfig} $out
sed -i -e "s/'\!\([a-z_]\+\) \(.*\)'/\!\1 \2/;s/^\!\!/\!/;" $out sed -i -e "s/'\!\([a-z_]\+\) \(.*\)'/\!\1 \2/;s/^\!\!/\!/;" $out
''; '';
lovelaceConfig = cfg.lovelaceConfig or {}; lovelaceConfig = if (cfg.lovelaceConfig == null) then {}
else (lib.recursiveUpdate customLovelaceModulesResources cfg.lovelaceConfig);
lovelaceConfigFile = format.generate "ui-lovelace.yaml" lovelaceConfig; lovelaceConfigFile = format.generate "ui-lovelace.yaml" lovelaceConfig;
# Components advertised by the home-assistant package # Components advertised by the home-assistant package
@ -62,8 +63,24 @@ let
# Respect overrides that already exist in the passed package and # Respect overrides that already exist in the passed package and
# concat it with values passed via the module. # concat it with values passed via the module.
extraComponents = oldArgs.extraComponents or [] ++ extraComponents; extraComponents = oldArgs.extraComponents or [] ++ extraComponents;
extraPackages = ps: (oldArgs.extraPackages or (_: []) ps) ++ (cfg.extraPackages ps); extraPackages = ps: (oldArgs.extraPackages or (_: []) ps)
++ (cfg.extraPackages ps)
++ (lib.concatMap (component: component.propagatedBuildInputs or []) cfg.customComponents);
})); }));
# Create a directory that holds all lovelace modules
customLovelaceModulesDir = pkgs.buildEnv {
name = "home-assistant-custom-lovelace-modules";
paths = cfg.customLovelaceModules;
};
# Create parts of the lovelace config that reference lovelace modules as resources
customLovelaceModulesResources = {
lovelace.resources = map (card: {
url = "/local/nixos-lovelace-modules/${card.entrypoint or card.pname}.js?${card.version}";
type = "module";
}) cfg.customLovelaceModules;
};
in { in {
imports = [ imports = [
# Migrations in NixOS 22.05 # Migrations in NixOS 22.05
@ -137,6 +154,41 @@ in {
''; '';
}; };
customComponents = mkOption {
type = types.listOf types.package;
default = [];
example = literalExpression ''
with pkgs.home-assistant-custom-components; [
prometheus-sensor
];
'';
description = lib.mdDoc ''
List of custom component packages to install.
Available components can be found below `pkgs.home-assistant-custom-components`.
'';
};
customLovelaceModules = mkOption {
type = types.listOf types.package;
default = [];
example = literalExpression ''
with pkgs.home-assistant-custom-lovelace-modules; [
mini-graph-card
mini-media-player
];
'';
description = lib.mdDoc ''
List of custom lovelace card packages to load as lovelace resources.
Available cards can be found below `pkgs.home-assistant-custom-lovelace-modules`.
::: {.note}
Automatic loading only works with lovelace in `yaml` mode.
:::
'';
};
config = mkOption { config = mkOption {
type = types.nullOr (types.submodule { type = types.nullOr (types.submodule {
freeformType = format.type; freeformType = format.type;
@ -408,9 +460,35 @@ in {
rm -f "${cfg.configDir}/ui-lovelace.yaml" rm -f "${cfg.configDir}/ui-lovelace.yaml"
ln -s /etc/home-assistant/ui-lovelace.yaml "${cfg.configDir}/ui-lovelace.yaml" ln -s /etc/home-assistant/ui-lovelace.yaml "${cfg.configDir}/ui-lovelace.yaml"
''; '';
copyCustomLovelaceModules = if cfg.customLovelaceModules != [] then ''
mkdir -p "${cfg.configDir}/www"
ln -fns ${customLovelaceModulesDir} "${cfg.configDir}/www/nixos-lovelace-modules"
'' else ''
rm -f "${cfg.configDir}/www/nixos-lovelace-modules"
'';
copyCustomComponents = ''
mkdir -p "${cfg.configDir}/custom_components"
# remove components symlinked in from below the /nix/store
components="$(find "${cfg.configDir}/custom_components" -maxdepth 1 -type l)"
for component in "$components"; do
if [[ "$(readlink "$component")" =~ ^${escapeShellArg builtins.storeDir} ]]; then
rm "$component"
fi
done
# recreate symlinks for desired components
declare -a components=(${escapeShellArgs cfg.customComponents})
for component in "''${components[@]}"; do
path="$(dirname $(find "$component" -name "manifest.json"))"
ln -fns "$path" "${cfg.configDir}/custom_components/"
done
'';
in in
(optionalString (cfg.config != null) copyConfig) + (optionalString (cfg.config != null) copyConfig) +
(optionalString (cfg.lovelaceConfig != null) copyLovelaceConfig) (optionalString (cfg.lovelaceConfig != null) copyLovelaceConfig) +
copyCustomLovelaceModules +
copyCustomComponents
; ;
environment.PYTHONPATH = package.pythonPath; environment.PYTHONPATH = package.pythonPath;
serviceConfig = let serviceConfig = let

View File

@ -1,90 +0,0 @@
{ config, lib, pkgs, ... }:
with lib;
let
inherit (pkgs) prayer;
cfg = config.services.prayer;
stateDir = "/var/lib/prayer";
prayerUser = "prayer";
prayerGroup = "prayer";
prayerExtraCfg = pkgs.writeText "extraprayer.cf" ''
prefix = "${prayer}"
var_prefix = "${stateDir}"
prayer_user = "${prayerUser}"
prayer_group = "${prayerGroup}"
sendmail_path = "/run/wrappers/bin/sendmail"
use_http_port ${cfg.port}
${cfg.extraConfig}
'';
prayerCfg = pkgs.runCommand "prayer.cf" { preferLocalBuild = true; } ''
# We have to remove the http_port 80, or it will start a server there
cat ${prayer}/etc/prayer.cf | grep -v http_port > $out
cat ${prayerExtraCfg} >> $out
'';
in
{
###### interface
options = {
services.prayer = {
enable = mkEnableOption (lib.mdDoc "the prayer webmail http server");
port = mkOption {
default = 2080;
type = types.port;
description = lib.mdDoc ''
Port the prayer http server is listening to.
'';
};
extraConfig = mkOption {
type = types.lines;
default = "" ;
description = lib.mdDoc ''
Extra configuration. Contents will be added verbatim to the configuration file.
'';
};
};
};
###### implementation
config = mkIf config.services.prayer.enable {
environment.systemPackages = [ prayer ];
users.users.${prayerUser} =
{ uid = config.ids.uids.prayer;
description = "Prayer daemon user";
home = stateDir;
};
users.groups.${prayerGroup} =
{ gid = config.ids.gids.prayer; };
systemd.services.prayer = {
wantedBy = [ "multi-user.target" ];
serviceConfig.Type = "forking";
preStart = ''
mkdir -m 0755 -p ${stateDir}
chown ${prayerUser}:${prayerGroup} ${stateDir}
'';
script = "${prayer}/sbin/prayer --config-file=${prayerCfg}";
};
};
}

View File

@ -43,6 +43,16 @@ in {
psycopg2 psycopg2
]; ];
# test loading custom components
customComponents = with pkgs.home-assistant-custom-components; [
prometheus-sensor
];
# test loading lovelace modules
customLovelaceModules = with pkgs.home-assistant-custom-lovelace-modules; [
mini-graph-card
];
config = { config = {
homeassistant = { homeassistant = {
name = "Home"; name = "Home";
@ -114,6 +124,14 @@ in {
inheritParentConfig = true; inheritParentConfig = true;
configuration.services.home-assistant.config.backup = {}; configuration.services.home-assistant.config.backup = {};
}; };
specialisation.removeCustomThings = {
inheritParentConfig = true;
configuration.services.home-assistant = {
customComponents = lib.mkForce [];
customLovelaceModules = lib.mkForce [];
};
};
}; };
testScript = { nodes, ... }: let testScript = { nodes, ... }: let
@ -161,6 +179,14 @@ in {
hass.wait_for_open_port(8123) hass.wait_for_open_port(8123)
hass.succeed("curl --fail http://localhost:8123/lovelace") hass.succeed("curl --fail http://localhost:8123/lovelace")
with subtest("Check that custom components get installed"):
hass.succeed("test -f ${configDir}/custom_components/prometheus_sensor/manifest.json")
hass.wait_until_succeeds("journalctl -u home-assistant.service | grep -q 'We found a custom integration prometheus_sensor which has not been tested by Home Assistant'")
with subtest("Check that lovelace modules are referenced and fetchable"):
hass.succeed("grep -q 'mini-graph-card-bundle.js' '${configDir}/ui-lovelace.yaml'")
hass.succeed("curl --fail http://localhost:8123/local/nixos-lovelace-modules/mini-graph-card-bundle.js")
with subtest("Check that optional dependencies are in the PYTHONPATH"): with subtest("Check that optional dependencies are in the PYTHONPATH"):
env = get_unit_property("Environment") env = get_unit_property("Environment")
python_path = env.split("PYTHONPATH=")[1].split()[0] python_path = env.split("PYTHONPATH=")[1].split()[0]
@ -200,6 +226,13 @@ in {
for domain in ["backup"]: for domain in ["backup"]:
assert f"Setup of domain {domain} took" in journal, f"{domain} setup missing" assert f"Setup of domain {domain} took" in journal, f"{domain} setup missing"
with subtest("Check custom components and custom lovelace modules get removed"):
cursor = get_journal_cursor()
hass.succeed("${system}/specialisation/removeCustomThings/bin/switch-to-configuration test")
hass.fail("grep -q 'mini-graph-card-bundle.js' '${configDir}/ui-lovelace.yaml'")
hass.fail("test -f ${configDir}/custom_components/prometheus_sensor/manifest.json")
wait_for_homeassistant(cursor)
with subtest("Check that no errors were logged"): with subtest("Check that no errors were logged"):
hass.fail("journalctl -u home-assistant -o cat | grep -q ERROR") hass.fail("journalctl -u home-assistant -o cat | grep -q ERROR")

View File

@ -1,4 +1,4 @@
{ lib, stdenv, fetchurl, libogg, libvorbis, libao, pkg-config, curl { lib, stdenv, fetchurl, fetchpatch, libogg, libvorbis, libao, pkg-config, curl, libiconv
, speex, flac , speex, flac
, autoreconfHook }: , autoreconfHook }:
@ -11,12 +11,18 @@ stdenv.mkDerivation rec {
sha256 = "1c7h4ivgfdyygz2hyh6nfibxlkz8kdk868a576qkkjgj5gn78xyv"; sha256 = "1c7h4ivgfdyygz2hyh6nfibxlkz8kdk868a576qkkjgj5gn78xyv";
}; };
nativeBuildInputs = [ autoreconfHook pkg-config ]; patches = lib.optionals stdenv.cc.isClang [
buildInputs = [ libogg libvorbis libao curl speex flac ]; # Fixes a call to undeclared function `utf8_decode`.
# https://github.com/xiph/vorbis-tools/pull/33
(fetchpatch {
url = "https://github.com/xiph/vorbis-tools/commit/8a645f78b45ae7e370c0dc2a52d0f2612aa6110b.patch";
hash = "sha256-RkT9Xa0pRu/oO9E9qhDa17L0luWgYHI2yINIkPZanmI=";
})
];
env = lib.optionalAttrs stdenv.cc.isClang { nativeBuildInputs = [ autoreconfHook pkg-config ];
NIX_CFLAGS_COMPILE = "-Wno-error=implicit-function-declaration"; buildInputs = [ libogg libvorbis libao curl speex flac ]
}; ++ lib.optionals stdenv.isDarwin [ libiconv ];
meta = with lib; { meta = with lib; {
description = "Extra tools for Ogg-Vorbis audio codec"; description = "Extra tools for Ogg-Vorbis audio codec";

View File

@ -1575,8 +1575,8 @@ let
mktplcRef = { mktplcRef = {
publisher = "github"; publisher = "github";
name = "copilot"; name = "copilot";
version = "1.126.493"; version = "1.135.544";
sha256 = "1an7z8z3xz2piw2xz1hdrs6l5rhpyvnjmb650ff2m4k24n01svfy"; sha256 = "sha256-OeG1nkQbQAfu8NuDEA+iaWy0ioFyXPe7Qm/CZIKPiX8=";
}; };
meta = { meta = {
@ -1592,8 +1592,8 @@ let
mktplcRef = { mktplcRef = {
publisher = "github"; publisher = "github";
name = "copilot-chat"; name = "copilot-chat";
version = "0.3.2023061502"; version = "0.11.2023111001";
sha256 = "sha256-sUoKwlPDMz+iQbmIsD2JhyDwmUQzOyCHXaXCUaizQ7k="; sha256 = "sha256-sBDvqqyq0R0ZyS81G61fI9Vd860RIjhNzCqY0bdz1mg=";
}; };
meta = { meta = {
description = "GitHub Copilot Chat is a companion extension to GitHub Copilot that houses experimental chat features"; description = "GitHub Copilot Chat is a companion extension to GitHub Copilot that houses experimental chat features";
@ -3554,8 +3554,8 @@ let
mktplcRef = { mktplcRef = {
name = "uiua-vscode"; name = "uiua-vscode";
publisher = "uiua-lang"; publisher = "uiua-lang";
version = "0.0.22"; version = "0.0.23";
sha256 = "sha256-fJcSJwwRVofduWEEMa5f2VrSfyONKPkFl9OW+++lSRw="; sha256 = "sha256-NauXoYTAka8qXNPYlW5g7r6NNX1x8cnvDRbEGkRsMoY=";
}; };
meta = { meta = {
description = "VSCode language extension for Uiua"; description = "VSCode language extension for Uiua";

View File

@ -6,35 +6,24 @@
, makeWrapper , makeWrapper
, alsa-lib , alsa-lib
, curl , curl
, egl-wayland
, libao , libao
, libdecor
, libevdev
, libffi
, libGL
, libpulseaudio , libpulseaudio
, libX11
, libXext
, libxkbcommon
, libzip , libzip
, mesa , lua
, miniupnpc , miniupnpc
, udev , SDL2
, vulkan-headers
, vulkan-loader , vulkan-loader
, wayland
, zlib
}: }:
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "flycast"; pname = "flycast";
version = "2.1"; version = "2.2";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "flyinghead"; owner = "flyinghead";
repo = "flycast"; repo = "flycast";
rev = "V${version}"; rev = "v${version}";
sha256 = "sha256-PRInOqg9OpaUVLwSj1lOxDtjpVaYehkRsp0jLrVKPyY="; sha256 = "sha256-eQMKaUaZ1b0oXre4Ouli4qIyNaG64KntyRGk3/YIopc=";
fetchSubmodules = true; fetchSubmodules = true;
}; };
@ -47,23 +36,16 @@ stdenv.mkDerivation rec {
buildInputs = [ buildInputs = [
alsa-lib alsa-lib
curl curl
egl-wayland
libao libao
libdecor
libevdev
libffi
libGL
libpulseaudio libpulseaudio
libX11
libXext
libxkbcommon
libzip libzip
mesa # for libgbm lua
miniupnpc miniupnpc
udev SDL2
vulkan-headers ];
wayland
zlib cmakeFlags = [
"-DUSE_HOST_SDL=ON"
]; ];
postFixup = '' postFixup = ''

View File

@ -0,0 +1,21 @@
diff -ur a/db/drivers/mysql/db.c b/db/drivers/mysql/db.c
--- a/db/drivers/mysql/db.c 1969-12-31 19:00:01.000000000 -0500
+++ b/db/drivers/mysql/db.c 2023-11-09 23:26:25.329700495 -0500
@@ -52,9 +52,16 @@
db_get_login2("mysql", name, &user, &password, &host, &port);
+ const char* errstr;
+ unsigned int port_number = (unsigned int)strtonum(port, 0, 65536, &errstr);
+ if (errstr != NULL) {
+ db_d_append_error("%s", errstr);
+ return DB_FAILED;
+ }
+
connection = mysql_init(NULL);
res = mysql_real_connect(connection, host, user, password,
- connpar.dbname, port, NULL, 0);
+ connpar.dbname, port_number, NULL, 0);
if (res == NULL) {
db_d_append_error("%s\n%s", _("Connection failed."),

View File

@ -81,12 +81,13 @@ stdenv.mkDerivation (finalAttrs: {
strictDeps = true; strictDeps = true;
# On Darwin the installer tries to symlink the help files into a system patches = lib.optionals stdenv.isDarwin [
# directory # Fix conversion of const char* to unsigned int.
patches = [ ./no_symbolic_links.patch ]; ./clang-integer-conversion.patch
];
# Correct mysql_config query # Correct mysql_config query
patchPhase = '' postPatch = ''
substituteInPlace configure --replace "--libmysqld-libs" "--libs" substituteInPlace configure --replace "--libmysqld-libs" "--libs"
''; '';

View File

@ -1,37 +0,0 @@
diff --git a/include/Make/Install.make b/include/Make/Install.make
index 0aba138..8ba74bc 100644
--- a/include/Make/Install.make
+++ b/include/Make/Install.make
@@ -116,11 +116,6 @@ real-install: | $(INST_DIR) $(UNIX_BIN)
-$(INSTALL) config.status $(INST_DIR)/config.status
-$(CHMOD) -R a+rX $(INST_DIR) 2>/dev/null
-ifneq ($(findstring darwin,$(ARCH)),)
- @# enable OSX Help Viewer
- @/bin/ln -sfh "$(INST_DIR)/docs/html" /Library/Documentation/Help/GRASS-$(GRASS_VERSION_MAJOR).$(GRASS_VERSION_MINOR)
-endif
-
$(INST_DIR) $(UNIX_BIN):
$(MAKE_DIR_CMD) $@
diff --git a/macosx/app/build_html_user_index.sh b/macosx/app/build_html_user_index.sh
index 04e63eb..c9d9c2c 100755
--- a/macosx/app/build_html_user_index.sh
+++ b/macosx/app/build_html_user_index.sh
@@ -140,7 +140,6 @@ else
# echo "<tr><td valign=\"top\"><a href=\"$HTMLDIRG/$i\">$BASENAME</a></td> <td>$SHORTDESC</td></tr>" >> $FULLINDEX
# make them local to user to simplify page links
echo "<tr><td valign=\"top\"><a href=\"global_$i\">$BASENAME</a></td> <td>$SHORTDESC</td></tr>" >> $FULLINDEX
- ln -sf "$HTMLDIRG/$i" global_$i
done
done
fi
@@ -183,8 +182,3 @@ echo "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\">
</html>" > $i.html
done
-# add Help Viewer links in user docs folder
-
-mkdir -p $HOME/Library/Documentation/Help/
-ln -sfh ../../GRASS/$GRASS_MMVER/Modules/docs/html $HOME/Library/Documentation/Help/GRASS-$GRASS_MMVER-addon
-ln -sfh $GISBASE/docs/html $HOME/Library/Documentation/Help/GRASS-$GRASS_MMVER

View File

@ -9,11 +9,11 @@
stdenvNoCC.mkDerivation rec { stdenvNoCC.mkDerivation rec {
pname = "camunda-modeler"; pname = "camunda-modeler";
version = "5.16.0"; version = "5.17.0";
src = fetchurl { src = fetchurl {
url = "https://github.com/camunda/camunda-modeler/releases/download/v${version}/camunda-modeler-${version}-linux-x64.tar.gz"; url = "https://github.com/camunda/camunda-modeler/releases/download/v${version}/camunda-modeler-${version}-linux-x64.tar.gz";
hash = "sha256-Y+v/r5bhtgXBjRQic0s5FA+KMWx5R7DOK+qZ9Izdnb0="; hash = "sha256-yxph3Aor5nZOhu2PY4MGcfScaz9w24JXqXbhT+QKlNI=";
}; };
sourceRoot = "camunda-modeler-${version}-linux-x64"; sourceRoot = "camunda-modeler-${version}-linux-x64";

View File

@ -10,11 +10,11 @@
}: }:
let let
pname = "jetbrains-toolbox"; pname = "jetbrains-toolbox";
version = "2.0.5.17700"; version = "2.1.0.18144";
src = fetchzip { src = fetchzip {
url = "https://download.jetbrains.com/toolbox/jetbrains-toolbox-${version}.tar.gz"; url = "https://download.jetbrains.com/toolbox/jetbrains-toolbox-${version}.tar.gz";
sha256 = "sha256-BO9W9miQUltsg1tCyTl9j5xRCJUCsO02hUKDCYt7hd8="; sha256 = "sha256-K65naW+RWAy4uxQq2GQmL0kwCH+G73ez1kgTtnTwjEw=";
stripRoot = false; stripRoot = false;
}; };

View File

@ -1,38 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index ce78a9d..3cd51e0 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -8,18 +8,21 @@ list(APPEND CMAKE_PREFIX_PATH ${CMAKE_BINARY_DIR})
# Common configuration
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
-
-# Setup Conan
-if(NOT EXISTS "${CMAKE_BINARY_DIR}/conan.cmake")
- message(STATUS "Downloading conan.cmake from https://github.com/conan-io/cmake-conan")
- file(DOWNLOAD "https://raw.githubusercontent.com/conan-io/cmake-conan/0.18.1/conan.cmake"
- "${CMAKE_BINARY_DIR}/conan.cmake"
- TLS_VERIFY ON)
-endif()
-include(${CMAKE_BINARY_DIR}/conan.cmake)
-
-conan_cmake_autodetect(settings)
-conan_cmake_install(PATH_OR_REFERENCE ${CMAKE_SOURCE_DIR} BUILD missing SETTINGS ${settings})
+set(USE_CONAN ON CACHE BOOL "Use conan for dependency managment")
+
+if(USE_CONAN)
+ # Setup Conan
+ if(NOT EXISTS "${CMAKE_BINARY_DIR}/conan.cmake")
+ message(STATUS "Downloading conan.cmake from https://github.com/conan-io/cmake-conan")
+ file(DOWNLOAD "https://raw.githubusercontent.com/conan-io/cmake-conan/0.18.1/conan.cmake"
+ "${CMAKE_BINARY_DIR}/conan.cmake"
+ TLS_VERIFY ON)
+ endif()
+ include(${CMAKE_BINARY_DIR}/conan.cmake)
+
+ conan_cmake_autodetect(settings)
+ conan_cmake_install(PATH_OR_REFERENCE ${CMAKE_SOURCE_DIR} BUILD missing SETTINGS ${settings})
+endif ()
# Setup Qt
set(CMAKE_AUTOMOC ON)

View File

@ -1,13 +1,16 @@
{ lib { lib
, stdenv , stdenv
, fetchFromGitHub , fetchFromGitHub
, fetchpatch
, cmake , cmake
, magic-enum , magic-enum
, range-v3
, spdlog , spdlog
, qtbase , qtbase
, qtconnectivity , qtconnectivity
, qttools , qttools
, qtlanguageserver , qtlanguageserver
, qtwayland
, wrapQtAppsHook , wrapQtAppsHook
, libXScrnSaver , libXScrnSaver
, nix-update-script , nix-update-script
@ -15,15 +18,24 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "kemai"; pname = "kemai";
version = "0.9.2"; version = "0.10.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "AlexandrePTJ"; owner = "AlexandrePTJ";
repo = "kemai"; repo = "kemai";
rev = version; rev = version;
hash = "sha256-PDjNO2iMPK0J3TSHVZ/DW3W0GkdB8yNZYoTGEd2snac="; hash = "sha256-wclBAgeDyAIw/nGF6lzIwbwdoZMBTu+tjxsnIxIkODM=";
}; };
patches = [
# Backport the fix for an issue where LICENSE.txt ends up in /bin
# Remove in next release
(fetchpatch {
url = "https://github.com/AlexandrePTJ/kemai/commit/e279679dd7308efebe004252d168d7308f3b99ce.patch";
hash = "sha256-5cmRRMVATf4ul4HhaQKiE0yTN2qd+MfNFQzGTLLpOyg=";
})
];
buildInputs = [ buildInputs = [
qtbase qtbase
qtconnectivity qtconnectivity
@ -31,10 +43,14 @@ stdenv.mkDerivation rec {
qtlanguageserver qtlanguageserver
libXScrnSaver libXScrnSaver
magic-enum magic-enum
range-v3
spdlog spdlog
] ++ lib.optional stdenv.hostPlatform.isLinux qtwayland;
cmakeFlags = [
"-DFETCHCONTENT_FULLY_DISCONNECTED=ON"
"-DFETCHCONTENT_QUIET=OFF"
"-DFETCHCONTENT_TRY_FIND_PACKAGE_MODE=ALWAYS"
]; ];
cmakeFlags = [ "-DUSE_CONAN=OFF" ];
patches = [ ./000-cmake-disable-conan.diff ];
nativeBuildInputs = [ cmake wrapQtAppsHook ]; nativeBuildInputs = [ cmake wrapQtAppsHook ];
@ -48,5 +64,7 @@ stdenv.mkDerivation rec {
license = licenses.mit; license = licenses.mit;
maintainers = with maintainers; [ poelzi ]; maintainers = with maintainers; [ poelzi ];
platforms = platforms.unix; platforms = platforms.unix;
broken = stdenv.isDarwin;
mainProgram = "Kemai";
}; };
} }

View File

@ -503,6 +503,9 @@ buildStdenv.mkDerivation {
preBuild = '' preBuild = ''
cd mozobj cd mozobj
'' + lib.optionalString (lib.versionAtLeast version "120") ''
# https://bugzilla.mozilla.org/show_bug.cgi?id=1864083
export NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE $(pkg-config dbus-1 --cflags)"
''; '';
postBuild = '' postBuild = ''

View File

@ -30,11 +30,11 @@
firefox-beta = buildMozillaMach rec { firefox-beta = buildMozillaMach rec {
pname = "firefox-beta"; pname = "firefox-beta";
version = "119.0b9"; version = "120.0b9";
applicationName = "Mozilla Firefox Beta"; applicationName = "Mozilla Firefox Beta";
src = fetchurl { src = fetchurl {
url = "mirror://mozilla/firefox/releases/${version}/source/firefox-${version}.source.tar.xz"; url = "mirror://mozilla/firefox/releases/${version}/source/firefox-${version}.source.tar.xz";
sha512 = "11d07474e3ca72a4e2f60053882e09a215e0d29d6830d0cd41447bb67370118356090af7adcbacd7703ad9fcdda83c9f909419c86b8f3bf2eacd9ca3d3aa3f54"; sha512 = "7ac5562ce393ea84663eac5c6ee1a0ca527ff4a8a9ec6aaaef37213ff071076846949e80af21d95ec8e32d3cbc740b772a9d7cc54965b7bbc8e015da22ae927f";
}; };
meta = { meta = {
@ -58,12 +58,12 @@
firefox-devedition = (buildMozillaMach rec { firefox-devedition = (buildMozillaMach rec {
pname = "firefox-devedition"; pname = "firefox-devedition";
version = "119.0b9"; version = "120.0b9";
applicationName = "Mozilla Firefox Developer Edition"; applicationName = "Mozilla Firefox Developer Edition";
branding = "browser/branding/aurora"; branding = "browser/branding/aurora";
src = fetchurl { src = fetchurl {
url = "mirror://mozilla/devedition/releases/${version}/source/firefox-${version}.source.tar.xz"; url = "mirror://mozilla/devedition/releases/${version}/source/firefox-${version}.source.tar.xz";
sha512 = "ce3e2adb3171aa05c7af3b7a4ea25eaafbc109c522b90e26aad577192a0902000fb7d705fa5707a9a7d0be2ab1c0cddc5a98abbe6549e1377c0a1d765bda62eb"; sha512 = "07bf1a58550e70c683719adef55fa3d1ee06876e0cb086c28242879c683269c4aa784b1dce639218b3ad24a546192088fe5224a52e13a0086f205ec5470e2428";
}; };
meta = { meta = {

View File

@ -18,7 +18,7 @@
stdenv.mkDerivation (finalAttrs: { stdenv.mkDerivation (finalAttrs: {
pname = "palemoon-bin"; pname = "palemoon-bin";
version = "32.4.1"; version = "32.5.0";
src = fetchzip { src = fetchzip {
urls = [ urls = [
@ -26,9 +26,9 @@ stdenv.mkDerivation (finalAttrs: {
"https://rm-us.palemoon.org/release/palemoon-${finalAttrs.version}.linux-x86_64-gtk${if withGTK3 then "3" else "2"}.tar.xz" "https://rm-us.palemoon.org/release/palemoon-${finalAttrs.version}.linux-x86_64-gtk${if withGTK3 then "3" else "2"}.tar.xz"
]; ];
hash = if withGTK3 then hash = if withGTK3 then
"sha256-c/rfnMpiLWqlNZppqPRNWXsgAQ1FofAdel5EFnK+mrY=" "sha256-1MJ5K9Zc/BHeQwwlq3XyUV8XTFEpPytNyTnsDpE1tBI="
else else
"sha256-27njFdqq2DUctlz/UOtH5tlOduQNpoapuCYS+48K9dk="; "sha256-xXunZTqoc2A+ilosRUUluxDwewD3xwITF5nb5Lbyv7Y=";
}; };
preferLocalBuild = true; preferLocalBuild = true;

View File

@ -1,4 +1,5 @@
{ lib { lib
, stdenv
, mkDerivationWith , mkDerivationWith
, fetchFromGitHub , fetchFromGitHub
, python3Packages , python3Packages
@ -6,6 +7,8 @@
}: }:
mkDerivationWith python3Packages.buildPythonApplication rec { mkDerivationWith python3Packages.buildPythonApplication rec {
inherit stdenv;
pname = "webmacs"; pname = "webmacs";
version = "0.8"; version = "0.8";

View File

@ -95,6 +95,7 @@ let
removed = name: date: throw "the ${name} terraform provider removed from nixpkgs on ${date}"; removed = name: date: throw "the ${name} terraform provider removed from nixpkgs on ${date}";
in in
lib.optionalAttrs config.allowAliases { lib.optionalAttrs config.allowAliases {
fly = archived "fly" "2023/10";
ksyun = removed "ksyun" "2023/04"; ksyun = removed "ksyun" "2023/04";
}; };

View File

@ -425,15 +425,6 @@
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-RqYzqKPzb5GcrzHnEDZC7GaBt1zP8g28Wo3WNAe07Ck=" "vendorHash": "sha256-RqYzqKPzb5GcrzHnEDZC7GaBt1zP8g28Wo3WNAe07Ck="
}, },
"fly": {
"hash": "sha256-9QB2fbggCKcJz8tkSYgq/X8r+MB2M76VCWXgsHARTkU=",
"homepage": "https://registry.terraform.io/providers/fly-apps/fly",
"owner": "fly-apps",
"repo": "terraform-provider-fly",
"rev": "v0.0.23",
"spdx": "BSD-3-Clause",
"vendorHash": "sha256-f+Z6Y2WPxqJoHoCwuK6sgFa8nUnkW/WwrD55dtU0wtM="
},
"fortios": { "fortios": {
"hash": "sha256-RpcKMndbO3wbkHmrINkbsQ+UeFsZrQ7x02dv8ZpFMec=", "hash": "sha256-RpcKMndbO3wbkHmrINkbsQ+UeFsZrQ7x02dv8ZpFMec=",
"homepage": "https://registry.terraform.io/providers/fortinetdev/fortios", "homepage": "https://registry.terraform.io/providers/fortinetdev/fortios",

View File

@ -0,0 +1,20 @@
{ seclists
, stdenvNoCC
}:
stdenvNoCC.mkDerivation {
pname = "rockyou";
inherit (seclists) version src;
installPhase = ''
runHook preInstall
mkdir -p $out/share/wordlists/
tar -xvzf ${seclists}/share/wordlists/seclists/Passwords/Leaked-Databases/rockyou.txt.tar.gz -C $out/share/wordlists/
runHook postInstall
'';
meta = seclists.meta // {
description = "A famous wordlist often used for brute force attacks";
};
}

View File

@ -0,0 +1,34 @@
{ lib
, fetchFromGitHub
, stdenvNoCC
}:
stdenvNoCC.mkDerivation {
pname = "seclists";
version = "2023.2";
src = fetchFromGitHub {
owner = "danielmiessler";
repo = "SecLists";
rev = "2023.2";
hash = "sha256-yVxb5GaQDuCsyjIV+oZzNUEFoq6gMPeaIeQviwGdAgY=";
};
installPhase = ''
runHook preInstall
mkdir -p $out/share/wordlists/seclists
find . -maxdepth 1 -type d -regextype posix-extended -regex '^./[A-Z].*' -exec cp -R {} $out/share/wordlists/seclists \;
find $out/share/wordlists/seclists -name "*.md" -delete
runHook postInstall
'';
meta = with lib; {
description = "A collection of multiple types of lists used during security assessments, collected in one place";
homepage = "https://github.com/danielmiessler/seclists";
license = licenses.mit;
maintainers = with maintainers; [ tochiaha janik pamplemousse ];
};
}

View File

@ -14,16 +14,16 @@
rustPlatform.buildRustPackage rec { rustPlatform.buildRustPackage rec {
pname = "uiua"; pname = "uiua";
version = "0.1.0"; version = "0.2.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "uiua-lang"; owner = "uiua-lang";
repo = "uiua"; repo = "uiua";
rev = version; rev = version;
hash = "sha256-ZoiT7Yf8Mdwh2vBkRCDxhkbvTkekhTopFNWjUnyoPUQ="; hash = "sha256-RAMQC9weEvTV44nAXjwMYv+4O5aSNNM5UOf/xBb4SBE=";
}; };
cargoHash = "sha256-My/15zNfEqt+a0jganS6LfFiEXENUaPTcyz6SBL0oKo="; cargoHash = "sha256-ZBedAIHwbRiR9i6w0CWIiE+OJvTkmxiEihn7zLAV/Dg=";
nativeBuildInputs = lib.optionals stdenv.isDarwin [ nativeBuildInputs = lib.optionals stdenv.isDarwin [
rustPlatform.bindgenHook rustPlatform.bindgenHook

View File

@ -0,0 +1,70 @@
{ lib
, callPackage
, nmap
, rockyou
, runtimeShell
, seclists
, symlinkJoin
, tree
, wfuzz
, lists ? [
nmap
rockyou
seclists
wfuzz
]
}:
symlinkJoin rec {
pname = "wordlists";
version = "unstable-2023-10-10";
name = "${pname}-${version}";
paths = lists;
postBuild = ''
mkdir -p $out/bin
# Create a command to show the location of the links.
cat >> $out/bin/wordlists << __EOF__
#!${runtimeShell}
${tree}/bin/tree ${placeholder "out"}/share/wordlists
__EOF__
chmod +x $out/bin/wordlists
# Create a handy command for easy access to the wordlists.
# e.g.: `cat "$(wordlists_path)/rockyou.txt"`, or `ls "$(wordlists_path)/dirbuster"`
cat >> $out/bin/wordlists_path << __EOF__
#!${runtimeShell}
printf "${placeholder "out"}/share/wordlists\n"
__EOF__
chmod +x $out/bin/wordlists_path
'';
meta = with lib; {
description = "A collection of wordlists useful for security testing";
longDescription = ''
The `wordlists` package provides two scripts. One is called {command}`wordlists`,
and it will list a tree of all the wordlists installed. The other one is
called {command}`wordlists_path` which will print the path to the nix store
location of the lists. You can for example do
{command}`$(wordlists_path)/rockyou.txt` to get the location of the
[rockyou](https://en.wikipedia.org/wiki/RockYou#Data_breach)
wordlist. If you want to modify the available wordlists you can override
the `lists` attribute`. In your nixos configuration this would look
similiar to this:
```nix
environment.systemPackages = [
(pkgs.wordlists.override { lists = with pkgs; [ rockyou ] })
]
```
you can use this with nix-shell by doing:
{command}`nix-shell -p 'wordlists.override { lists = with (import <nixpkgs> {}); [ nmap ]; }'
If you want to add a new package that provides wordlist/s the convention
is to copy it to {file}`$out/share/wordlists/myNewWordlist`.
'';
maintainers = with maintainers; [ janik pamplemousse ];
};
}

View File

@ -7,16 +7,16 @@ let
arch = if stdenv.isAarch64 then "arm64" else "x86_64"; arch = if stdenv.isAarch64 then "arm64" else "x86_64";
hashes = hashes =
{ {
"x86_64-linux" = "b13110bacc3f71c2a3e12c52172a821a85cc13243a95249ca18c8beb296c0ce8"; "x86_64-linux" = "a51d5b9a011c54b0001ff3273cee027774686e233adadb20b1978d2cabfe32a6";
"aarch64-linux" = "afbc71f0570b86215942d1b4207fe3de0299e6fdfd2e6caac78bf688c81b9bd1"; "aarch64-linux" = "8904ce928f60e06df1f06b3af5ee5eb320c388922aa38b698d823df1d73e8e49";
"x86_64-darwin" = "50a3df09b02b34e1653beb1507c6de0f332674e088ded7c66af4e5987753304e"; "x86_64-darwin" = "b4d1bb5ddc3503862750e5b241f74c22dc013792bc4f410dd914a5216e20ed2f";
"aarch64-darwin" = "174a5bfec355361c4f030861405513818be25fd7e4325f7221aa71ebd27475d3"; "aarch64-darwin" = "6d20e384dae90bb994c3f1e866c964124c7e8a51e9e08bad0e90a2b560bb5a18";
}; };
in in
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "lamdera"; pname = "lamdera";
version = "1.2.0"; version = "1.2.1";
src = fetchurl { src = fetchurl {
url = "https://static.lamdera.com/bin/lamdera-${version}-${os}-${arch}"; url = "https://static.lamdera.com/bin/lamdera-${version}-${os}-${arch}";

View File

@ -6,8 +6,8 @@
, pkg-config , pkg-config
, openssl , openssl
, Security , Security
, libiconv
, nix-update-script , nix-update-script
, SystemConfiguration
}: }:
rustPlatform.buildRustPackage rec { rustPlatform.buildRustPackage rec {
@ -24,7 +24,7 @@ rustPlatform.buildRustPackage rec {
nativeBuildInputs = [ git pkg-config ]; nativeBuildInputs = [ git pkg-config ];
buildInputs = [ openssl ] ++ buildInputs = [ openssl ] ++
lib.optionals stdenv.isDarwin [ Security libiconv ]; lib.optionals stdenv.isDarwin [ Security SystemConfiguration ];
cargoHash = "sha256-ffnDTGg+m0NUhG2BYjsXb2fWHeQmtDcBGqQDLqwZMWI="; cargoHash = "sha256-ffnDTGg+m0NUhG2BYjsXb2fWHeQmtDcBGqQDLqwZMWI=";

View File

@ -14,6 +14,10 @@ stdenv.mkDerivation (finalAttrs: {
hash= "sha256-T4feegblOeG+NU+c+PAobf8HT8KDSfcINkRAa1hNpkY="; hash= "sha256-T4feegblOeG+NU+c+PAobf8HT8KDSfcINkRAa1hNpkY=";
}; };
patches = [
./readlink.patch
];
configureFlags = [ "--enable-mcpplib" ]; configureFlags = [ "--enable-mcpplib" ];
meta = with lib; { meta = with lib; {

View File

@ -0,0 +1,24 @@
From 1c4b0f26614bff331eb8a9f2b514309af6f31fd0 Mon Sep 17 00:00:00 2001
From: Jose <pepone@users.noreply.github.com>
Date: Mon, 26 Jun 2023 16:43:43 +0200
Subject: [PATCH] Add 'unistd' header for readlink (#8)
---
src/system.c | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/src/system.c b/src/system.c
index a3501f9..646caf6 100644
--- a/src/system.c
+++ b/src/system.c
@@ -37,6 +37,11 @@
* 2. append the system-dependent routines in this file.
*/
+
+#ifndef _MSC_VER
+# include <unistd.h> // For readlink()
+#endif
+
#if PREPROCESSED
#include "mcpp.H"
#else

View File

@ -7,15 +7,13 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "boost-sml"; pname = "boost-sml";
# This is first commit since 1.1.6 that passes all tests (test_policies_logging is commented out) version = "1.1.9";
version = "1.1.6";
working_tests = "24d762d1901f4f6afaa5c5e0d1b7b77537964694";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "boost-ext"; owner = "boost-ext";
repo = "sml"; repo = "sml";
rev = "${working_tests}"; rev = "v${version}";
hash = "sha256-ZhIfyYdzrzPTAYevOz5I6tAcUiLRMV8HENKX9jychEY="; hash = "sha256-RYgSpnsmgZybpkJALIzxpkDRfe9QF2FHG+nA3msFaK0=";
}; };
buildInputs = [ boost ]; buildInputs = [ boost ];

View File

@ -1,6 +1,6 @@
let version = "2.9.11"; in let version = "2.9.11"; in
{ stdenv, lib, buildPackages, fetchurl, zlib, gettext { stdenv, lib, buildPackages, fetchurl, zlib, gettext
, wordlists ? [ (fetchurl { , lists ? [ (fetchurl {
url = "https://github.com/cracklib/cracklib/releases/download/v${version}/cracklib-words-${version}.gz"; url = "https://github.com/cracklib/cracklib/releases/download/v${version}/cracklib-words-${version}.gz";
hash = "sha256-popxGjE1c517Z+nzYLM/DU7M+b1/rE0XwNXkVqkcUXo="; hash = "sha256-popxGjE1c517Z+nzYLM/DU7M+b1/rE0XwNXkVqkcUXo=";
}) ] }) ]
@ -23,7 +23,7 @@ stdenv.mkDerivation rec {
patchShebangs util patchShebangs util
'' + '' '' + ''
ln -vs ${toString wordlists} dicts/ ln -vs ${toString lists} dicts/
''; '';
postInstall = '' postInstall = ''

View File

@ -16,7 +16,7 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "intel-media-driver"; pname = "intel-media-driver";
version = "23.1.6"; version = "23.3.5";
outputs = [ "out" "dev" ]; outputs = [ "out" "dev" ];
@ -24,14 +24,14 @@ stdenv.mkDerivation rec {
owner = "intel"; owner = "intel";
repo = "media-driver"; repo = "media-driver";
rev = "intel-media-${version}"; rev = "intel-media-${version}";
sha256 = "sha256-Z1xBU+4SdwknXpYUS8EwEURNIsg2+R/U0CcW3FW325M="; hash = "sha256-7OdLpqO2evNeyxceOtHEI7sJCVybqvrcM1ZZx8bI4xw=";
}; };
patches = [ patches = [
# fix platform detection # fix platform detection
(fetchpatch { (fetchpatch {
url = "https://salsa.debian.org/multimedia-team/intel-media-driver-non-free/-/raw/04ffb03f744780a55aba311c612d708b00584bb7/debian/patches/0002-Remove-settings-based-on-ARCH.patch"; url = "https://salsa.debian.org/multimedia-team/intel-media-driver-non-free/-/raw/7376a99f060c26d6be8e56674da52a61662617b9/debian/patches/0002-Remove-settings-based-on-ARCH.patch";
sha256 = "sha256-o/Pg0S53SYh3O7L+AwxOPl1Bx4TS6iKB8ql8GhhHI/o="; hash = "sha256-57yePuHWYb3XXrB4MjYO2h6jbqfs4SGTLlLG91el8M4=";
}) })
]; ];

View File

@ -10,6 +10,7 @@
, libpng , libpng
, boost , boost
, guile , guile
, python
, qtbase , qtbase
, darwin , darwin
}: }:
@ -25,8 +26,8 @@ stdenv.mkDerivation {
hash = "sha256-OITy3fJx+Z6856V3D/KpSQRJztvOdJdqUv1c65wNgCc="; hash = "sha256-OITy3fJx+Z6856V3D/KpSQRJztvOdJdqUv1c65wNgCc=";
}; };
nativeBuildInputs = [ wrapQtAppsHook cmake ninja pkg-config ]; nativeBuildInputs = [ wrapQtAppsHook cmake ninja pkg-config python.pkgs.pythonImportsCheckHook ];
buildInputs = [ eigen zlib libpng boost guile qtbase ] buildInputs = [ eigen zlib libpng boost guile python qtbase ]
++ lib.optionals stdenv.isDarwin [ darwin.apple_sdk_11_0.frameworks.Cocoa ]; ++ lib.optionals stdenv.isDarwin [ darwin.apple_sdk_11_0.frameworks.Cocoa ];
preConfigure = '' preConfigure = ''
@ -42,6 +43,14 @@ stdenv.mkDerivation {
--replace "LIBFIVE_STDLIB_DIR=$<TARGET_FILE_DIR:libfive-stdlib>" \ --replace "LIBFIVE_STDLIB_DIR=$<TARGET_FILE_DIR:libfive-stdlib>" \
"LIBFIVE_STDLIB_DIR=$out/lib" "LIBFIVE_STDLIB_DIR=$out/lib"
substituteInPlace libfive/bind/python/CMakeLists.txt \
--replace ' ''${PYTHON_SITE_PACKAGES_DIR}' \
" $out/${python.sitePackages}" \
substituteInPlace libfive/bind/python/libfive/ffi.py \
--replace "os.path.join('libfive', folder)" \
"os.path.join('$out/${python.sitePackages}/libfive', folder)" \
export XDG_CACHE_HOME=$(mktemp -d)/.cache export XDG_CACHE_HOME=$(mktemp -d)/.cache
''; '';
@ -63,12 +72,29 @@ stdenv.mkDerivation {
'' + '' '' + ''
# Link "Studio" binary to "libfive-studio" to be more obvious: # Link "Studio" binary to "libfive-studio" to be more obvious:
ln -s "$out/bin/Studio" "$out/bin/libfive-studio" ln -s "$out/bin/Studio" "$out/bin/libfive-studio"
# Create links since libfive looks for the library in a specific path.
mkdir -p "$out/${python.sitePackages}/libfive/src"
ln -s "$out"/lib/libfive.* "$out/${python.sitePackages}/libfive/src/"
mkdir -p "$out/${python.sitePackages}/libfive/stdlib"
ln -s "$out"/lib/libfive-stdlib.* "$out/${python.sitePackages}/libfive/stdlib/"
# Create links so Studio can find the bindings.
mkdir -p "$out/libfive/bind"
ln -s "$out/${python.sitePackages}" "$out/libfive/bind/python"
''; '';
pythonImportsCheck = [
"libfive"
"libfive.runner"
"libfive.shape"
"libfive.stdlib"
];
meta = with lib; { meta = with lib; {
description = "Infrastructure for solid modeling with F-Reps in C, C++, and Guile"; description = "Infrastructure for solid modeling with F-Reps in C, C++, and Guile";
homepage = "https://libfive.com/"; homepage = "https://libfive.com/";
maintainers = with maintainers; [ hodapp kovirobi ]; maintainers = with maintainers; [ hodapp kovirobi wulfsta ];
license = with licenses; [ mpl20 gpl2Plus ]; license = with licenses; [ mpl20 gpl2Plus ];
platforms = with platforms; all; platforms = with platforms; all;
}; };

View File

@ -1,4 +1,4 @@
{ lib, stdenv, fetchFromGitHub, pkg-config }: { lib, stdenv, fetchFromGitHub, fetchpatch }:
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
version = "2.11.10"; version = "2.11.10";
@ -11,6 +11,15 @@ stdenv.mkDerivation rec {
hash = "sha256-Rbm45HRbRKQ6Cdup+gvKJ1xkK1HKG3irR5AIjhLer7g="; hash = "sha256-Rbm45HRbRKQ6Cdup+gvKJ1xkK1HKG3irR5AIjhLer7g=";
}; };
patches = [
(fetchpatch {
url = "https://github.com/coin-or/CoinUtils/commit/1700ed92c2bc1562aabe65dee3b4885bd5c87fb9.patch";
stripLen = 1;
extraPrefix = "CoinUtils/";
hash = "sha256-8S6XteZvoJlL+5MWiOrW7HXsdcnzpuEFTyzX9qg7OUY=";
})
];
doCheck = true; doCheck = true;
meta = with lib; { meta = with lib; {

View File

@ -2,11 +2,11 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "maestro"; pname = "maestro";
version = "1.34.0"; version = "1.34.1";
src = fetchurl { src = fetchurl {
url = "https://github.com/mobile-dev-inc/maestro/releases/download/cli-${version}/maestro.zip"; url = "https://github.com/mobile-dev-inc/maestro/releases/download/cli-${version}/maestro.zip";
sha256 = "1qbva38lcy1rm5k6r207hk3nqrr07h7x9sdppz4w5f37q0ll986r"; sha256 = "0whnhcf7a3j01693254qqwfk9d3xa4icv4kyqkn4ihxyibznb91d";
}; };
dontUnpack = true; dontUnpack = true;

View File

@ -1,5 +1,6 @@
{ lib { lib
, fetchFromGitHub , fetchFromGitHub
, fetchpatch
, buildPythonPackage , buildPythonPackage
, packaging , packaging
, setuptools , setuptools
@ -23,6 +24,14 @@ buildPythonPackage rec {
hash = "sha256-2yajhuRyQ7BqghbSgPClW3inpw4TW2DhgQbomcRFx94="; hash = "sha256-2yajhuRyQ7BqghbSgPClW3inpw4TW2DhgQbomcRFx94=";
}; };
patches = [
# Removes `register` storage class specifier, which is not allowed in C++17.
(fetchpatch {
url = "https://github.com/pytroll/aggdraw/commit/157ed49803567e8c3eeb7dfeff4c116db35747f7.patch";
hash = "sha256-QSzpO90u5oSBWUzehRFbXgZ1ApEfLlfp11MUx6w11aI=";
})
];
nativeBuildInputs = [ nativeBuildInputs = [
packaging packaging
setuptools setuptools

View File

@ -22,7 +22,7 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "aioesphomeapi"; pname = "aioesphomeapi";
version = "18.2.1"; version = "18.2.4";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.9"; disabled = pythonOlder "3.9";
@ -31,7 +31,7 @@ buildPythonPackage rec {
owner = "esphome"; owner = "esphome";
repo = pname; repo = pname;
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-PW3/V4PTm+UxTsfSSvOEX+FGcuF4m+mDOz6Z/AzB2qk="; hash = "sha256-m82UfhcmAFBDfSVmia6nhBB2qyQjSZJbXtzD/sGeqk4=";
}; };
nativeBuildInputs = [ nativeBuildInputs = [

View File

@ -11,7 +11,7 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "bluetooth-data-tools"; pname = "bluetooth-data-tools";
version = "1.13.0"; version = "1.14.0";
format = "pyproject"; format = "pyproject";
disabled = pythonOlder "3.9"; disabled = pythonOlder "3.9";
@ -20,7 +20,7 @@ buildPythonPackage rec {
owner = "Bluetooth-Devices"; owner = "Bluetooth-Devices";
repo = pname; repo = pname;
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-qvr4CYOMgyTEFONpe6KA176H56+w6RHThAyUthIzszE="; hash = "sha256-eO17EuZ9K6tLAyEGmTaxw1Cxfz3XPPwNCcIwZ2/uHug=";
}; };
# The project can build both an optimized cython version and an unoptimized # The project can build both an optimized cython version and an unoptimized

View File

@ -1,12 +1,12 @@
{ lib { lib
, buildPythonPackage , buildPythonPackage
, fetchPypi , fetchFromGitHub
, packaging , packaging
, setuptools
, setuptools-scm , setuptools-scm
, shapely , shapely
, sqlalchemy , sqlalchemy
, alembic , alembic
, psycopg2
, pytestCheckHook , pytestCheckHook
, pythonOlder , pythonOlder
}: }:
@ -14,37 +14,35 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "geoalchemy2"; pname = "geoalchemy2";
version = "0.14.2"; version = "0.14.2";
format = "setuptools"; pyproject = true;
disabled = pythonOlder "3.7"; disabled = pythonOlder "3.7";
src = fetchPypi { src = fetchFromGitHub {
pname = "GeoAlchemy2"; owner = "geoalchemy";
inherit version; repo = "geoalchemy2";
hash = "sha256-jKAj3LmjbG0xLztK7mMdZjhSZOL8n+sKsPRG61YJQH0="; rev = "refs/tags/${version}";
hash = "sha256-C/F1hpL2DnzC4UPAGGFntlQlULCx5Ufzkw7EIrzRV7I=";
}; };
nativeBuildInputs = [ nativeBuildInputs = [
setuptools
setuptools-scm setuptools-scm
]; ];
propagatedBuildInputs = [ propagatedBuildInputs = [
packaging
shapely
sqlalchemy sqlalchemy
packaging
]; ];
nativeCheckInputs = [ nativeCheckInputs = [
alembic alembic
psycopg2
pytestCheckHook pytestCheckHook
]; ] ++ passthru.optional-dependencies.shapely;
pytestFlagsArray = [ env = {
# tests require live postgis database SETUPTOOLS_SCM_PRETEND_VERSION = version;
"--deselect=tests/test_pickle.py::TestPickle::test_pickle_unpickle" };
"--deselect=tests/gallery/test_specific_compilation.py::test_specific_compilation"
];
disabledTestPaths = [ disabledTestPaths = [
# tests require live databases # tests require live databases
@ -52,23 +50,29 @@ buildPythonPackage rec {
"tests/gallery/test_length_at_insert.py" "tests/gallery/test_length_at_insert.py"
"tests/gallery/test_insert_raster.py" "tests/gallery/test_insert_raster.py"
"tests/gallery/test_orm_mapped_v2.py" "tests/gallery/test_orm_mapped_v2.py"
"tests/gallery/test_specific_compilation.py"
"tests/gallery/test_summarystatsagg.py" "tests/gallery/test_summarystatsagg.py"
"tests/gallery/test_type_decorator.py" "tests/gallery/test_type_decorator.py"
"tests/test_functional.py" "tests/test_functional.py"
"tests/test_functional_postgresql.py" "tests/test_functional_postgresql.py"
"tests/test_functional_mysql.py" "tests/test_functional_mysql.py"
"tests/test_alembic_migrations.py" "tests/test_alembic_migrations.py"
"tests/test_pickle.py"
]; ];
pythonImportsCheck = [ pythonImportsCheck = [
"geoalchemy2" "geoalchemy2"
]; ];
passthru.optional-dependencies = {
shapely = [ shapely ];
};
meta = with lib; { meta = with lib; {
description = "Toolkit for working with spatial databases"; description = "Toolkit for working with spatial databases";
homepage = "https://geoalchemy-2.readthedocs.io/"; homepage = "https://geoalchemy-2.readthedocs.io/";
changelog = "https://github.com/geoalchemy/geoalchemy2/releases/tag/${version}"; changelog = "https://github.com/geoalchemy/geoalchemy2/releases/tag/${version}";
license = licenses.mit; license = licenses.mit;
maintainers = with maintainers; [ ]; maintainers = with maintainers; [ nickcao ];
}; };
} }

View File

@ -11,7 +11,7 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "home-assistant-bluetooth"; pname = "home-assistant-bluetooth";
version = "1.10.3"; version = "1.10.4";
format = "pyproject"; format = "pyproject";
disabled = pythonOlder "3.9"; disabled = pythonOlder "3.9";
@ -20,7 +20,7 @@ buildPythonPackage rec {
owner = "home-assistant-libs"; owner = "home-assistant-libs";
repo = pname; repo = pname;
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-77RrqmoCftPc48fFtuuFo0KqGX3n+6aDx2RFkwGCNzQ="; hash = "sha256-7gkesxQI6QBxyQpHlSSh1w6MDeid0dSdXn+jnxvafD0=";
}; };
postPatch = '' postPatch = ''

View File

@ -15,14 +15,15 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "jupyter-cache"; pname = "jupyter-cache";
version = "0.6.1"; version = "1.0.0";
format = "pyproject"; pyproject = true;
disabled = pythonOlder "3.7"; disabled = pythonOlder "3.9";
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit version;
sha256 = "sha256-Jvg5ARQ+30ry8/9akeLSrSmORuLO4DyAcdN6I6Y8y/w="; pname = "jupyter_cache";
hash = "sha256-0Pp9dTPNV5gZjYiJMYJpqME4LtOyL2IsCak1ZSH0hoc=";
}; };
nativeBuildInputs = [ nativeBuildInputs = [
@ -45,6 +46,7 @@ buildPythonPackage rec {
meta = with lib; { meta = with lib; {
description = "A defined interface for working with a cache of jupyter notebooks"; description = "A defined interface for working with a cache of jupyter notebooks";
homepage = "https://github.com/executablebooks/jupyter-cache"; homepage = "https://github.com/executablebooks/jupyter-cache";
changelog = "https://github.com/executablebooks/jupyter-cache/blob/v${version}/CHANGELOG.md";
license = licenses.mit; license = licenses.mit;
maintainers = with maintainers; [ marsam ]; maintainers = with maintainers; [ marsam ];
}; };

View File

@ -1,33 +0,0 @@
From 75baa1751973378cb96fb204b0a18a74e5caa2d1 Mon Sep 17 00:00:00 2001
From: Rouven Czerwinski <r.czerwinski@pengutronix.de>
Date: Wed, 17 Feb 2021 14:03:20 +0100
Subject: [PATCH] serialdriver: remove pyserial version check
This check isn't required on NixOS, since pyserial within NixOS already
contains the patches.
Signed-off-by: Rouven Czerwinski <r.czerwinski@pengutronix.de>
---
labgrid/driver/serialdriver.py | 6 ------
1 file changed, 6 deletions(-)
diff --git a/labgrid/driver/serialdriver.py b/labgrid/driver/serialdriver.py
index 126f674e..59a92269 100644
--- a/labgrid/driver/serialdriver.py
+++ b/labgrid/driver/serialdriver.py
@@ -27,12 +27,6 @@ class SerialDriver(ConsoleExpectMixin, Driver, ConsoleProtocol):
bindings = {"port": "SerialPort", }
else:
bindings = {"port": {"SerialPort", "NetworkSerialPort"}, }
- if version.parse(serial.__version__) != version.Version('3.4.0.1'):
- message = ("The installed pyserial version does not contain important RFC2217 fixes.\n"
- "You can install the labgrid fork via:\n"
- "pip uninstall pyserial\n"
- "pip install https://github.com/labgrid-project/pyserial/archive/v3.4.0.1.zip#egg=pyserial\n") # pylint: disable=line-too-long
- warnings.warn(message)
txdelay = attr.ib(default=0.0, validator=attr.validators.instance_of(float))
timeout = attr.ib(default=3.0, validator=attr.validators.instance_of(float))
--
2.30.0

View File

@ -17,7 +17,9 @@
, pyusb , pyusb
, pyyaml , pyyaml
, requests , requests
, setuptools
, setuptools-scm , setuptools-scm
, wheel
, xmodem , xmodem
}: }:
@ -32,13 +34,13 @@ buildPythonPackage rec {
sha256 = "sha256-yhlBqqCLOt6liw4iv8itG6E4QfIa7cW76QJqefUM5dw="; sha256 = "sha256-yhlBqqCLOt6liw4iv8itG6E4QfIa7cW76QJqefUM5dw=";
}; };
patches = [ nativeBuildInputs = [
# Pyserial within Nixpkgs already includes the necessary fix, remove the setuptools
# pyserial version check from labgrid. setuptools-scm
./0001-serialdriver-remove-pyserial-version-check.patch wheel
]; ];
nativeBuildInputs = [ setuptools-scm ]; pyproject = true;
propagatedBuildInputs = [ propagatedBuildInputs = [
ansicolors ansicolors

View File

@ -11,16 +11,16 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "maison"; pname = "maison";
version = "1.4.0"; version = "1.4.1";
format = "pyproject"; pyproject = true;
disabled = pythonOlder "3.7"; disabled = pythonOlder "3.7";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "dbatten5"; owner = "dbatten5";
repo = pname; repo = "maison";
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-Ny/n1vDWS6eA9zLIB0os5zrbwvutb+7sQ6iPXeid1M0="; hash = "sha256-uJW+7+cIt+jnbiC+HvT7KzyNk1enEtELTxtfc4eXAPU=";
}; };
nativeBuildInputs = [ nativeBuildInputs = [

View File

@ -3,6 +3,7 @@
, buildPythonPackage , buildPythonPackage
, envisage , envisage
, fetchPypi , fetchPypi
, fetchpatch
, numpy , numpy
, packaging , packaging
, pyface , pyface
@ -26,6 +27,24 @@ buildPythonPackage rec {
hash = "sha256-n0J+8spska542S02ibpr7KJMhGDicG2KHJuEKJrT/Z4="; hash = "sha256-n0J+8spska542S02ibpr7KJMhGDicG2KHJuEKJrT/Z4=";
}; };
patches = [
# Adds compatibility with Python 3.11.
# https://github.com/enthought/mayavi/pull/1199
(fetchpatch {
name = "python311-compat.patch";
url = "https://github.com/enthought/mayavi/commit/50c0cbfcf97560be69c84b7c924635a558ebf92f.patch";
hash = "sha256-zZOT6on/f5cEjnDBrNGog/wPQh7rBkaFqrxkBYDUQu0=";
includes = [ "tvtk/src/*" ];
})
# Fixes an incompatible function pointer conversion error
# https://github.com/enthought/mayavi/pull/1266
(fetchpatch {
name = "incompatible-pointer-conversion.patch";
url = "https://github.com/enthought/mayavi/commit/887adc8fe2b076a368070f5b1d564745b03b1964.patch";
hash = "sha256-88H1NNotd4pO0Zw1oLrYk5WNuuVrmTU01HJgsTRfKlo=";
})
];
postPatch = '' postPatch = ''
# building the docs fails with the usual Qt xcb error, so skip: # building the docs fails with the usual Qt xcb error, so skip:
substituteInPlace setup.py \ substituteInPlace setup.py \

View File

@ -2,25 +2,56 @@
, buildPythonPackage , buildPythonPackage
, fetchPypi , fetchPypi
, html5lib , html5lib
, pytestCheckHook
, pythonOlder
, setuptools
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "mechanize"; pname = "mechanize";
version = "0.4.8"; version = "0.4.9";
pyproject = true;
disabled = pythonOlder "3.7";
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit pname version;
hash = "sha256-XoasB3c1fgBusEzSj37Z+BHUjf+mA9OJGsbSuSKA3JE="; hash = "sha256-aaXtsJYvkh6LEINzaMIkLYrQSfC5H/aZzn9gG/xDFSE=";
}; };
propagatedBuildInputs = [ html5lib ]; nativeBuildInputs = [
setuptools
];
doCheck = false; propagatedBuildInputs = [
html5lib
];
nativeCheckInputs = [
pytestCheckHook
];
pythonImportsCheck = [
"mechanize"
];
disabledTestPaths = [
# Tests require network access
"test/test_urllib2_localnet.py"
"test/test_functional.py"
];
disabledTests = [
# Tests require network access
"test_pickling"
"test_password_manager"
];
meta = with lib; { meta = with lib; {
description = "Stateful programmatic web browsing in Python"; description = "Stateful programmatic web browsing in Python";
homepage = "https://github.com/python-mechanize/mechanize"; homepage = "https://github.com/python-mechanize/mechanize";
license = "BSD-style"; changelog = "https://github.com/python-mechanize/mechanize/blob/v${version}/ChangeLog";
license = licenses.bsd3;
maintainers = with maintainers; [ ];
}; };
} }

View File

@ -1,22 +1,47 @@
{ lib, buildPythonPackage, fetchPypi, cryptography, protobuf }: { lib
, buildPythonPackage
, cryptography
, fetchPypi
, protobuf
, pytestCheckHook
, pythonOlder
, setuptools
}:
buildPythonPackage rec { buildPythonPackage rec {
pname = "omemo-dr"; pname = "omemo-dr";
version = "1.0.0"; version = "1.0.1";
pyproject = true;
disabled = pythonOlder "3.10";
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit pname version;
hash = "sha256-sP5QI+lHoXt0D7ftSqJGEg1vIdgZtYEulN/JVwUgvmE="; hash = "sha256-KoqMdyMdc5Sb3TdSeNTVomElK9ruUstiQayyUcIC02E=";
}; };
nativeBuildInputs = [
setuptools
];
propagatedBuildInputs = [ propagatedBuildInputs = [
cryptography cryptography
protobuf protobuf
]; ];
meta = { nativeCheckInputs = [
pytestCheckHook
];
pythonImportsCheck = [
"omemo_dr"
];
meta = with lib; {
description = "OMEMO Double Ratchet"; description = "OMEMO Double Ratchet";
license = lib.licenses.lgpl3;
homepage = "https://dev.gajim.org/gajim/omemo-dr/"; homepage = "https://dev.gajim.org/gajim/omemo-dr/";
changelog = "https://dev.gajim.org/gajim/omemo-dr/-/blob/v${version}/CHANGELOG.md";
license = licenses.gpl3Only;
maintainers = with maintainers; [ ];
}; };
} }

View File

@ -20,11 +20,11 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "omrdatasettools"; pname = "omrdatasettools";
version = "1.3.1"; version = "1.4.0";
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit pname version;
sha256 = "0cdq02jp8vh78yjq9bncjjl0pb554idrcxkd62rzwk4l6ss2fkw5"; sha256 = "sha256-kUUcbti29uDnSEvCubMAUnptlaZGpEsW2IBGSAGnGyQ=";
}; };
propagatedBuildInputs = [ propagatedBuildInputs = [

View File

@ -16,7 +16,7 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "pyatmo"; pname = "pyatmo";
version = "7.5.0"; version = "7.6.0";
format = "pyproject"; format = "pyproject";
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.8";
@ -25,7 +25,7 @@ buildPythonPackage rec {
owner = "jabesq"; owner = "jabesq";
repo = "pyatmo"; repo = "pyatmo";
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-GucatimZTg0Fggrz4bG1x6YSa3wE/uLGB4ufil/km3w="; hash = "sha256-rAmSxayXljOJchiMtSOgnotzQmapK2n86HwNi9HJX68=";
}; };
SETUPTOOLS_SCM_PRETEND_VERSION = version; SETUPTOOLS_SCM_PRETEND_VERSION = version;

View File

@ -60,6 +60,10 @@ buildPythonPackage rec {
${python.pythonOnBuildForHost.interpreter} buildconfig/config.py ${python.pythonOnBuildForHost.interpreter} buildconfig/config.py
''; '';
env = lib.optionalAttrs stdenv.cc.isClang {
NIX_CFLAGS_COMPILE = "-Wno-error=incompatible-function-pointer-types";
};
checkPhase = '' checkPhase = ''
runHook preCheck runHook preCheck

View File

@ -18,11 +18,11 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "python-jenkins"; pname = "python-jenkins";
version = "1.8.1"; version = "1.8.2";
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit pname version;
hash = "sha256-/18dklOdkD+GmwLq8rExREfm1tePdn7c/dkpZ9UyucY="; hash = "sha256-VufauwYHvbjh1vxtLUMBq+2+2RZdorIG+svTBxy27ss=";
}; };
# test uses timeout mechanism unsafe for use with the "spawn" # test uses timeout mechanism unsafe for use with the "spawn"

View File

@ -1,11 +1,12 @@
{ lib { lib
, stdenv , stdenv
, fetchpatch
, buildPythonPackage , buildPythonPackage
, fetchPypi , fetchFromGitHub
, pythonOlder , pythonOlder
, setuptools , setuptools
, tdlib , tdlib
, telegram-text
, pytestCheckHook
}: }:
buildPythonPackage rec { buildPythonPackage rec {
@ -13,30 +14,33 @@ buildPythonPackage rec {
version = "0.18.0"; version = "0.18.0";
disabled = pythonOlder "3.6"; disabled = pythonOlder "3.6";
src = fetchPypi { src = fetchFromGitHub {
inherit pname version; owner = "alexander-akhmetov";
hash = "sha256-UbJW/op01qe/HchfJUlBPBY9/W8NbZkEmFM8gZ5+EmI="; repo = "python-telegram";
rev = version;
hash = "sha256-2Q0nUZ2TMVWznd05+fqYojkRn4xfFZJrlqb1PMuBsAY=";
}; };
patches = [
# Search for the system library first, and fallback to the embedded one if the system was not found
(fetchpatch {
url = "https://github.com/alexander-akhmetov/python-telegram/commit/b0af0985910ebb8940cff1b92961387aad683287.patch";
hash = "sha256-ZqsntaiC2y9l034gXDMeD2BLO/RcsbBII8FomZ65/24=";
})
];
postPatch = '' postPatch = ''
# Remove bundled libtdjson # Remove bundled libtdjson
rm -fr telegram/lib rm -fr telegram/lib
substituteInPlace telegram/tdjson.py \ substituteInPlace telegram/tdjson.py \
--replace "ctypes.util.find_library(\"libtdjson\")" \ --replace "ctypes.util.find_library(\"tdjson\")" \
"\"${tdlib}/lib/libtdjson${stdenv.hostPlatform.extensions.sharedLibrary}\"" "\"${tdlib}/lib/libtdjson${stdenv.hostPlatform.extensions.sharedLibrary}\""
''; '';
propagatedBuildInputs = [ propagatedBuildInputs = [
setuptools setuptools
telegram-text
];
nativeCheckInputs = [
pytestCheckHook
];
disabledTests = [
"TestGetTdjsonTdlibPath"
]; ];
pythonImportsCheck = [ pythonImportsCheck = [

View File

@ -27,12 +27,13 @@
, setuptools , setuptools
, pytestCheckHook , pytestCheckHook
, pytest-cov , pytest-cov
, pytest-mock
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "scikit-rf"; pname = "scikit-rf";
version = "0.29.0"; version = "0.29.1";
format = "pyproject"; pyproject = true;
disabled = pythonOlder "3.7"; disabled = pythonOlder "3.7";
@ -40,7 +41,7 @@ buildPythonPackage rec {
owner = "scikit-rf"; owner = "scikit-rf";
repo = pname; repo = pname;
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-rBOw1rIEF8Ia6xXlXxVzRRiUxrOjOAlipFuKiL+gRl0="; hash = "sha256-sLE6rcBGUKmk5y7oO06rHON3GVIjcvnKlr6Tgddj64Y=";
}; };
buildInputs = [ buildInputs = [
@ -88,6 +89,7 @@ buildPythonPackage rec {
coverage coverage
flake8 flake8
pytest-cov pytest-cov
pytest-mock
nbval nbval
matplotlib matplotlib
pyvisa pyvisa
@ -99,6 +101,12 @@ buildPythonPackage rec {
pytestCheckHook pytestCheckHook
]; ];
# test_calibration.py generates a divide by zero error on darwin
# https://github.com/scikit-rf/scikit-rf/issues/972
disabledTestPaths =
lib.optional (stdenv.isAarch64 && stdenv.isDarwin)
"skrf/calibration/tests/test_calibration.py";
pythonImportsCheck = [ pythonImportsCheck = [
"skrf" "skrf"
]; ];

View File

@ -5,7 +5,7 @@
, cmake , cmake
, qt5 , qt5
, libxcrypt , libxcrypt
, llvmPackages , llvmPackages_15
}: }:
stdenv.mkDerivation { stdenv.mkDerivation {
@ -21,12 +21,12 @@ stdenv.mkDerivation {
cd sources/shiboken2 cd sources/shiboken2
''; '';
CLANG_INSTALL_DIR = llvmPackages.libclang.out; CLANG_INSTALL_DIR = llvmPackages_15.libclang.out;
nativeBuildInputs = [ cmake ]; nativeBuildInputs = [ cmake ];
buildInputs = [ buildInputs = [
llvmPackages.libclang llvmPackages_15.libclang
python python
python.pkgs.setuptools python.pkgs.setuptools
qt5.qtbase qt5.qtbase

View File

@ -7,13 +7,14 @@
, pandas , pandas
, pytestCheckHook , pytestCheckHook
, pythonOlder , pythonOlder
, setuptools-scm
, setuptools , setuptools
, setuptools-scm
, jpype1
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "tabula-py"; pname = "tabula-py";
version = "2.8.1"; version = "2.8.2";
format = "pyproject"; format = "pyproject";
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.8";
@ -22,28 +23,30 @@ buildPythonPackage rec {
owner = "chezou"; owner = "chezou";
repo = pname; repo = pname;
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-QqTfbSwGaNRXBiAzB1fsEawxCvlIunB1j2jSFD9imPI="; hash = "sha256-Zrq1i+HYXXNulyZ/fv00AgVd7ODj3rP9orLq5rT3ERU=";
}; };
patches = [
./java-interpreter-path.patch
];
postPatch = '' postPatch = ''
sed -i 's|@JAVA@|${jre}/bin/java|g' $(find -name '*.py') substituteInPlace tabula/backend.py \
--replace '"java"' '"${lib.getExe jre}"'
''; '';
SETUPTOOLS_SCM_PRETEND_VERSION = version; SETUPTOOLS_SCM_PRETEND_VERSION = version;
nativeBuildInputs = [ nativeBuildInputs = [
setuptools
setuptools-scm setuptools-scm
]; ];
buildInputs = [
jre
];
propagatedBuildInputs = [ propagatedBuildInputs = [
distro distro
numpy numpy
pandas pandas
setuptools jpype1
]; ];
nativeCheckInputs = [ nativeCheckInputs = [
@ -60,6 +63,11 @@ buildPythonPackage rec {
"test_read_pdf_with_remote_template" "test_read_pdf_with_remote_template"
"test_read_remote_pdf" "test_read_remote_pdf"
"test_read_remote_pdf_with_custom_user_agent" "test_read_remote_pdf_with_custom_user_agent"
# not sure what it checks
# probably related to jpype, but we use subprocess instead
# https://github.com/chezou/tabula-py/issues/352#issuecomment-1730791540
# Failed: DID NOT RAISE <class 'RuntimeError'>
"test_read_pdf_with_silent_true"
]; ];
meta = with lib; { meta = with lib; {

View File

@ -1,54 +0,0 @@
diff -ru origsource/tabula/io.py source/tabula/io.py
--- origsource/tabula/io.py 2022-11-23 17:19:35.419837514 +0100
+++ source/tabula/io.py 2022-11-23 17:22:08.204194807 +0100
@@ -79,7 +79,7 @@
)
)
- args = ["java"] + java_options + ["-jar", _jar_path()] + options.build_option_list()
+ args = ["@JAVA@"] + java_options + ["-jar", _jar_path()] + options.build_option_list()
if path:
args.append(path)
diff -ru origsource/tabula/util.py source/tabula/util.py
--- origsource/tabula/util.py 2022-11-23 17:19:35.422837521 +0100
+++ source/tabula/util.py 2022-11-23 17:21:41.514132392 +0100
@@ -26,7 +26,7 @@
try:
res = subprocess.check_output(
- ["java", "-version"], stderr=subprocess.STDOUT
+ ["@JAVA@", "-version"], stderr=subprocess.STDOUT
).decode()
except FileNotFoundError:
diff -ru origsource/tests/test_read_pdf_table.py source/tests/test_read_pdf_table.py
--- origsource/tests/test_read_pdf_table.py 2022-11-23 17:19:35.422837521 +0100
+++ source/tests/test_read_pdf_table.py 2022-11-23 17:21:22.008086776 +0100
@@ -281,7 +281,7 @@
tabula.read_pdf(self.pdf_path, encoding="utf-8")
- target_args = ["java"]
+ target_args = ["@JAVA@"]
if platform.system() == "Darwin":
target_args += ["-Djava.awt.headless=true"]
target_args += [
@@ -355,7 +355,7 @@
tabula.read_pdf(self.pdf_path, encoding="utf-8", silent=False)
- target_args = ["java"]
+ target_args = ["@JAVA@"]
if platform.system() == "Darwin":
target_args += ["-Djava.awt.headless=true"]
target_args += [
@@ -382,7 +382,7 @@
tabula.read_pdf(self.pdf_path, encoding="utf-8", silent=True)
- target_args = ["java"]
+ target_args = ["@JAVA@"]
if platform.system() == "Darwin":
target_args += ["-Djava.awt.headless=true"]
target_args += [

View File

@ -3,8 +3,9 @@
, aresponses , aresponses
, buildPythonPackage , buildPythonPackage
, fetchFromGitHub , fetchFromGitHub
, mashumaro
, orjson
, poetry-core , poetry-core
, pydantic
, pytest-asyncio , pytest-asyncio
, pytestCheckHook , pytestCheckHook
, pythonOlder , pythonOlder
@ -13,22 +14,22 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "tailscale"; pname = "tailscale";
version = "0.3.0"; version = "0.6.0";
format = "pyproject"; format = "pyproject";
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.11";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "frenck"; owner = "frenck";
repo = "python-tailscale"; repo = "python-tailscale";
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-gGDsVGsCBZi/pxD0cyH3+xrvHVBC+wJCcl/NGqsTqiE="; hash = "sha256-wO6yMMU5fxk8GQ0e4ZCse2atlR4wrzulZOFXkVKAsmU=";
}; };
postPatch = '' postPatch = ''
# Upstream doesn't set a version for the pyproject.toml # Upstream doesn't set a version for the pyproject.toml
substituteInPlace pyproject.toml \ substituteInPlace pyproject.toml \
--replace "0.0.0" "${version}" \ --replace 'version = "0.0.0"' 'version = "${version}"' \
--replace "--cov" "" --replace "--cov" ""
''; '';
@ -38,7 +39,8 @@ buildPythonPackage rec {
propagatedBuildInputs = [ propagatedBuildInputs = [
aiohttp aiohttp
pydantic mashumaro
orjson
yarl yarl
]; ];

View File

@ -0,0 +1,39 @@
{ lib
, stdenv
, buildPythonPackage
, fetchFromGitHub
, pythonOlder
, poetry-core
, pytestCheckHook
}:
buildPythonPackage rec {
pname = "telegram-text";
version = "0.1.2";
pyproject = true;
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "SKY-ALIN";
repo = "telegram-text";
rev = "v${version}";
hash = "sha256-p8SVQq7IvkVuOFE8VDugROLY5Wk0L2HmXyacTzFFSP4=";
};
nativeBuildInputs = [
poetry-core
];
nativeCheckInputs = [
pytestCheckHook
];
meta = with lib; {
description = "Python markup module for Telegram messenger";
downloadPage = "https://github.com/SKY-ALIN/telegram-text";
homepage = "https://telegram-text.alinsky.tech/";
changelog = "https://github.com/SKY-ALIN/telegram-text/blob/v${version}/CHANGELOG.md";
license = licenses.mit;
maintainers = with maintainers; [ sikmir ];
};
}

View File

@ -10,7 +10,7 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "ulid-transform"; pname = "ulid-transform";
version = "0.8.1"; version = "0.9.0";
format = "pyproject"; format = "pyproject";
disabled = pythonOlder "3.9"; disabled = pythonOlder "3.9";
@ -19,7 +19,7 @@ buildPythonPackage rec {
owner = "bdraco"; owner = "bdraco";
repo = pname; repo = pname;
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-isngr9CZ2YYuq+5s3p4HXrTU20vPqZGZ1r8mBoVkxiI="; hash = "sha256-r9uxPXpmQSsL1rX4d9TH87olFbZugdGdNG++Ygjie1I=";
}; };
nativeBuildInputs = [ nativeBuildInputs = [

View File

@ -63,6 +63,11 @@ buildPythonPackage rec {
"wfuzz" "wfuzz"
]; ];
postInstall = ''
mkdir -p $out/share/wordlists/wfuzz
cp -R -T "wordlist" "$out/share/wordlists/wfuzz"
'';
meta = with lib; { meta = with lib; {
description = "Web content fuzzer to facilitate web applications assessments"; description = "Web content fuzzer to facilitate web applications assessments";
longDescription = '' longDescription = ''

View File

@ -1,4 +1,8 @@
{ skawarePackages, pkgs }: { lib
, stdenv
, skawarePackages
, pkgs
}:
with skawarePackages; with skawarePackages;
@ -21,6 +25,17 @@ buildPackage {
# Empty the default path, which would be "/usr/bin:bin". # Empty the default path, which would be "/usr/bin:bin".
# It would be set when PATH is empty. This hurts hermeticity. # It would be set when PATH is empty. This hurts hermeticity.
"--with-default-path=" "--with-default-path="
] ++ lib.optionals (stdenv.buildPlatform.config != stdenv.hostPlatform.config) [
# ./configure: sysdep posixspawnearlyreturn cannot be autodetected
# when cross-compiling. Please manually provide a value with the
# --with-sysdep-posixspawnearlyreturn=yes|no|... option.
#
# posixspawnearlyreturn: `yes` if the target has a broken
# `posix_spawn()` implementation that can return before the
# child has successfully exec'ed. That happens with old glibcs
# and some virtual platforms.
"--with-sysdep-posixspawnearlyreturn=no"
]; ];
postInstall = '' postInstall = ''

View File

@ -22,6 +22,7 @@
, file , file
, substituteAll , substituteAll
, writeTextFile , writeTextFile
, writeShellApplication
}: }:
let let
@ -128,6 +129,16 @@ let
defaultShellPath = lib.makeBinPath defaultShellUtils; defaultShellPath = lib.makeBinPath defaultShellUtils;
bashWithDefaultShellUtils = writeShellApplication {
name = "bash";
text = ''
if [[ "$PATH" == "/no-such-path" ]]; then
export PATH=${defaultShellPath}
fi
exec ${bash}/bin/bash "$@"
'';
};
platforms = lib.platforms.linux ++ lib.platforms.darwin; platforms = lib.platforms.linux ++ lib.platforms.darwin;
system = if stdenv.hostPlatform.isDarwin then "darwin" else "linux"; system = if stdenv.hostPlatform.isDarwin then "darwin" else "linux";
@ -420,8 +431,8 @@ stdenv.mkDerivation rec {
# If you add more replacements here, you must change the grep above! # If you add more replacements here, you must change the grep above!
# Only files containing /bin are taken into account. # Only files containing /bin are taken into account.
substituteInPlace "$path" \ substituteInPlace "$path" \
--replace /bin/bash ${bash}/bin/bash \ --replace /bin/bash ${bashWithDefaultShellUtils}/bin/bash \
--replace "/usr/bin/env bash" ${bash}/bin/bash \ --replace "/usr/bin/env bash" ${bashWithDefaultShellUtils}/bin/bash \
--replace "/usr/bin/env python" ${python3}/bin/python \ --replace "/usr/bin/env python" ${python3}/bin/python \
--replace /usr/bin/env ${coreutils}/bin/env \ --replace /usr/bin/env ${coreutils}/bin/env \
--replace /bin/true ${coreutils}/bin/true --replace /bin/true ${coreutils}/bin/true
@ -436,17 +447,17 @@ stdenv.mkDerivation rec {
# bazel test runner include references to /bin/bash # bazel test runner include references to /bin/bash
substituteInPlace tools/build_rules/test_rules.bzl \ substituteInPlace tools/build_rules/test_rules.bzl \
--replace /bin/bash ${bash}/bin/bash --replace /bin/bash ${bashWithDefaultShellUtils}/bin/bash
for i in $(find tools/cpp/ -type f) for i in $(find tools/cpp/ -type f)
do do
substituteInPlace $i \ substituteInPlace $i \
--replace /bin/bash ${bash}/bin/bash --replace /bin/bash ${bashWithDefaultShellUtils}/bin/bash
done done
# Fixup scripts that generate scripts. Not fixed up by patchShebangs below. # Fixup scripts that generate scripts. Not fixed up by patchShebangs below.
substituteInPlace scripts/bootstrap/compile.sh \ substituteInPlace scripts/bootstrap/compile.sh \
--replace /bin/bash ${bash}/bin/bash --replace /bin/bash ${bashWithDefaultShellUtils}/bin/bash
# add nix environment vars to .bazelrc # add nix environment vars to .bazelrc
cat >> .bazelrc <<EOF cat >> .bazelrc <<EOF

View File

@ -16,14 +16,14 @@
rustPlatform.buildRustPackage rec { rustPlatform.buildRustPackage rec {
pname = "cargo-update"; pname = "cargo-update";
version = "13.1.0"; version = "13.2.0";
src = fetchCrate { src = fetchCrate {
inherit pname version; inherit pname version;
sha256 = "sha256-2j35R7QTn7Z3yqzOU+VWAoZfYodecDt45Plx/D7+GyU="; sha256 = "sha256-yMHGn/RPtYuxS3rHzm87mW7nBUEaSOGsCT7Ckxvhabk=";
}; };
cargoHash = "sha256-OEv9LOep4YNWY7oixY5zD9QgxqSYTrcf5oSXpxvnKIs="; cargoHash = "sha256-hO2W0NRV9fGHnnS1kOkQ+e0sFzVSBQk3MOm8qDYbA00=";
nativeBuildInputs = [ nativeBuildInputs = [
cmake cmake

View File

@ -2,18 +2,18 @@
rustPlatform.buildRustPackage rec { rustPlatform.buildRustPackage rec {
pname = "viceroy"; pname = "viceroy";
version = "0.9.2"; version = "0.9.3";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "fastly"; owner = "fastly";
repo = pname; repo = pname;
rev = "v${version}"; rev = "v${version}";
hash = "sha256-vMyNsLXMJk8MTiZYRiGQpOLZfeJbKlYcG1U8xTQIty0="; hash = "sha256-LOm4d6SV5rlb7NovhSp7V0JIaOfHIZOqeIcpIvTsZsA=";
}; };
buildInputs = lib.optional stdenv.isDarwin Security; buildInputs = lib.optional stdenv.isDarwin Security;
cargoHash = "sha256-+v2P9ISSA7Xy5fTjfVNETAStPo19dLxv5K57MC/GU4E="; cargoHash = "sha256-Pz+jA4uC/40mj5Jn/lB+XcoN/QSD23iLwsEowTUI0pg=";
cargoTestFlags = [ cargoTestFlags = [
"--package viceroy-lib" "--package viceroy-lib"

View File

@ -2,13 +2,13 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
name = "yarn-berry"; name = "yarn-berry";
version = "3.4.1"; version = "4.0.1";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "yarnpkg"; owner = "yarnpkg";
repo = "berry"; repo = "berry";
rev = "@yarnpkg/cli/${version}"; rev = "@yarnpkg/cli/${version}";
hash = "sha256-eBBB/F+mnGi93Qf23xgt306/ogoV76RXOM90O14u5Tw="; hash = "sha256-9QNeXamNqRx+Bfg8nAhnImPuNFyqrHIs1eF9prSwIR4=";
}; };
buildInputs = [ buildInputs = [
@ -33,11 +33,11 @@ stdenv.mkDerivation rec {
runHook postInstall runHook postInstall
''; '';
meta = with lib; { meta = with lib; {
homepage = "https://yarnpkg.com/"; homepage = "https://yarnpkg.com/";
description = "Fast, reliable, and secure dependency management."; description = "Fast, reliable, and secure dependency management.";
license = licenses.bsd2; license = licenses.bsd2;
maintainers = with maintainers; [ ryota-ka ]; maintainers = with maintainers; [ ryota-ka thehedgeh0g ];
platforms = platforms.unix; platforms = platforms.unix;
}; };
} }

View File

@ -3,22 +3,22 @@
let let
pname = "anki-bin"; pname = "anki-bin";
# Update hashes for both Linux and Darwin! # Update hashes for both Linux and Darwin!
version = "23.10"; version = "23.10.1";
sources = { sources = {
linux = fetchurl { linux = fetchurl {
url = "https://github.com/ankitects/anki/releases/download/${version}/anki-${version}-linux-qt6.tar.zst"; url = "https://github.com/ankitects/anki/releases/download/${version}/anki-${version}-linux-qt6.tar.zst";
sha256 = "sha256-dfL95UKu6kwD4WHLtXlIdkf5UItEtW2WCAKP7YGlCtc="; sha256 = "sha256-Kv0SH+bLnBSM/tYHe2kEJc4n7izZTBNWQs2nm/teLEU=";
}; };
# For some reason anki distributes completely separate dmg-files for the aarch64 version and the x86_64 version # For some reason anki distributes completely separate dmg-files for the aarch64 version and the x86_64 version
darwin-x86_64 = fetchurl { darwin-x86_64 = fetchurl {
url = "https://github.com/ankitects/anki/releases/download/${version}/anki-${version}-mac-intel-qt6.dmg"; url = "https://github.com/ankitects/anki/releases/download/${version}/anki-${version}-mac-intel-qt6.dmg";
sha256 = "sha256-Y8BZ7EA6Dn4+5kMCFyuXi17XDLn9YRxqVGautt9WUOo="; sha256 = "sha256-MSlKsEv4N/H7G1bUOBlPBXerpHIW32P6Va02aRq1+54=";
}; };
darwin-aarch64 = fetchurl { darwin-aarch64 = fetchurl {
url = "https://github.com/ankitects/anki/releases/download/${version}/anki-${version}-mac-apple-qt6.dmg"; url = "https://github.com/ankitects/anki/releases/download/${version}/anki-${version}-mac-apple-qt6.dmg";
sha256 = "sha256-IrKWJ16gMCR2MH8dgYUCtMj6mDQP18+HQr17hfekPIs="; sha256 = "sha256-jEm9WJBXx77KpldzBuxK1Pu6VGiARZPnRmMhEjZdm1I=";
}; };
}; };
@ -45,7 +45,7 @@ let
meta = with lib; { meta = with lib; {
inherit (anki.meta) license homepage description longDescription; inherit (anki.meta) license homepage description longDescription;
platforms = [ "x86_64-linux" "x86_64-darwin" "aarch64-darwin" ]; platforms = [ "x86_64-linux" "x86_64-darwin" "aarch64-darwin" ];
maintainers = with maintainers; [ mahmoudk1000 atemu ]; maintainers = with maintainers; [ mahmoudk1000 ];
}; };
passthru = { inherit sources; }; passthru = { inherit sources; };

View File

@ -5,11 +5,11 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "pdns-recursor"; pname = "pdns-recursor";
version = "4.9.1"; version = "4.9.2";
src = fetchurl { src = fetchurl {
url = "https://downloads.powerdns.com/releases/pdns-recursor-${version}.tar.bz2"; url = "https://downloads.powerdns.com/releases/pdns-recursor-${version}.tar.bz2";
sha256 = "sha256-Ch7cE+jyvWYfOeMWOH2UHiLeagO4p6L8Zi/fi5Quor4="; sha256 = "sha256-TLgYBFjs+1KKPZo0uihEts0u1pyhxGHd4koOvWaCkUQ=";
}; };
nativeBuildInputs = [ pkg-config ]; nativeBuildInputs = [ pkg-config ];

View File

@ -0,0 +1,46 @@
#!/usr/bin/env python3
"""Check that Home Assistant manifest.json requirements are satisfied.

Every manifest file passed on the command line is read, and each entry
in its "requirements" list is parsed as a PEP 508 requirement string
and checked against the distributions installed in the current
environment.  The script exits non-zero when a dependency is missing
or its version falls outside the requested specifier.
"""
import json
import sys

import importlib_metadata
from packaging.requirements import Requirement


def check_requirement(req: str) -> bool:
    """Return True when the single requirement string ``req`` is met."""
    # https://packaging.pypa.io/en/stable/requirements.html
    requirement = Requirement(req)

    try:
        version = importlib_metadata.distribution(requirement.name).version
    except importlib_metadata.PackageNotFoundError:
        print(f" - Dependency {requirement.name} is missing", file=sys.stderr)
        return False

    # https://packaging.pypa.io/en/stable/specifiers.html
    if version not in requirement.specifier:
        print(
            f" - {requirement.name}{requirement.specifier} expected, but got {version}",
            file=sys.stderr,
        )
        return False

    return True


def check_manifest(manifest_file: str) -> None:
    """Validate one manifest.json; exit(1) when any requirement is unmet.

    Manifests without a "requirements" key are accepted silently.
    """
    with open(manifest_file) as fd:
        manifest = json.load(fd)

    if "requirements" in manifest:
        ok = True
        for requirement in manifest["requirements"]:
            ok &= check_requirement(requirement)
        if not ok:
            print("Manifest requirements are not met", file=sys.stderr)
            sys.exit(1)


if __name__ == "__main__":
    if len(sys.argv) < 2:
        raise RuntimeError(f"Usage {sys.argv[0]} <manifest> [<manifest> ...]")

    # The setup hook may pass several manifest paths at once; previously
    # only sys.argv[1] was checked and the rest were silently ignored.
    for manifest_file in sys.argv[1:]:
        check_manifest(manifest_file)

View File

@ -0,0 +1,38 @@
{ lib
, home-assistant
, makeSetupHook
}:
{ pname
, version
, format ? "other"
, ...
}@args:
let
manifestRequirementsCheckHook = import ./manifest-requirements-check-hook.nix {
inherit makeSetupHook;
inherit (home-assistant) python;
};
in
home-assistant.python.pkgs.buildPythonPackage (
{
inherit format;
installPhase = ''
runHook preInstall
mkdir $out
cp -r $src/custom_components/ $out/
runHook postInstall
'';
nativeCheckInputs = with home-assistant.python.pkgs; [
importlib-metadata
manifestRequirementsCheckHook
packaging
] ++ (args.nativeCheckInputs or []);
} // builtins.removeAttrs args [ "nativeCheckInputs" ]
)

View File

@ -0,0 +1,11 @@
{ python
, makeSetupHook
}:
makeSetupHook {
name = "manifest-requirements-check-hook";
substitutions = {
pythonCheckInterpreter = python.interpreter;
checkManifest = ./check_manifest.py;
};
} ./manifest-requirements-check-hook.sh

View File

@ -0,0 +1,25 @@
# Setup hook to check HA manifest requirements
echo "Sourcing manifest-requirements-check-hook"
function manifestCheckPhase() {
echo "Executing manifestCheckPhase"
runHook preCheck
manifests=$(shopt -s nullglob; echo $out/custom_components/*/manifest.json)
if [ ! -z "$manifests" ]; then
echo Checking manifests $manifests
@pythonCheckInterpreter@ @checkManifest@ $manifests
else
echo "No custom component manifests found in $out" >&2
exit 1
fi
runHook postCheck
echo "Finished executing manifestCheckPhase"
}
if [ -z "${dontCheckManifest-}" ] && [ -z "${installCheckPhase-}" ]; then
echo "Using manifestCheckPhase"
preDistPhases+=" manifestCheckPhase"
fi

View File

@ -2,12 +2,13 @@
# Do not edit! # Do not edit!
{ {
version = "2023.11.1"; version = "2023.11.2";
components = { components = {
"3_day_blinds" = ps: with ps; [ "3_day_blinds" = ps: with ps; [
]; ];
"abode" = ps: with ps; [ "abode" = ps: with ps; [
jaraco-abode jaraco-abode
jaraco-functools
]; ];
"accuweather" = ps: with ps; [ "accuweather" = ps: with ps; [
accuweather accuweather

View File

@ -0,0 +1,57 @@
# Packaging guidelines
## buildHomeAssistantComponent
Custom components should be packaged using the
`buildHomeAssistantComponent` function, which is provided at top-level.
It builds upon `buildPythonPackage` but uses a custom install and check
phase.
Python runtime dependencies can be directly consumed as unqualified
function arguments. Pass them into `propagatedBuildInputs`, for them to
be available to Home Assistant.
Out-of-tree components need to use python packages from
`home-assistant.python.pkgs` as to not introduce conflicting package
versions into the Python environment.
**Example Boilerplate:**
```nix
{ lib
, buildHomeAssistantComponent
, fetchFromGitHub
}:
buildHomeAssistantComponent {
# pname, version
src = fetchFromGitHub {
# owner, repo, rev, hash
};
propagatedBuildInputs = [
# python requirements, as specified in manifest.json
];
meta = with lib; {
# changelog, description, homepage, license, maintainers
  };
}
```
## Package name normalization
Apply the same normalization rules as defined for python packages in
[PEP503](https://peps.python.org/pep-0503/#normalized-names).
The name should be lowercased and dots, underscores or multiple
dashes should all be replaced by a single dash.
## Manifest check
The `buildHomeAssistantComponent` builder uses a hook to check whether
the dependencies specified in the `manifest.json` are present and
inside the specified version range.
There shouldn't be a need to disable this hook, but you can set
`dontCheckManifest` to `true` in the derivation to achieve that.

View File

@ -0,0 +1,6 @@
{ callPackage
}:
{
prometheus-sensor = callPackage ./prometheus-sensor {};
}

View File

@ -0,0 +1,26 @@
{ lib
, fetchFromGitHub
, buildHomeAssistantComponent
}:
buildHomeAssistantComponent rec {
pname = "prometheus-sensor";
version = "1.0.0";
src = fetchFromGitHub {
owner = "mweinelt";
repo = "ha-prometheus-sensor";
rev = "refs/tags/${version}";
hash = "sha256-10COLFXvmpm8ONLyx5c0yiQdtuP0SC2NKq/ZYHro9II=";
};
dontBuild = true;
meta = with lib; {
changelog = "https://github.com/mweinelt/ha-prometheus-sensor/blob/${version}/CHANGELOG.md";
description = "Import prometheus query results into Home Assistant";
homepage = "https://github.com/mweinelt/ha-prometheus-sensor";
maintainers = with maintainers; [ hexa ];
license = licenses.mit;
};
}

View File

@ -0,0 +1,13 @@
# Packaging guidelines
## Entrypoint
Every lovelace module has an entrypoint in the form of a `.js` file. By
default the NixOS module will try to load `${pname}.js` when a module is
configured.
The entrypoint used can be overridden in `passthru` like this:
```nix
passthru.entrypoint = "demo-card-bundle.js";
```

View File

@ -0,0 +1,8 @@
{ callPackage
}:
{
mini-graph-card = callPackage ./mini-graph-card {};
mini-media-player = callPackage ./mini-media-player {};
}

View File

@ -0,0 +1,38 @@
{ lib
, buildNpmPackage
, fetchFromGitHub
}:
buildNpmPackage rec {
pname = "mini-graph-card";
version = "0.11.0";
src = fetchFromGitHub {
owner = "kalkih";
repo = "mini-graph-card";
rev = "refs/tags/v${version}";
hash = "sha256-AC4VawRtWTeHbFqDJ6oQchvUu08b4F3ManiPPXpyGPc=";
};
npmDepsHash = "sha256-0ErOTkcCnMqMTsTkVL320SxZaET/izFj9GiNWC2tQtQ=";
installPhase = ''
runHook preInstall
mkdir $out
cp -v dist/mini-graph-card-bundle.js $out/
runHook postInstall
'';
passthru.entrypoint = "mini-graph-card-bundle.js";
meta = with lib; {
changelog = "https://github.com/kalkih/mini-graph-card/releases/tag/v${version}";
description = "Minimalistic graph card for Home Assistant Lovelace UI";
homepage = "https://github.com/kalkih/mini-graph-card";
maintainers = with maintainers; [ hexa ];
license = licenses.mit;
};
}

View File

@ -0,0 +1,37 @@
{ lib
, buildNpmPackage
, fetchFromGitHub
}:
buildNpmPackage rec {
pname = "mini-media-player";
version = "1.16.5";
src = fetchFromGitHub {
owner = "kalkih";
repo = "mini-media-player";
rev = "v${version}";
hash = "sha256-ydkY7Qx2GMh4CpvvBAQubJ7PlxSscDZRJayn82bOczM=";
};
npmDepsHash = "sha256-v9NvZOrQPMOoG3LKACnu79jKgZtcnGiopWad+dFbplw=";
installPhase = ''
runHook preInstall
mkdir $out
cp -v ./dist/mini-media-player-bundle.js $out/
runHook postInstall
'';
passthru.entrypoint = "mini-media-player-bundle.js";
meta = with lib; {
changelog = "https://github.com/kalkih/mini-media-player/releases/tag/v${version}";
description = "Minimalistic media card for Home Assistant Lovelace UI";
homepage = "https://github.com/kalkih/mini-media-player";
license = licenses.mit;
maintainers = with maintainers; [ hexa ];
};
}

View File

@ -3,7 +3,6 @@
, callPackage , callPackage
, fetchFromGitHub , fetchFromGitHub
, fetchPypi , fetchPypi
, fetchpatch
, python311 , python311
, substituteAll , substituteAll
, ffmpeg-headless , ffmpeg-headless
@ -193,6 +192,15 @@ let
}; };
}); });
psutil = super.psutil.overridePythonAttrs (oldAttrs: rec {
version = "5.9.6";
src = fetchPypi {
pname = "psutil";
inherit version;
hash = "sha256-5Lkt3NfdTN0/kAGA6h4QSTLHvOI0+4iXbio7KWRBIlo=";
};
});
py-synologydsm-api = super.py-synologydsm-api.overridePythonAttrs (oldAttrs: rec { py-synologydsm-api = super.py-synologydsm-api.overridePythonAttrs (oldAttrs: rec {
version = "2.1.4"; version = "2.1.4";
src = fetchFromGitHub { src = fetchFromGitHub {
@ -310,17 +318,6 @@ let
doCheck = false; doCheck = false;
}); });
# Pinned due to API changes in 0.3.0
tailscale = super.tailscale.overridePythonAttrs (oldAttrs: rec {
version = "0.2.0";
src = fetchFromGitHub {
owner = "frenck";
repo = "python-tailscale";
rev = "refs/tags/v${version}";
hash = "sha256-/tS9ZMUWsj42n3MYPZJYJELzX3h02AIHeRZmD2SuwWE=";
};
});
# Pinned due to API changes ~1.0 # Pinned due to API changes ~1.0
vultr = super.vultr.overridePythonAttrs (oldAttrs: rec { vultr = super.vultr.overridePythonAttrs (oldAttrs: rec {
version = "0.1.2"; version = "0.1.2";
@ -356,7 +353,7 @@ let
extraBuildInputs = extraPackages python.pkgs; extraBuildInputs = extraPackages python.pkgs;
# Don't forget to run parse-requirements.py after updating # Don't forget to run parse-requirements.py after updating
hassVersion = "2023.11.1"; hassVersion = "2023.11.2";
in python.pkgs.buildPythonApplication rec { in python.pkgs.buildPythonApplication rec {
pname = "homeassistant"; pname = "homeassistant";
@ -372,7 +369,7 @@ in python.pkgs.buildPythonApplication rec {
# Primary source is the pypi sdist, because it contains translations # Primary source is the pypi sdist, because it contains translations
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit pname version;
hash = "sha256-4OIvY6blun++7JDY+B0Cjrr4yNgnjTd8G55SWkhS3Cs="; hash = "sha256-cnneRq0hIyvgKo0du/52ze0IVs8TgTPNQM3T1kyy03s=";
}; };
# Secondary source is git for tests # Secondary source is git for tests
@ -380,7 +377,7 @@ in python.pkgs.buildPythonApplication rec {
owner = "home-assistant"; owner = "home-assistant";
repo = "core"; repo = "core";
rev = "refs/tags/${version}"; rev = "refs/tags/${version}";
hash = "sha256-Z/CV1sGdJsdc4OxUZulC0boHaMP7WpajbY8Y6R9Q//I="; hash = "sha256-OljfYmlXSJVoWWsd4jcSF4nI/FXHqRA8e4LN5AaPVv8=";
}; };
nativeBuildInputs = with python.pkgs; [ nativeBuildInputs = with python.pkgs; [
@ -396,17 +393,14 @@ in python.pkgs.buildPythonApplication rec {
# leave this in, so users don't have to constantly update their downstream patch handling # leave this in, so users don't have to constantly update their downstream patch handling
patches = [ patches = [
# Follow symlinks in /var/lib/hass/www
./patches/static-symlinks.patch
# Patch path to ffmpeg binary
(substituteAll { (substituteAll {
src = ./patches/ffmpeg-path.patch; src = ./patches/ffmpeg-path.patch;
ffmpeg = "${lib.getBin ffmpeg-headless}/bin/ffmpeg"; ffmpeg = "${lib.getBin ffmpeg-headless}/bin/ffmpeg";
}) })
(fetchpatch {
# freeze time in litterrobot tests
# https://github.com/home-assistant/core/pull/103444
name = "home-assistant-litterrobot-freeze-test-time.patch";
url = "https://github.com/home-assistant/core/commit/806205952ff863e2cf1875be406ea0254be5f13a.patch";
hash = "sha256-OVbmJWy275nYWrif9awAGIYlgZqrRPcYBhB0Vil8rmk=";
})
]; ];
postPatch = let postPatch = let
@ -526,6 +520,8 @@ in python.pkgs.buildPythonApplication rec {
"--deselect=tests/helpers/test_entity_registry.py::test_get_or_create_updates_data" "--deselect=tests/helpers/test_entity_registry.py::test_get_or_create_updates_data"
# AssertionError: assert 2 == 1 # AssertionError: assert 2 == 1
"--deselect=tests/helpers/test_entity_values.py::test_override_single_value" "--deselect=tests/helpers/test_entity_values.py::test_override_single_value"
# AssertionError: assert 'WARNING' not in '2023-11-10 ...nt abc[L]>\n'"
"--deselect=tests/helpers/test_script.py::test_multiple_runs_repeat_choose"
# tests are located in tests/ # tests are located in tests/
"tests" "tests"
]; ];

View File

@ -4,7 +4,7 @@ buildPythonPackage rec {
# the frontend version corresponding to a specific home-assistant version can be found here # the frontend version corresponding to a specific home-assistant version can be found here
# https://github.com/home-assistant/home-assistant/blob/master/homeassistant/components/frontend/manifest.json # https://github.com/home-assistant/home-assistant/blob/master/homeassistant/components/frontend/manifest.json
pname = "home-assistant-frontend"; pname = "home-assistant-frontend";
version = "20231030.1"; version = "20231030.2";
format = "wheel"; format = "wheel";
src = fetchPypi { src = fetchPypi {
@ -12,7 +12,7 @@ buildPythonPackage rec {
pname = "home_assistant_frontend"; pname = "home_assistant_frontend";
dist = "py3"; dist = "py3";
python = "py3"; python = "py3";
hash = "sha256-S363j7HnOxLqCBaml1Kb9xfY0AaqBIgj09NutByn6Xo="; hash = "sha256-qzodzqWpAXZjwBJkiCyBi5zzfpEqqtauJn2PKZ5UtJ0=";
}; };
# there is nothing to strip in this package # there is nothing to strip in this package

View File

@ -56,6 +56,15 @@ EXTRA_COMPONENT_DEPS = {
], ],
} }
# Sometimes we have unstable versions for libraries that are not
# well-maintained. This allows us to mark our weird version as newer
# than a certain wanted version
OUR_VERSION_IS_NEWER_THAN = {
"blinkstick": "1.2.0",
"gps3": "0.33.3",
"pybluez": "0.22",
}
def run_sync(cmd: List[str]) -> None: def run_sync(cmd: List[str]) -> None:
@ -226,7 +235,12 @@ def main() -> None:
Version.parse(our_version) Version.parse(our_version)
except InvalidVersion: except InvalidVersion:
print(f"Attribute {attr_name} has invalid version specifier {our_version}", file=sys.stderr) print(f"Attribute {attr_name} has invalid version specifier {our_version}", file=sys.stderr)
attr_outdated = True
# allow specifying that our unstable version is newer than some version
if newer_than_version := OUR_VERSION_IS_NEWER_THAN.get(attr_name):
attr_outdated = Version.parse(newer_than_version) < Version.parse(required_version)
else:
attr_outdated = True
else: else:
attr_outdated = Version.parse(our_version) < Version.parse(required_version) attr_outdated = Version.parse(our_version) < Version.parse(required_version)
finally: finally:

View File

@ -0,0 +1,37 @@
diff --git a/homeassistant/components/frontend/__init__.py b/homeassistant/components/frontend/__init__.py
index 2ec991750f..9a937006ce 100644
--- a/homeassistant/components/frontend/__init__.py
+++ b/homeassistant/components/frontend/__init__.py
@@ -383,7 +383,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
local = hass.config.path("www")
if os.path.isdir(local):
- hass.http.register_static_path("/local", local, not is_dev)
+ hass.http.register_static_path("/local", local, not is_dev, follow_symlinks=True)
# Can be removed in 2023
hass.http.register_redirect("/config/server_control", "/developer-tools/yaml")
diff --git a/homeassistant/components/http/__init__.py b/homeassistant/components/http/__init__.py
index 122b7b79ce..3cf2b7e0db 100644
--- a/homeassistant/components/http/__init__.py
+++ b/homeassistant/components/http/__init__.py
@@ -411,16 +411,16 @@ class HomeAssistantHTTP:
)
def register_static_path(
- self, url_path: str, path: str, cache_headers: bool = True
+ self, url_path: str, path: str, cache_headers: bool = True, follow_symlinks: bool = False
) -> None:
"""Register a folder or file to serve as a static path."""
if os.path.isdir(path):
if cache_headers:
resource: CachingStaticResource | web.StaticResource = (
- CachingStaticResource(url_path, path)
+ CachingStaticResource(url_path, path, follow_symlinks=follow_symlinks)
)
else:
- resource = web.StaticResource(url_path, path)
+ resource = web.StaticResource(url_path, path, follow_symlinks=follow_symlinks)
self.app.router.register_resource(resource)
self.app["allow_configured_cors"](resource)
return

View File

@ -8,7 +8,7 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "homeassistant-stubs"; pname = "homeassistant-stubs";
version = "2023.11.1"; version = "2023.11.2";
format = "pyproject"; format = "pyproject";
disabled = python.version != home-assistant.python.version; disabled = python.version != home-assistant.python.version;
@ -17,7 +17,7 @@ buildPythonPackage rec {
owner = "KapJI"; owner = "KapJI";
repo = "homeassistant-stubs"; repo = "homeassistant-stubs";
rev = "refs/tags/${version}"; rev = "refs/tags/${version}";
hash = "sha256-eLmWOMKLzhZ7M/gdUHhlDZ3T+N4h5aHxMwOI8ZUepps="; hash = "sha256-stVfFXb5QfC+wZUSk53+jt/hb8kO1gCcgeOnHHpNlWE=";
}; };
nativeBuildInputs = [ nativeBuildInputs = [

View File

@ -1,8 +1,10 @@
{ lib, stdenv { lib
, stdenv
, bzip2 , bzip2
, cmake , cmake
, doxygen , doxygen
, fetchurl , fetchurl
, fetchpatch
, fuse , fuse
, libevent , libevent
, xz , xz
@ -23,6 +25,15 @@ stdenv.mkDerivation rec {
sha256 = "0pwsj9rf6a6q7cnfbpcrfq2gjcy7sylqzqqr49g2zi39lrrh8533"; sha256 = "0pwsj9rf6a6q7cnfbpcrfq2gjcy7sylqzqqr49g2zi39lrrh8533";
}; };
patches = [
# this patch fixes the build for glibc >= 2.38
(fetchpatch {
name = "strlcpy-glibc238.patch";
url = "https://bugs.debian.org/cgi-bin/bugreport.cgi?att=0;bug=1052360;msg=10";
hash = "sha256-uhQj+ZcHCV36Tm0pF/+JG59bSaRdTZCrMcKL3YhZTk8=";
})
];
nativeBuildInputs = [ cmake doxygen pkg-config ]; nativeBuildInputs = [ cmake doxygen pkg-config ];
buildInputs = [ bzip2 fuse libevent xz openssl systemd tcp_wrappers zlib c-ares ]; buildInputs = [ bzip2 fuse libevent xz openssl systemd tcp_wrappers zlib c-ares ];

View File

@ -1,13 +1,12 @@
{ stdenv { stdenv
, lib , lib
, fetchFromGitHub , fetchFromGitHub
, fetchpatch
, gitUpdater , gitUpdater
, testers , testers
, cmake , cmake
, pkg-config , pkg-config
, python3 , python3
, doxygen
, libxslt
, boost , boost
, egl-wayland , egl-wayland
, freetype , freetype
@ -40,15 +39,25 @@
stdenv.mkDerivation (finalAttrs: { stdenv.mkDerivation (finalAttrs: {
pname = "mir"; pname = "mir";
version = "2.14.1"; version = "2.15.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "MirServer"; owner = "MirServer";
repo = "mir"; repo = "mir";
rev = "v${finalAttrs.version}"; rev = "v${finalAttrs.version}";
hash = "sha256-IEGeZVNxwzHn5GASCyjNuQsnCzzfQBHdC33MWVMeZws="; hash = "sha256-c1+gxzLEtNCjR/mx76O5QElQ8+AO4WsfcG7Wy1+nC6E=";
}; };
patches = [
# Fix gbm-kms tests
# Remove when version > 2.15.0
(fetchpatch {
name = "0001-mir-Fix-the-signature-of-drmModeCrtcSetGamma.patch";
url = "https://github.com/MirServer/mir/commit/98250e9c32c5b9b940da2fb0a32d8139bbc68157.patch";
hash = "sha256-tTtOHGNue5rsppOIQSfkOH5sVfFSn/KPGHmubNlRtLI=";
})
];
postPatch = '' postPatch = ''
# Fix scripts that get run in tests # Fix scripts that get run in tests
patchShebangs tools/detect_fd_leaks.bash tests/acceptance-tests/wayland-generator/test_wayland_generator.sh.in patchShebangs tools/detect_fd_leaks.bash tests/acceptance-tests/wayland-generator/test_wayland_generator.sh.in
@ -73,21 +82,13 @@ stdenv.mkDerivation (finalAttrs: {
substituteInPlace src/platform/graphics/CMakeLists.txt \ substituteInPlace src/platform/graphics/CMakeLists.txt \
--replace "/usr/include/drm/drm_fourcc.h" "${lib.getDev libdrm}/include/libdrm/drm_fourcc.h" \ --replace "/usr/include/drm/drm_fourcc.h" "${lib.getDev libdrm}/include/libdrm/drm_fourcc.h" \
--replace "/usr/include/libdrm/drm_fourcc.h" "${lib.getDev libdrm}/include/libdrm/drm_fourcc.h" --replace "/usr/include/libdrm/drm_fourcc.h" "${lib.getDev libdrm}/include/libdrm/drm_fourcc.h"
# Fix date in generated docs not honouring SOURCE_DATE_EPOCH
# Install docs to correct dir
substituteInPlace cmake/Doxygen.cmake \
--replace '"date"' '"date" "--date=@'"$SOURCE_DATE_EPOCH"'"' \
--replace "\''${CMAKE_INSTALL_PREFIX}/share/doc/mir-doc" "\''${CMAKE_INSTALL_DOCDIR}"
''; '';
strictDeps = true; strictDeps = true;
nativeBuildInputs = [ nativeBuildInputs = [
cmake cmake
doxygen
glib # gdbus-codegen glib # gdbus-codegen
libxslt
lttng-ust # lttng-gen-tp lttng-ust # lttng-gen-tp
pkg-config pkg-config
(python3.withPackages (ps: with ps; [ (python3.withPackages (ps: with ps; [
@ -137,9 +138,8 @@ stdenv.mkDerivation (finalAttrs: {
wlcs wlcs
]; ];
buildFlags = [ "all" "doc" ];
cmakeFlags = [ cmakeFlags = [
"-DBUILD_DOXYGEN=OFF"
"-DMIR_PLATFORM='gbm-kms;x11;eglstream-kms;wayland'" "-DMIR_PLATFORM='gbm-kms;x11;eglstream-kms;wayland'"
"-DMIR_ENABLE_TESTS=${if finalAttrs.doCheck then "ON" else "OFF"}" "-DMIR_ENABLE_TESTS=${if finalAttrs.doCheck then "ON" else "OFF"}"
# BadBufferTest.test_truncated_shm_file *doesn't* throw an error as the test expected, mark as such # BadBufferTest.test_truncated_shm_file *doesn't* throw an error as the test expected, mark as such
@ -160,7 +160,7 @@ stdenv.mkDerivation (finalAttrs: {
export XDG_RUNTIME_DIR=/tmp export XDG_RUNTIME_DIR=/tmp
''; '';
outputs = [ "out" "dev" "doc" ]; outputs = [ "out" "dev" ];
passthru = { passthru = {
tests.pkg-config = testers.testMetaPkgConfig finalAttrs.finalPackage; tests.pkg-config = testers.testMetaPkgConfig finalAttrs.finalPackage;

View File

@ -1,19 +1,47 @@
{ lib, stdenv, fetchurl, perl, php, gd, libpng, zlib, unzip, nixosTests }: { lib
, stdenv
, fetchFromGitHub
, perl
, php
, gd
, libpng
, openssl
, zlib
, unzip
, nixosTests
, nix-update-script
}:
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "nagios"; pname = "nagios";
version = "4.4.6"; version = "4.4.14";
src = fetchurl { src = fetchFromGitHub {
url = "mirror://sourceforge/nagios/nagios-4.x/${pname}-${version}/${pname}-${version}.tar.gz"; owner = "NagiosEnterprises";
sha256 = "1x5hb97zbvkm73q53ydp1gwj8nnznm72q9c4rm6ny7phr995l3db"; repo = "nagioscore";
rev = "refs/tags/nagios-${version}";
hash = "sha256-EJKMgU3Nzfefq2VXxBrfDDrQZWZvj7HqKnWR9j75fGI=";
}; };
patches = [ ./nagios.patch ]; patches = [ ./nagios.patch ];
nativeBuildInputs = [ unzip ]; nativeBuildInputs = [ unzip ];
buildInputs = [ php perl gd libpng zlib ];
configureFlags = [ "--localstatedir=/var/lib/nagios" ]; buildInputs = [
php
perl
gd
libpng
openssl
zlib
];
configureFlags = [
"--localstatedir=/var/lib/nagios"
"--with-ssl=${openssl.dev}"
"--with-ssl-inc=${openssl.dev}/include"
"--with-ssl-lib=${lib.getLib openssl}/lib"
];
buildFlags = [ "all" ]; buildFlags = [ "all" ];
# Do not create /var directories # Do not create /var directories
@ -28,15 +56,22 @@ stdenv.mkDerivation rec {
sed -i 's@/bin/@@g' $out/etc/objects/commands.cfg sed -i 's@/bin/@@g' $out/etc/objects/commands.cfg
''; '';
passthru.tests = { passthru = {
inherit (nixosTests) nagios; tests = {
inherit (nixosTests) nagios;
};
updateScript = nix-update-script {
extraArgs = [ "--version-regex" "nagios-(.*)" ];
};
}; };
meta = { meta = {
description = "A host, service and network monitoring program"; description = "A host, service and network monitoring program";
homepage = "https://www.nagios.org/"; homepage = "https://www.nagios.org/";
license = lib.licenses.gpl2; changelog = "https://github.com/NagiosEnterprises/nagioscore/blob/nagios-${version}/Changelog";
platforms = lib.platforms.linux; license = lib.licenses.gpl2;
maintainers = with lib.maintainers; [ immae thoughtpolice relrod ]; platforms = lib.platforms.linux;
mainProgram = "nagios";
maintainers = with lib.maintainers; [ immae thoughtpolice relrod anthonyroussel ];
}; };
} }

View File

@ -1,56 +0,0 @@
{ lib, stdenv, fetchurl, fetchpatch, perl, openssl, db, zlib, uwimap, html-tidy, pam}:
let
ssl = lib.optionals uwimap.withSSL
"-e 's/CCLIENT_SSL_ENABLE.*= false/CCLIENT_SSL_ENABLE=true/'";
in
stdenv.mkDerivation rec {
pname = "prayer";
version = "1.3.5";
src = fetchurl {
url = "ftp://ftp.csx.cam.ac.uk/pub/software/email/prayer/${pname}-${version}.tar.gz";
sha256 = "135fjbxjn385b6cjys6qhbwfw61mdcl2akkll4jfpdzfvhbxlyda";
};
patches = [
./install.patch
# fix build errors which result from openssl changes
(fetchpatch {
url = "https://sources.debian.org/data/main/p/prayer/1.3.5-dfsg1-6/debian/patches/disable_ssl3.patch";
sha256 = "1rx4bidc9prh4gffipykp144cyi3zd6qzd990s2aad3knzv5bkdd";
})
(fetchpatch {
url = "https://sources.debian.org/data/main/p/prayer/1.3.5-dfsg1-6/debian/patches/openssl1.1.patch";
sha256 = "0zinylvq3bcifdmki867gir49pbjx6qb5h019hawwif2l4jmlxw1";
})
];
postPatch = ''
sed -i -e s/gmake/make/ -e 's/LDAP_ENABLE.*= true/LDAP_ENABLE=false/' \
${ssl} \
-e 's/CCLIENT_LIBS=.*/CCLIENT_LIBS=-lc-client/' \
-e 's,^PREFIX .*,PREFIX='$out, \
-e 's,^CCLIENT_DIR=.*,CCLIENT_DIR=${uwimap}/include/c-client,' \
Config
sed -i -e s,/usr/bin/perl,${perl}/bin/perl, \
templates/src/*.pl
sed -i -e '/<stropts.h>/d' lib/os_linux.h
'' + /* html-tidy updates */ ''
substituteInPlace ./session/html_secure_tidy.c \
--replace buffio.h tidybuffio.h
'';
buildInputs = [ openssl db zlib uwimap html-tidy pam ];
nativeBuildInputs = [ perl ];
NIX_LDFLAGS = "-lpam";
meta = {
homepage = "http://www-uxsup.csx.cam.ac.uk/~dpc22/prayer/";
description = "Yet another Webmail interface for IMAP servers on Unix systems written in C";
license = lib.licenses.gpl2Plus;
platforms = lib.platforms.linux;
};
}

View File

@ -1,170 +0,0 @@
diff --git a/accountd/Makefile b/accountd/Makefile
index c3e8107..7946776 100644
--- a/accountd/Makefile
+++ b/accountd/Makefile
@@ -75,6 +75,6 @@ clean:
-rm -f prayer-accountd test core *.o *~ \#*\#
install:
- $(INSTALL) -m 755 -o ${RO_USER} -g ${RW_GROUP} \
+ $(INSTALL) -m 755 \
prayer-accountd ${BROOT}${BIN_DIR}
diff --git a/files/Makefile b/files/Makefile
index 743d0ed..7eff064 100644
--- a/files/Makefile
+++ b/files/Makefile
@@ -52,20 +52,20 @@ distclean:
install-cert:
if [ -f certs/prayer.pem ]; then \
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) \
+ $(INSTALL) \
-m $(PRIVATE_FILE) certs/prayer.pem ${BROOT}${PREFIX}/certs; \
fi
install-config: etc/prayer.cf
- $(INSTALL) -D -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_FILE) \
+ $(INSTALL) -D -m $(PUBLIC_FILE) \
etc/prayer.cf ${BROOT}${PRAYER_CONFIG_FILE}
install-aconfig:
- $(INSTALL) -D -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_FILE) \
+ $(INSTALL) -D -m $(PUBLIC_FILE) \
etc/prayer-accountd.cf ${BROOT}${ACCOUNTD_CONFIG_FILE}
install-motd:
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_FILE) \
+ $(INSTALL) -m $(PUBLIC_FILE) \
etc/motd.html ${BROOT}${PREFIX}/etc
install:
@@ -83,6 +83,6 @@ install:
if [ ! -f $(BROOT)$(PREFIX)/etc/motd.html ]; then $(MAKE) install-motd; fi
redhat-install-init.d:
- install -D -o root -g root -m 755 \
+ install -D -m 755 \
./init.d/prayer $(BROOT)/etc/rc.d/init.d/prayer
#chkconfig prayer --level 2345 on
diff --git a/files/install.sh b/files/install.sh
index 8d1d1f4..0804a08 100755
--- a/files/install.sh
+++ b/files/install.sh
@@ -2,8 +2,6 @@
#
# $Cambridge: hermes/src/prayer/files/install.sh,v 1.7 2008/09/16 09:59:56 dpc22 Exp $
-PATH=/bin:/sbin/:/usr/bin:/usr/sbin
-
error=0
if [ "x$PREFIX" = "x" ]; then
@@ -55,24 +53,20 @@ if [ $error != 0 ]; then
exit 1
fi
-if [ ! -d ${VAR_PREFIX} -a `whoami` = "root" ]; then
- ${INSTALL} -d -o ${RW_USER} -g ${RW_GROUP} -m ${PRIVATE_DIR} ${VAR_PREFIX}
-fi
-
if [ ! -d ${PREFIX} ]; then
- ${INSTALL} -d -o ${RO_USER} -g ${RO_GROUP} -m ${PUBLIC_DIR} ${PREFIX}
+ ${INSTALL} -d -m ${PUBLIC_DIR} ${PREFIX}
fi
if [ ! -d ${PREFIX}/etc ]; then
- ${INSTALL} -d -o ${RO_USER} -g ${RO_GROUP} -m ${PUBLIC_DIR} ${PREFIX}/etc
+ ${INSTALL} -d -m ${PUBLIC_DIR} ${PREFIX}/etc
fi
if [ ! -d ${PREFIX}/certs ]; then
- ${INSTALL} -d -o ${RO_USER} -g ${RO_GROUP} -m ${PRIVATE_DIR} ${PREFIX}/certs
+ ${INSTALL} -d -m ${PRIVATE_DIR} ${PREFIX}/certs
fi
if [ ! -d ${BIN_DIR} ]; then
- ${INSTALL} -d -o ${RO_USER} -g ${RO_GROUP} -m ${PUBLIC_DIR} ${BIN_DIR}
+ ${INSTALL} -d -m ${PUBLIC_DIR} ${BIN_DIR}
fi
for i in icons static
@@ -83,5 +77,4 @@ do
fi
echo Copying ${i}
(tar cf - ${i}) | (cd ${PREFIX} ; tar xf -)
- (cd ${PREFIX}; chown -R ${RO_USER}:${RO_GROUP} ${i})
done
diff --git a/servers/Makefile b/servers/Makefile
index 021aed5..5ccbd08 100644
--- a/servers/Makefile
+++ b/servers/Makefile
@@ -107,13 +107,13 @@ clean:
-rm -f $(BIN) core *.o *.flc *~ \#*\#
install: all
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_DIR) -d \
+ $(INSTALL) -m $(PUBLIC_DIR) -d \
$(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer $(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer-chroot $(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer-session $(BROOT)$(BIN_DIR)
prayer: $(PRAYER_OBJS) prayer_main.o
diff --git a/templates/cam/Makefile b/templates/cam/Makefile
index 9f4122a..396b628 100644
--- a/templates/cam/Makefile
+++ b/templates/cam/Makefile
@@ -124,7 +124,7 @@ _template_index.c:
$(COMPILE) $(TYPE) $@ $*
install:
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_DIR) -d \
+ $(INSTALL) -m $(PUBLIC_DIR) -d \
$(BROOT)$(PREFIX)/templates/$(TYPE)
cp *.t $(BROOT)$(PREFIX)/templates/$(TYPE)
cp *.vars $(BROOT)$(PREFIX)/templates/$(TYPE)
diff --git a/templates/old/Makefile b/templates/old/Makefile
index 31016cf..288a64c 100644
--- a/templates/old/Makefile
+++ b/templates/old/Makefile
@@ -123,7 +123,7 @@ _template_index.c:
$(COMPILE) $(TYPE) $@ $*
install:
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_DIR) -d \
+ $(INSTALL) -m $(PUBLIC_DIR) -d \
$(BROOT)$(PREFIX)/templates/$(TYPE)
cp *.t $(BROOT)$(PREFIX)/templates/$(TYPE)
cp *.vars $(BROOT)$(PREFIX)/templates/$(TYPE)
diff --git a/utils/Makefile b/utils/Makefile
index 9c79916..ef82481 100644
--- a/utils/Makefile
+++ b/utils/Makefile
@@ -72,15 +72,15 @@ clean:
-rm -f $(BIN) core *.o *.flc *~ \#*\#
install: all
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_DIR) -d \
+ $(INSTALL) -m $(PUBLIC_DIR) -d \
$(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer-ssl-prune $(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer-sem-prune $(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer-db-prune $(BROOT)$(BIN_DIR)
- $(INSTALL) -o $(RO_USER) -g $(RO_GROUP) -m $(PUBLIC_EXEC) \
+ $(INSTALL) -m $(PUBLIC_EXEC) \
prayer-cyclog $(BROOT)$(BIN_DIR)
prayer-ssl-prune: $(PRUNE_OBJS)

View File

@ -1,6 +1,7 @@
{ stdenv, writeScriptBin, makeWrapper, lib, fetchurl, git, cacert, libpng, libjpeg, libwebp { stdenv, writeScriptBin, makeWrapper, lib, fetchurl, git, cacert, libpng, libjpeg, libwebp
, erlang, openssl, expat, libyaml, bash, gnused, gnugrep, coreutils, util-linux, procps, gd , erlang, openssl, expat, libyaml, bash, gnused, gnugrep, coreutils, util-linux, procps, gd
, flock, autoreconfHook , flock, autoreconfHook
, gawk
, nixosTests , nixosTests
, withMysql ? false , withMysql ? false
, withPgsql ? false , withPgsql ? false
@ -12,7 +13,7 @@
}: }:
let let
ctlpath = lib.makeBinPath [ bash gnused gnugrep coreutils util-linux procps ]; ctlpath = lib.makeBinPath [ bash gnused gnugrep gawk coreutils util-linux procps ];
in stdenv.mkDerivation rec { in stdenv.mkDerivation rec {
pname = "ejabberd"; pname = "ejabberd";
version = "23.01"; version = "23.01";

Some files were not shown because too many files have changed in this diff Show More