Merge staging-next into staging

github-actions[bot] 2023-08-24 06:01:45 +00:00 committed by GitHub
commit 8088a97067
112 changed files with 4514 additions and 20643 deletions

View File

@ -205,6 +205,8 @@ The module update takes care of the new config syntax and the data itself (user
- `programs.gnupg.agent.pinentryFlavor` is now set in `/etc/gnupg/gpg-agent.conf`, and will no longer take precedence over a `pinentry-program` set in `~/.gnupg/gpg-agent.conf`.
- `services.influxdb2` now supports doing an automatic initial setup and provisioning of users, organizations, buckets and authentication tokens, see [#249502](https://github.com/NixOS/nixpkgs/pull/249502) for more details.
- `wrapHelm` now exposes `passthru.pluginsDir` which can be passed to `helmfile`. For convenience, a top-level package `helmfile-wrapped` has been added, which inherits `passthru.pluginsDir` from `kubernetes-helm-wrapped`. See [#217768](https://github.com/NixOS/nixpkgs/issues/217768) for details.
- `boot.initrd.network.udhcp.enable` allows control over DHCP during stage 1 regardless of what `networking.useDHCP` is set to (a configuration sketch follows below).
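
A minimal configuration sketch, assuming the option behaves as described above (the other initrd networking options shown are pre-existing NixOS options, used purely for illustration):

```nix
{
  # The booted system itself does not use DHCP...
  networking.useDHCP = false;
  # ...but stage 1 (the initrd) still brings the network up via DHCP,
  # e.g. for remote disk unlocking.
  boot.initrd.network.enable = true;
  boot.initrd.network.udhcp.enable = true;
}
```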

View File

@ -105,36 +105,25 @@ in
}
];
services.zram-generator.enable = true;
system.requiredKernelConfig = with config.lib.kernelConfig; [
(isModule "ZRAM")
];
# Disabling this for the moment, as it would create and mkswap devices twice,
# once in stage 2 boot, and again when the zram-reloader service starts.
# boot.kernelModules = [ "zram" ];
systemd.packages = [ pkgs.zram-generator ];
systemd.services."systemd-zram-setup@".path = [ pkgs.util-linux ]; # for mkswap
environment.etc."systemd/zram-generator.conf".source =
(pkgs.formats.ini { }).generate "zram-generator.conf" (lib.listToAttrs
(builtins.map
(dev: {
name = dev;
value =
let
size = "${toString cfg.memoryPercent} / 100 * ram";
in
{
zram-size = if cfg.memoryMax != null then "min(${size}, ${toString cfg.memoryMax} / 1024 / 1024)" else size;
compression-algorithm = cfg.algorithm;
swap-priority = cfg.priority;
} // lib.optionalAttrs (cfg.writebackDevice != null) {
writeback-device = cfg.writebackDevice;
};
})
devices));
services.zram-generator.settings = lib.listToAttrs
(builtins.map
(dev: {
name = dev;
value =
let
size = "${toString cfg.memoryPercent} / 100 * ram";
in
{
zram-size = if cfg.memoryMax != null then "min(${size}, ${toString cfg.memoryMax} / 1024 / 1024)" else size;
compression-algorithm = cfg.algorithm;
swap-priority = cfg.priority;
} // lib.optionalAttrs (cfg.writebackDevice != null) {
writeback-device = cfg.writebackDevice;
};
})
devices);
};

View File

@ -1172,6 +1172,7 @@
./services/system/self-deploy.nix
./services/system/systembus-notify.nix
./services/system/uptimed.nix
./services/system/zram-generator.nix
./services/torrent/deluge.nix
./services/torrent/flexget.nix
./services/torrent/magnetico.nix

View File

@ -3,34 +3,291 @@
let
inherit
(lib)
any
attrNames
attrValues
count
escapeShellArg
filterAttrs
flatten
flip
getExe
hasAttr
hasInfix
listToAttrs
literalExpression
mapAttrsToList
mdDoc
mkEnableOption
mkIf
mkOption
nameValuePair
optional
subtractLists
types
unique
;
format = pkgs.formats.json { };
cfg = config.services.influxdb2;
configFile = format.generate "config.json" cfg.settings;
validPermissions = [
"authorizations"
"buckets"
"dashboards"
"orgs"
"tasks"
"telegrafs"
"users"
"variables"
"secrets"
"labels"
"views"
"documents"
"notificationRules"
"notificationEndpoints"
"checks"
"dbrp"
"annotations"
"sources"
"scrapers"
"notebooks"
"remotes"
"replications"
];
# Determines whether at least one active api token is defined
anyAuthDefined =
flip any (attrValues cfg.provision.organizations)
(o: o.present && flip any (attrValues o.auths)
(a: a.present && a.tokenFile != null));
provisionState = pkgs.writeText "provision_state.json" (builtins.toJSON {
inherit (cfg.provision) organizations users;
});
provisioningScript = pkgs.writeShellScript "post-start-provision" ''
set -euo pipefail
export INFLUX_HOST="http://"${escapeShellArg (
if ! hasAttr "http-bind-address" cfg.settings
|| hasInfix "0.0.0.0" cfg.settings.http-bind-address
then "localhost:8086"
else cfg.settings.http-bind-address
)}
# Wait for the influxdb server to come online
count=0
while ! influx ping &>/dev/null; do
if [ "$count" -eq 300 ]; then
echo "Tried for 30 seconds, giving up..."
exit 1
fi
if ! kill -0 "$MAINPID"; then
echo "Main server died, giving up..."
exit 1
fi
sleep 0.1
count=$((count + 1))
done
# Do the initial database setup. Pass /dev/null as configs-path to
# avoid saving the token as the active config.
if test -e "$STATE_DIRECTORY/.first_startup"; then
influx setup \
--configs-path /dev/null \
--org ${escapeShellArg cfg.provision.initialSetup.organization} \
--bucket ${escapeShellArg cfg.provision.initialSetup.bucket} \
--username ${escapeShellArg cfg.provision.initialSetup.username} \
--password "$(< "$CREDENTIALS_DIRECTORY/admin-password")" \
--token "$(< "$CREDENTIALS_DIRECTORY/admin-token")" \
--retention ${toString cfg.provision.initialSetup.retention}s \
--force >/dev/null
rm -f "$STATE_DIRECTORY/.first_startup"
fi
provision_result=$(${getExe pkgs.influxdb2-provision} ${provisionState} "$INFLUX_HOST" "$(< "$CREDENTIALS_DIRECTORY/admin-token")")
if [[ "$(jq '[.auths[] | select(.action == "created")] | length' <<< "$provision_result")" -gt 0 ]]; then
echo "Created at least one new token, queueing service restart so we can manipulate secrets"
touch "$STATE_DIRECTORY/.needs_restart"
fi
'';
restarterScript = pkgs.writeShellScript "post-start-restarter" ''
set -euo pipefail
if test -e "$STATE_DIRECTORY/.needs_restart"; then
rm -f "$STATE_DIRECTORY/.needs_restart"
/run/current-system/systemd/bin/systemctl restart influxdb2
fi
'';
organizationSubmodule = types.submodule (organizationSubmod: let
org = organizationSubmod.config._module.args.name;
in {
options = {
present = mkOption {
description = mdDoc "Whether to ensure that this organization is present or absent.";
type = types.bool;
default = true;
};
description = mkOption {
description = mdDoc "Optional description for the organization.";
default = null;
type = types.nullOr types.str;
};
buckets = mkOption {
description = mdDoc "Buckets to provision in this organization.";
default = {};
type = types.attrsOf (types.submodule (bucketSubmod: let
bucket = bucketSubmod.config._module.args.name;
in {
options = {
present = mkOption {
description = mdDoc "Whether to ensure that this bucket is present or absent.";
type = types.bool;
default = true;
};
description = mkOption {
description = mdDoc "Optional description for the bucket.";
default = null;
type = types.nullOr types.str;
};
retention = mkOption {
type = types.ints.unsigned;
default = 0;
description = mdDoc "The duration in seconds for which the bucket will retain data (0 is infinite).";
};
};
}));
};
auths = mkOption {
description = mdDoc "API tokens to provision for the user in this organization.";
default = {};
type = types.attrsOf (types.submodule (authSubmod: let
auth = authSubmod.config._module.args.name;
in {
options = {
id = mkOption {
description = mdDoc "A unique identifier for this authentication token. Since influx doesn't store names for tokens, this will be hashed and appended to the description to identify the token.";
readOnly = true;
default = builtins.substring 0 32 (builtins.hashString "sha256" "${org}:${auth}");
defaultText = "<a hash derived from org and name>";
type = types.str;
};
present = mkOption {
description = mdDoc "Whether to ensure that this user is present or absent.";
type = types.bool;
default = true;
};
description = mkOption {
description = mdDoc ''
Optional description for the API token.
Note that the actual token will always be created with a description, regardless
of whether one is given here. The name plus a unique suffix is always appended
so that the token can later be identified and tracked as already created.
'';
default = null;
type = types.nullOr types.str;
};
tokenFile = mkOption {
type = types.nullOr types.path;
default = null;
description = mdDoc "The token value. If not given, influx will automatically generate one.";
};
operator = mkOption {
description = mdDoc "Grants all permissions in all organizations.";
default = false;
type = types.bool;
};
allAccess = mkOption {
description = mdDoc "Grants all permissions in the associated organization.";
default = false;
type = types.bool;
};
readPermissions = mkOption {
description = mdDoc ''
The read permissions to include for this token. Access is usually granted only
for resources in the associated organization.
Available permissions are `authorizations`, `buckets`, `dashboards`,
`orgs`, `tasks`, `telegrafs`, `users`, `variables`, `secrets`, `labels`, `views`,
`documents`, `notificationRules`, `notificationEndpoints`, `checks`, `dbrp`,
`annotations`, `sources`, `scrapers`, `notebooks`, `remotes`, `replications`.
Refer to `influx auth create --help` for a full list with descriptions.
`buckets` grants read access to all associated buckets. Use `readBuckets` to define
more granular access permissions.
'';
default = [];
type = types.listOf (types.enum validPermissions);
};
writePermissions = mkOption {
description = mdDoc ''
The write permissions to include for this token. Access is usually granted only
for resources in the associated organization.
Available permissions are `authorizations`, `buckets`, `dashboards`,
`orgs`, `tasks`, `telegrafs`, `users`, `variables`, `secrets`, `labels`, `views`,
`documents`, `notificationRules`, `notificationEndpoints`, `checks`, `dbrp`,
`annotations`, `sources`, `scrapers`, `notebooks`, `remotes`, `replications`.
Refer to `influx auth create --help` for a full list with descriptions.
`buckets` grants write access to all associated buckets. Use `writeBuckets` to define
more granular access permissions.
'';
default = [];
type = types.listOf (types.enum validPermissions);
};
readBuckets = mkOption {
description = mdDoc "The organization's buckets which should be allowed to be read";
default = [];
type = types.listOf types.str;
};
writeBuckets = mkOption {
description = mdDoc "The organization's buckets which should be allowed to be written";
default = [];
type = types.listOf types.str;
};
};
}));
};
};
});
in
{
options = {
services.influxdb2 = {
enable = mkEnableOption (lib.mdDoc "the influxdb2 server");
enable = mkEnableOption (mdDoc "the influxdb2 server");
package = mkOption {
default = pkgs.influxdb2-server;
defaultText = literalExpression "pkgs.influxdb2";
description = lib.mdDoc "influxdb2 derivation to use.";
description = mdDoc "influxdb2 derivation to use.";
type = types.package;
};
settings = mkOption {
default = { };
description = lib.mdDoc ''configuration options for influxdb2, see <https://docs.influxdata.com/influxdb/v2.0/reference/config-options> for details.'';
description = mdDoc ''configuration options for influxdb2, see <https://docs.influxdata.com/influxdb/v2.0/reference/config-options> for details.'';
type = format.type;
};
@ -41,52 +298,135 @@ in
organization = mkOption {
type = types.str;
example = "main";
description = "Primary organization name";
description = mdDoc "Primary organization name";
};
bucket = mkOption {
type = types.str;
example = "example";
description = "Primary bucket name";
description = mdDoc "Primary bucket name";
};
username = mkOption {
type = types.str;
default = "admin";
description = "Primary username";
description = mdDoc "Primary username";
};
retention = mkOption {
type = types.str;
default = "0";
description = ''
The duration for which the bucket will retain data (0 is infinite).
Accepted units are `ns` (nanoseconds), `us` or `µs` (microseconds), `ms` (milliseconds),
`s` (seconds), `m` (minutes), `h` (hours), `d` (days) and `w` (weeks).
'';
type = types.ints.unsigned;
default = 0;
description = mdDoc "The duration in seconds for which the bucket will retain data (0 is infinite).";
};
passwordFile = mkOption {
type = types.path;
description = "Password for primary user. Don't use a file from the nix store!";
description = mdDoc "Password for primary user. Don't use a file from the nix store!";
};
tokenFile = mkOption {
type = types.path;
description = "API Token to set for the admin user. Don't use a file from the nix store!";
description = mdDoc "API Token to set for the admin user. Don't use a file from the nix store!";
};
};
organizations = mkOption {
description = mdDoc "Organizations to provision.";
example = literalExpression ''
{
myorg = {
description = "My organization";
buckets.mybucket = {
description = "My bucket";
retention = 31536000; # 1 year
};
auths.mytoken = {
readBuckets = ["mybucket"];
tokenFile = "/run/secrets/mytoken";
};
};
}
'';
default = {};
type = types.attrsOf organizationSubmodule;
};
users = mkOption {
description = mdDoc "Users to provision.";
default = {};
example = literalExpression ''
{
# admin = {}; /* The initialSetup.username will automatically be added. */
myuser.passwordFile = "/run/secrets/myuser_password";
}
'';
type = types.attrsOf (types.submodule (userSubmod: let
user = userSubmod.config._module.args.name;
org = userSubmod.config.org;
in {
options = {
present = mkOption {
description = mdDoc "Whether to ensure that this user is present or absent.";
type = types.bool;
default = true;
};
passwordFile = mkOption {
description = mdDoc "Password for the user. If unset, the user will not be able to log in until a password is set by an operator! Don't use a file from the nix store!";
default = null;
type = types.nullOr types.path;
};
};
}));
};
};
};
};
config = mkIf cfg.enable {
assertions = [
{
assertion = !(hasAttr "bolt-path" cfg.settings) && !(hasAttr "engine-path" cfg.settings);
message = "services.influxdb2.config: bolt-path and engine-path should not be set as they are managed by systemd";
}
];
assertions =
[
{
assertion = !(hasAttr "bolt-path" cfg.settings) && !(hasAttr "engine-path" cfg.settings);
message = "services.influxdb2.config: bolt-path and engine-path should not be set as they are managed by systemd";
}
]
++ flatten (flip mapAttrsToList cfg.provision.organizations (orgName: org:
flip mapAttrsToList org.auths (authName: auth:
[
{
assertion = 1 == count (x: x) [
auth.operator
auth.allAccess
(auth.readPermissions != []
|| auth.writePermissions != []
|| auth.readBuckets != []
|| auth.writeBuckets != [])
];
message = "influxdb2: provision.organizations.${orgName}.auths.${authName}: The `operator` and `allAccess` options are mutually exclusive with each other and the granular permission settings.";
}
(let unknownBuckets = subtractLists (attrNames org.buckets) auth.readBuckets; in {
assertion = unknownBuckets == [];
message = "influxdb2: provision.organizations.${orgName}.auths.${authName}: Refers to invalid buckets in readBuckets: ${toString unknownBuckets}";
})
(let unknownBuckets = subtractLists (attrNames org.buckets) auth.writeBuckets; in {
assertion = unknownBuckets == [];
message = "influxdb2: provision.organizations.${orgName}.auths.${authName}: Refers to invalid buckets in writeBuckets: ${toString unknownBuckets}";
})
]
)
));
services.influxdb2.provision = mkIf cfg.provision.enable {
organizations.${cfg.provision.initialSetup.organization} = {
buckets.${cfg.provision.initialSetup.bucket} = {
inherit (cfg.provision.initialSetup) retention;
};
};
users.${cfg.provision.initialSetup.username} = {
inherit (cfg.provision.initialSetup) passwordFile;
};
};
systemd.services.influxdb2 = {
description = "InfluxDB is an open-source, distributed, time series database";
@ -111,58 +451,38 @@ in
"admin-password:${cfg.provision.initialSetup.passwordFile}"
"admin-token:${cfg.provision.initialSetup.tokenFile}"
];
ExecStartPost = mkIf cfg.provision.enable (
[provisioningScript] ++
# Only the restarter runs with elevated privileges
optional anyAuthDefined "+${restarterScript}"
);
};
path = [pkgs.influxdb2-cli];
path = [
pkgs.influxdb2-cli
pkgs.jq
];
# Mark if this is the first startup so postStart can do the initial setup
preStart = mkIf cfg.provision.enable ''
# Mark if this is the first startup so postStart can do the initial setup.
# Also extract any token secret mappings and apply them if this isn't the first start.
preStart = let
tokenPaths = listToAttrs (flatten
# For all organizations
(flip mapAttrsToList cfg.provision.organizations
# For each contained token that has a token file
(_: org: flip mapAttrsToList (filterAttrs (_: x: x.tokenFile != null) org.auths)
# Collect id -> tokenFile for the mapping
(_: auth: nameValuePair auth.id auth.tokenFile))));
tokenMappings = pkgs.writeText "token_mappings.json" (builtins.toJSON tokenPaths);
in mkIf cfg.provision.enable ''
if ! test -e "$STATE_DIRECTORY/influxd.bolt"; then
touch "$STATE_DIRECTORY/.first_startup"
else
# Manipulate provisioned api tokens if necessary
${getExe pkgs.influxdb2-token-manipulator} "$STATE_DIRECTORY/influxd.bolt" ${tokenMappings}
fi
'';
postStart = let
initCfg = cfg.provision.initialSetup;
in mkIf cfg.provision.enable (
''
set -euo pipefail
export INFLUX_HOST="http://"${escapeShellArg (cfg.settings.http-bind-address or "localhost:8086")}
# Wait for the influxdb server to come online
count=0
while ! influx ping &>/dev/null; do
if [ "$count" -eq 300 ]; then
echo "Tried for 30 seconds, giving up..."
exit 1
fi
if ! kill -0 "$MAINPID"; then
echo "Main server died, giving up..."
exit 1
fi
sleep 0.1
count=$((count + 1))
done
# Do the initial database setup. Pass /dev/null as configs-path to
# avoid saving the token as the active config.
if test -e "$STATE_DIRECTORY/.first_startup"; then
influx setup \
--configs-path /dev/null \
--org ${escapeShellArg initCfg.organization} \
--bucket ${escapeShellArg initCfg.bucket} \
--username ${escapeShellArg initCfg.username} \
--password "$(< "$CREDENTIALS_DIRECTORY/admin-password")" \
--token "$(< "$CREDENTIALS_DIRECTORY/admin-token")" \
--retention ${escapeShellArg initCfg.retention} \
--force >/dev/null
rm -f "$STATE_DIRECTORY/.first_startup"
fi
''
);
};
users.extraUsers.influxdb2 = {

View File

@ -0,0 +1,38 @@
{ config, lib, pkgs, ... }:
let
cfg = config.services.zram-generator;
settingsFormat = pkgs.formats.ini { };
in
{
meta = {
maintainers = with lib.maintainers; [ nickcao ];
};
options.services.zram-generator = {
enable = lib.mkEnableOption (lib.mdDoc "Systemd unit generator for zram devices");
package = lib.mkPackageOptionMD pkgs "zram-generator" { };
settings = lib.mkOption {
type = lib.types.submodule {
freeformType = settingsFormat.type;
};
default = { };
description = lib.mdDoc ''
Configuration for zram-generator,
see https://github.com/systemd/zram-generator for documentation.
'';
};
};
config = lib.mkIf cfg.enable {
system.requiredKernelConfig = with config.lib.kernelConfig; [
(isModule "ZRAM")
];
systemd.packages = [ cfg.package ];
systemd.services."systemd-zram-setup@".path = [ pkgs.util-linux ]; # for mkswap
environment.etc."systemd/zram-generator.conf".source = settingsFormat.generate "zram-generator.conf" cfg.settings;
};
}
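
A minimal usage sketch for the new module (the device name zram0 and the values are illustrative; the keys follow the upstream zram-generator.conf format, the same keys emitted by the zram profile above):

services.zram-generator = {
  enable = true;
  settings.zram0 = {
    zram-size = "min(ram / 2, 4096)";
    compression-algorithm = "zstd";
    swap-priority = 100;
  };
};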

View File

@ -443,10 +443,8 @@ in {
loki = handleTest ./loki.nix {};
luks = handleTest ./luks.nix {};
lvm2 = handleTest ./lvm2 {};
lxd = handleTest ./lxd.nix {};
lxd-nftables = handleTest ./lxd-nftables.nix {};
lxd = handleTest ./lxd {};
lxd-image-server = handleTest ./lxd-image-server.nix {};
lxd-ui = handleTest ./lxd-ui.nix {};
#logstash = handleTest ./logstash.nix {};
lorri = handleTest ./lorri/default.nix {};
maddy = discoverTests (import ./maddy { inherit handleTest; });

View File

@ -1,24 +0,0 @@
storage_pools:
- name: default
driver: dir
config:
source: /var/lxd-pool
networks:
- name: lxdbr0
type: bridge
config:
ipv4.address: auto
ipv6.address: none
profiles:
- name: default
devices:
eth0:
name: eth0
network: lxdbr0
type: nic
root:
path: /
pool: default
type: disk

View File

@ -6,6 +6,9 @@ import ./make-test-python.nix ({ pkgs, ...} : {
nodes.machine = { lib, ... }: {
environment.systemPackages = [ pkgs.influxdb2-cli ];
# Make sure that the service is restarted immediately if tokens need to be rewritten
# without relying on any Restart=on-failure behavior
systemd.services.influxdb2.serviceConfig.RestartSec = 6000;
services.influxdb2.enable = true;
services.influxdb2.provision = {
enable = true;
@ -15,22 +18,208 @@ import ./make-test-python.nix ({ pkgs, ...} : {
passwordFile = pkgs.writeText "admin-pw" "ExAmPl3PA55W0rD";
tokenFile = pkgs.writeText "admin-token" "verysecureadmintoken";
};
organizations.someorg = {
buckets.somebucket = {};
auths.sometoken = {
description = "some auth token";
readBuckets = ["somebucket"];
writeBuckets = ["somebucket"];
};
};
users.someuser.passwordFile = pkgs.writeText "tmp-pw" "abcgoiuhaoga";
};
specialisation.withModifications.configuration = { ... }: {
services.influxdb2.provision = {
organizations.someorg.buckets.somebucket.present = false;
organizations.someorg.auths.sometoken.present = false;
users.someuser.present = false;
organizations.myorg = {
description = "Myorg description";
buckets.mybucket = {
description = "Mybucket description";
};
auths.mytoken = {
operator = true;
description = "operator token";
tokenFile = pkgs.writeText "tmp-tok" "someusertoken";
};
};
users.myuser.passwordFile = pkgs.writeText "tmp-pw" "abcgoiuhaoga";
};
};
specialisation.withParentDelete.configuration = { ... }: {
services.influxdb2.provision = {
organizations.someorg.present = false;
# Deleting the parent implies:
#organizations.someorg.buckets.somebucket.present = false;
#organizations.someorg.auths.sometoken.present = false;
};
};
specialisation.withNewTokens.configuration = { ... }: {
services.influxdb2.provision = {
organizations.default = {
auths.operator = {
operator = true;
description = "new optoken";
tokenFile = pkgs.writeText "tmp-tok" "newoptoken";
};
auths.allaccess = {
operator = true;
description = "new allaccess";
tokenFile = pkgs.writeText "tmp-tok" "newallaccess";
};
auths.specifics = {
description = "new specifics";
readPermissions = ["users" "tasks"];
writePermissions = ["tasks"];
tokenFile = pkgs.writeText "tmp-tok" "newspecificstoken";
};
};
};
};
};
testScript = { nodes, ... }:
let
specialisations = "${nodes.machine.system.build.toplevel}/specialisation";
tokenArg = "--token verysecureadmintoken";
in ''
def assert_contains(haystack, needle):
if needle not in haystack:
print("The haystack that will cause the following exception is:")
print("---")
print(haystack)
print("---")
raise Exception(f"Expected string '{needle}' was not found")
def assert_lacks(haystack, needle):
if needle in haystack:
print("The haystack that will cause the following exception is:")
print("---")
print(haystack, end="")
print("---")
raise Exception(f"Unexpected string '{needle}' was found")
machine.wait_for_unit("influxdb2.service")
machine.fail("curl --fail -X POST 'http://localhost:8086/api/v2/signin' -u admin:wrongpassword")
machine.succeed("curl --fail -X POST 'http://localhost:8086/api/v2/signin' -u admin:ExAmPl3PA55W0rD")
out = machine.succeed("influx org list ${tokenArg}")
assert "default" in out
assert_contains(out, "default")
assert_lacks(out, "myorg")
assert_contains(out, "someorg")
out = machine.succeed("influx bucket list ${tokenArg} --org default")
assert "default" in out
assert_contains(out, "default")
machine.fail("influx bucket list ${tokenArg} --org myorg")
out = machine.succeed("influx bucket list ${tokenArg} --org someorg")
assert_contains(out, "somebucket")
out = machine.succeed("influx user list ${tokenArg}")
assert_contains(out, "admin")
assert_lacks(out, "myuser")
assert_contains(out, "someuser")
out = machine.succeed("influx auth list ${tokenArg}")
assert_lacks(out, "operator token")
assert_contains(out, "some auth token")
with subtest("withModifications"):
machine.succeed('${specialisations}/withModifications/bin/switch-to-configuration test')
machine.wait_for_unit("influxdb2.service")
out = machine.succeed("influx org list ${tokenArg}")
assert_contains(out, "default")
assert_contains(out, "myorg")
assert_contains(out, "someorg")
out = machine.succeed("influx bucket list ${tokenArg} --org myorg")
assert_contains(out, "mybucket")
out = machine.succeed("influx bucket list ${tokenArg} --org someorg")
assert_lacks(out, "somebucket")
out = machine.succeed("influx user list ${tokenArg}")
assert_contains(out, "admin")
assert_contains(out, "myuser")
assert_lacks(out, "someuser")
out = machine.succeed("influx auth list ${tokenArg}")
assert_contains(out, "operator token")
assert_lacks(out, "some auth token")
# Make sure the user token is also usable
machine.succeed("influx auth list --token someusertoken")
with subtest("keepsUnrelated"):
machine.succeed('${nodes.machine.system.build.toplevel}/bin/switch-to-configuration test')
machine.wait_for_unit("influxdb2.service")
out = machine.succeed("influx org list ${tokenArg}")
assert_contains(out, "default")
assert_contains(out, "myorg")
assert_contains(out, "someorg")
out = machine.succeed("influx bucket list ${tokenArg} --org default")
assert_contains(out, "default")
out = machine.succeed("influx bucket list ${tokenArg} --org myorg")
assert_contains(out, "mybucket")
out = machine.succeed("influx bucket list ${tokenArg} --org someorg")
assert_contains(out, "somebucket")
out = machine.succeed("influx user list ${tokenArg}")
assert_contains(out, "admin")
assert_contains(out, "myuser")
assert_contains(out, "someuser")
out = machine.succeed("influx auth list ${tokenArg}")
assert_contains(out, "operator token")
assert_contains(out, "some auth token")
with subtest("withParentDelete"):
machine.succeed('${specialisations}/withParentDelete/bin/switch-to-configuration test')
machine.wait_for_unit("influxdb2.service")
out = machine.succeed("influx org list ${tokenArg}")
assert_contains(out, "default")
assert_contains(out, "myorg")
assert_lacks(out, "someorg")
out = machine.succeed("influx bucket list ${tokenArg} --org default")
assert_contains(out, "default")
out = machine.succeed("influx bucket list ${tokenArg} --org myorg")
assert_contains(out, "mybucket")
machine.fail("influx bucket list ${tokenArg} --org someorg")
out = machine.succeed("influx user list ${tokenArg}")
assert_contains(out, "admin")
assert_contains(out, "myuser")
assert_contains(out, "someuser")
out = machine.succeed("influx auth list ${tokenArg}")
assert_contains(out, "operator token")
assert_lacks(out, "some auth token")
with subtest("withNewTokens"):
machine.succeed('${specialisations}/withNewTokens/bin/switch-to-configuration test')
machine.wait_for_unit("influxdb2.service")
out = machine.succeed("influx auth list ${tokenArg}")
assert_contains(out, "operator token")
assert_contains(out, "some auth token")
assert_contains(out, "new optoken")
assert_contains(out, "new allaccess")
assert_contains(out, "new specifics")
'';
})

View File

@ -61,14 +61,14 @@ in {
machine.wait_for_unit("lxd.service")
machine.wait_for_file("/var/lib/lxd/unix.socket")
# It takes additional second for lxd to settle
machine.sleep(1)
# Wait for lxd to settle
machine.succeed("lxd waitready")
# lxd expects the pool's directory to already exist
machine.succeed("mkdir /var/lxd-pool")
machine.succeed(
"cat ${./common/lxd/config.yaml} | lxd init --preseed"
"lxd init --minimal"
)
machine.succeed(

View File

@ -1,7 +1,7 @@
import ./make-test-python.nix ({ pkgs, lib, ... } :
import ../make-test-python.nix ({ pkgs, lib, ... } :
let
lxd-image = import ../release.nix {
lxd-image = import ../../release.nix {
configuration = {
# Building documentation makes the test unnecessarily take a longer time:
documentation.enable = lib.mkForce false;
@ -38,19 +38,18 @@ in {
};
testScript = ''
def instance_is_up(_) -> bool:
status, _ = machine.execute("lxc exec container --disable-stdin --force-interactive /run/current-system/sw/bin/true")
return status == 0
machine.wait_for_unit("sockets.target")
machine.wait_for_unit("lxd.service")
machine.wait_for_file("/var/lib/lxd/unix.socket")
# It takes additional second for lxd to settle
machine.sleep(1)
# Wait for lxd to settle
machine.succeed("lxd waitready")
# lxd expects the pool's directory to already exist
machine.succeed("mkdir /var/lxd-pool")
machine.succeed(
"cat ${./common/lxd/config.yaml} | lxd init --preseed"
)
machine.succeed("lxd init --minimal")
machine.succeed(
"lxc image import ${lxd-image-metadata}/*/*.tar.xz ${lxd-image-rootfs}/*/*.tar.xz --alias nixos"
@ -58,21 +57,23 @@ in {
with subtest("Container can be managed"):
machine.succeed("lxc launch nixos container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
machine.succeed("echo true | lxc exec container /run/current-system/sw/bin/bash -")
machine.succeed("lxc exec container true")
machine.succeed("lxc delete -f container")
with subtest("Container is mounted with lxcfs inside"):
machine.succeed("lxc launch nixos container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
## ---------- ##
## limits.cpu ##
machine.succeed("lxc config set container limits.cpu 1")
machine.succeed("lxc restart container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
assert (
"1"
@ -81,7 +82,8 @@ in {
machine.succeed("lxc config set container limits.cpu 2")
machine.succeed("lxc restart container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
assert (
"2"
@ -93,7 +95,8 @@ in {
machine.succeed("lxc config set container limits.memory 64MB")
machine.succeed("lxc restart container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
assert (
"MemTotal: 62500 kB"
@ -102,7 +105,8 @@ in {
machine.succeed("lxc config set container limits.memory 128MB")
machine.succeed("lxc restart container")
machine.sleep(5)
with machine.nested("Waiting for instance to start and be usable"):
retry(instance_is_up)
assert (
"MemTotal: 125000 kB"

View File

@ -0,0 +1,9 @@
{
system ? builtins.currentSystem,
config ? {},
pkgs ? import ../../.. {inherit system config;},
}: {
container = import ./container.nix {inherit system pkgs;};
nftables = import ./nftables.nix {inherit system pkgs;};
ui = import ./ui.nix {inherit system pkgs;};
}

View File

@ -5,7 +5,7 @@
# iptables to nftables requires a full reboot, which is a bit hard inside NixOS
# tests.
import ./make-test-python.nix ({ pkgs, ...} : {
import ../make-test-python.nix ({ pkgs, ...} : {
name = "lxd-nftables";
meta = with pkgs.lib.maintainers; {

View File

@ -1,4 +1,4 @@
import ./make-test-python.nix ({ pkgs, lib, ... }: {
import ../make-test-python.nix ({ pkgs, lib, ... }: {
name = "lxd-ui";
meta = with pkgs.lib.maintainers; {

View File

@ -23,7 +23,24 @@ See the [CONTRIBUTING.md](../CONTRIBUTING.md) document for more general informat
## Quick Start to Adding a Package
To add a package to Nixpkgs:
We welcome new contributors of new packages to Nixpkgs, arguably the greatest software database known. However, each new package comes with a cost for the maintainers, Continuous Integration, caching servers and users downloading Nixpkgs.
Before adding a new package, please consider the following questions:
* Is the package ready for general use? We don't want to include projects that are too immature or are going to be abandoned immediately. In case of doubt, check with upstream.
* Does the project have a clear license statement? Remember that software is unfree by default (all rights reserved), and merely providing access to the source code does not imply permission to redistribute it. In case of doubt, ask upstream.
* How realistic is it that it will be used by other people? It's good that nixpkgs caters to various niches, but if it's a niche of 5 people it's probably too small.
* Are you willing to maintain the package? You should care enough about the package to be willing to keep it up and running for at least one complete Nixpkgs release cycle.
If the answer to any of these questions is no, then you should probably not add the package.
This section describes a general framework, and exceptions might apply.
Luckily it's pretty easy to maintain your own package set with Nix, which can then be added to the [Nix User Repository](https://github.com/nix-community/nur) project.
---
Now that this is out of the way, to add a package to Nixpkgs:
1. Checkout the Nixpkgs source tree:
@ -67,7 +84,9 @@ To add a package to Nixpkgs:
Some notes:
- All [`meta`](https://nixos.org/manual/nixpkgs/stable/#chap-meta) attributes are optional, but it's still a good idea to provide at least the `description`, `homepage` and [`license`](https://nixos.org/manual/nixpkgs/stable/#sec-meta-license).
- Add yourself as the maintainer of the package.
- All other [`meta`](https://nixos.org/manual/nixpkgs/stable/#chap-meta) attributes are optional, but it's still a good idea to provide at least the `description`, `homepage` and [`license`](https://nixos.org/manual/nixpkgs/stable/#sec-meta-license).
- You can use `nix-prefetch-url url` to get the SHA-256 hash of source distributions. Similar commands such as `nix-prefetch-git` and `nix-prefetch-hg` are available in the `nix-prefetch-scripts` package. A minimal sketch of a complete package expression follows these notes.
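
Tying these notes together, a minimal sketch of such a package expression (every name, the URL and the hash below are placeholders rather than a real project):

```nix
{ lib, stdenv, fetchFromGitHub }:

stdenv.mkDerivation rec {
  pname = "some-package";   # placeholder
  version = "1.0.0";

  src = fetchFromGitHub {
    owner = "some-owner";
    repo = "some-package";
    rev = "v${version}";
    hash = lib.fakeHash;    # build once, then replace with the hash Nix reports
  };

  meta = with lib; {
    description = "One-line description of the package";
    homepage = "https://example.org/some-package";
    license = licenses.mit;               # use the project's actual license
    maintainers = with maintainers; [ ];  # add yourself here
  };
}
```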

View File

@ -63,6 +63,22 @@ self: let
popd
'';
});
xeft = super.xeft.overrideAttrs (old: let
libExt = pkgs.stdenv.targetPlatform.extensions.sharedLibrary;
in {
dontUnpack = false;
buildInputs = (old.buildInputs or [ ]) ++ [ pkgs.xapian ];
buildPhase = (old.buildPhase or "") + ''
$CXX -shared -o xapian-lite${libExt} xapian-lite.cc $NIX_CFLAGS_COMPILE -lxapian
'';
postInstall = (old.postInstall or "") + "\n" + ''
outd=$out/share/emacs/site-lisp/elpa/xeft-*
install -m444 -t $outd xapian-lite${libExt}
rm $outd/xapian-lite.cc $outd/emacs-module.h $outd/emacs-module-prelude.h $outd/demo.gif $outd/Makefile
'';
});
};
elpaDevelPackages = super // overrides;

View File

@ -158,6 +158,23 @@ self: let
}
);
xeft = super.xeft.overrideAttrs (old: let
libExt = pkgs.stdenv.targetPlatform.extensions.sharedLibrary;
in {
dontUnpack = false;
buildInputs = (old.buildInputs or [ ]) ++ [ pkgs.xapian ];
buildPhase = (old.buildPhase or "") + ''
$CXX -shared -o xapian-lite${libExt} xapian-lite.cc $NIX_CFLAGS_COMPILE -lxapian
'';
postInstall = (old.postInstall or "") + "\n" + ''
outd=$out/share/emacs/site-lisp/elpa/xeft-*
install -m444 -t $outd xapian-lite${libExt}
rm $outd/xapian-lite.cc $outd/emacs-module.h $outd/emacs-module-prelude.h $outd/demo.gif $outd/Makefile
'';
});
};
elpaPackages = super // overrides;

View File

@ -1635,7 +1635,6 @@ self: super: {
"coc-haxe"
"coc-highlight"
"coc-html"
"coc-imselect"
"coc-java"
"coc-jest"
"coc-json"

View File

@ -14,19 +14,19 @@
stdenv.mkDerivation rec {
pname = "drawio";
version = "21.6.1";
version = "21.6.8";
src = fetchFromGitHub {
owner = "jgraph";
repo = "drawio-desktop";
rev = "v${version}";
fetchSubmodules = true;
hash = "sha256-60fOecWDYGkn4rJzxmum14L4IAaHAG+uKyjNo9nkVHg=";
hash = "sha256-k16npV8N4zPIXjc8ZJcQHgv76h2VhbqtT2ZCzDqkF8U";
};
offlineCache = fetchYarnDeps {
yarnLock = src + "/yarn.lock";
hash = "sha256-Knk9ys8Kjk1QOl80vmIA2H6wP8Mj6iNcmb/bR4zMQgw=";
hash = "sha256-rJvwXhtO/HsfpbDyOh+jFc6E9wQ+sZMT8vnhJpGlkF8";
};
nativeBuildInputs = [

View File

@ -1,28 +1,26 @@
{ lib, mkDerivation, fetchpatch, fetchFromGitHub, cmake, qttools, qtwebkit }:
{ lib
, mkDerivation
, fetchFromGitHub
, cmake
, qttools
, qtwebkit
}:
mkDerivation rec {
pname = "fontmatrix";
version = "0.6.0-qt5";
version = "0.9.100";
src = fetchFromGitHub {
owner = "fcoiffie";
owner = "fontmatrix";
repo = "fontmatrix";
rev = "1ff8382d8c85c18d9962918f461341ff4fe21993";
sha256 = "0yx1gbsjj9ddq1kiqplif1w5x5saw250zbmhmd4phqmaqzr60w0h";
rev = "v${version}";
sha256 = "sha256-DtajGhx79DiecglXHja9q/TKVq8Jl2faQdA5Ib/yT88=";
};
# Add missing QAction include
patches = [ (fetchpatch {
url = "https://github.com/fcoiffie/fontmatrix/commit/dc6de8c414ae21516b72daead79c8db88309b102.patch";
sha256 = "092860fdyf5gq67jqfxnlgwzjgpizi6j0njjv3m62aiznrhig7c8";
})];
buildInputs = [ qttools qtwebkit ];
nativeBuildInputs = [ cmake ];
hardeningDisable = [ "format" ];
meta = with lib; {
description = "Fontmatrix is a free/libre font explorer for Linux, Windows and Mac";
homepage = "https://github.com/fontmatrix/fontmatrix";

View File

@ -27,8 +27,8 @@ mkDerivation rec {
src = fetchFromGitHub {
owner = "cnr-isti-vclab";
repo = "meshlab";
rev = "Meshlab-${version}";
sha256 = "sha256-MP+jkiV6yS1T1eWClxM56kZWLXwu0g4w/zBHy6CSL6Y=";
rev = "MeshLab-${version}";
sha256 = "sha256-jcc3PfsiIeYyipteZgzd0NwZgFFgR/mMBiaInzhOcDY=";
fetchSubmodules = true; # for vcglib
};

View File

@ -16,6 +16,17 @@ let
inherit version;
hash = "sha256-4RIMIoyi9VO0cN9KX6knq2YlhGdSYGmYGz6wqRkCaH0=";
};
patches = [
# Pulling in this patch lets us continue running tests without any
# other changes using setuptools >= 67.5.0.
(fetchpatch {
name = "remove-deprecated-pkg-resources.patch";
url = "https://github.com/pallets/flask/commit/751d85f3de3f726446bb12e4ddfae885a6645ba1.patch";
hash = "sha256-T4vKSSe3P0xtb2/iQjm0RH2Bwk1ZHWiPoX1Ycr63EqU=";
includes = [ "src/flask/cli.py" ];
})
];
});
flask-wtf = super.flask-wtf.overridePythonAttrs (old: rec {
version = "0.15.1";

View File

@ -27,6 +27,11 @@
, freeglut
, libGLU
, xcbuild
# for passthru.tests
, cups-filters
, python3
, zathura
}:
let
@ -146,6 +151,11 @@ stdenv.mkDerivation rec {
enableParallelBuilding = true;
passthru.tests = {
inherit cups-filters zathura;
inherit (python3.pkgs) pikepdf pymupdf;
};
meta = with lib; {
homepage = "https://mupdf.com";
description = "Lightweight PDF, XPS, and E-book viewer and toolkit written in portable C";

View File

@ -9,14 +9,14 @@
"vendorHash": null
},
"acme": {
"hash": "sha256-azNFQ4U7iGIKLingq4GItjXvdcsm0YkrQ4PRvEeDjVU=",
"hash": "sha256-5KR32V4sE5AkOVroLmelNBzBZpD4KfhC491X+5eo+n8=",
"homepage": "https://registry.terraform.io/providers/vancluever/acme",
"owner": "vancluever",
"proxyVendor": true,
"repo": "terraform-provider-acme",
"rev": "v2.16.1",
"rev": "v2.17.0",
"spdx": "MPL-2.0",
"vendorHash": "sha256-9F853+GHfwGH0JQRLawLEB8X76z/Xll1Aa4+vBRWk1o="
"vendorHash": "sha256-UIV0dIoRZxNiaEq1HGPIV4mFLn4pAoGPo6tx6zV3r3A="
},
"age": {
"hash": "sha256-bJrzjvkrCX93bNqCA+FdRibHnAw6cb61StqtwUY5ok4=",
@ -28,13 +28,13 @@
"vendorHash": "sha256-jK7JuARpoxq7hvq5+vTtUwcYot0YqlOZdtDwq4IqKvk="
},
"aiven": {
"hash": "sha256-Nm5flY+BN9PpQY+4LyohFwDfdEPxfVpT/rkfn8aLQyI=",
"hash": "sha256-3agD22viTP+yntNg2nyYi5OpknXnfI2Jk/xEcvXgia8=",
"homepage": "https://registry.terraform.io/providers/aiven/aiven",
"owner": "aiven",
"repo": "terraform-provider-aiven",
"rev": "v4.8.0",
"rev": "v4.8.2",
"spdx": "MIT",
"vendorHash": "sha256-eScN0by/rnCf4+p4g3yhz2kJRyfFyqlVi+0MJXPdzKw="
"vendorHash": "sha256-sVPby/MLAgU7DfBDACqxvkLWblBhisHcUaoOgR3fMaM="
},
"akamai": {
"hash": "sha256-LGgZF2/YCYpoDOSu0UeuPqK9wGXrvPQE4WUGGS0sx30=",
@ -182,13 +182,13 @@
"vendorHash": "sha256-/dOiXO2aPkuZaFiwv/6AXJdIADgx8T7eOwvJfBBoqg8="
},
"buildkite": {
"hash": "sha256-nDJ4XsWvielQYqShBav7g/pZyDcU0jqgemXUqaNJHnA=",
"hash": "sha256-xojTeS+p9XG+wO9thmrSOWrizF56FCg+nwRBdaXqr/4=",
"homepage": "https://registry.terraform.io/providers/buildkite/buildkite",
"owner": "buildkite",
"repo": "terraform-provider-buildkite",
"rev": "v0.25.0",
"rev": "v0.25.1",
"spdx": "MIT",
"vendorHash": "sha256-C/jT+vcZat8UHXgOhtj+gyl8ttCEb564byp/npI2Ei8="
"vendorHash": "sha256-V2BsVBhtdPOT9iseWPhPTOrUe4iMhq4YUiBWd0ne5Xg="
},
"checkly": {
"hash": "sha256-tOTrAi6hd4HFbHAj0p/LTYdxQl1R1WuQ9L4hzqmDVqI=",
@ -218,13 +218,13 @@
"vendorHash": "sha256-qIgr+ynaNSfNx1iW5RJrNHvEnlr46dBzIi+5IXYn+3Q="
},
"cloudflare": {
"hash": "sha256-ayxekJkQt/7K/qwMKvjqkyVkux5+Jw3uyepmaiy3Ptc=",
"hash": "sha256-l1cTzPiOOLyvbvbt7dWR9lRgqVFiO5gRq4XNnLqvac0=",
"homepage": "https://registry.terraform.io/providers/cloudflare/cloudflare",
"owner": "cloudflare",
"repo": "terraform-provider-cloudflare",
"rev": "v4.12.0",
"rev": "v4.13.0",
"spdx": "MPL-2.0",
"vendorHash": "sha256-VTSbi2pDllzyKDhWs5EpWSXO5oKl+khVqLg/Ro3x8ys="
"vendorHash": "sha256-uZ0zc+/RmEiqxBSZLgLPmwN29BEJitPN13HE88zPxcI="
},
"cloudfoundry": {
"hash": "sha256-yEqsdgTSlwppt6ILRZQ6Epyh5WVN6Il3xsBOa/NfIdo=",
@ -282,13 +282,13 @@
"vendorHash": "sha256-ZCMSmOCPEMxCSpl3DjIUGPj1W/KNJgyjtHpmQ19JquA="
},
"datadog": {
"hash": "sha256-sytQJgrfgtJ761mGo0KUTxAukqvmPYyLM8+vsYGtoZc=",
"hash": "sha256-FAqtbze6Lw6SCU84R6aB0oY+pcWyDBdTZRBZhM2pjyc=",
"homepage": "https://registry.terraform.io/providers/DataDog/datadog",
"owner": "DataDog",
"repo": "terraform-provider-datadog",
"rev": "v3.28.0",
"rev": "v3.29.0",
"spdx": "MPL-2.0",
"vendorHash": "sha256-foS7GyRUdhF/M8uTPf2I4WQo7qEg4Z/3FXjagoeSRkU="
"vendorHash": "sha256-UJRuj5qmWWjkqMBGf0500//83ky0Dxx04IQETPwwSsw="
},
"dexidp": {
"hash": "sha256-69r3m3lIKftZQ8NXBD5KEHbsNUwCGpFgn/CYO+921M4=",
@ -363,13 +363,13 @@
"vendorHash": "sha256-oVTanZpCWs05HwyIKW2ajiBPz1HXOFzBAt5Us+EtTRw="
},
"equinix": {
"hash": "sha256-MEsE1OQwKjd1Y+Ek7UmZMbLq4x84iQW40sMl78UbW2c=",
"hash": "sha256-SSCKl0etImK9dXhq9ycQi/U38cZ+SuaoBiaeeg/+JDA=",
"homepage": "https://registry.terraform.io/providers/equinix/equinix",
"owner": "equinix",
"repo": "terraform-provider-equinix",
"rev": "v1.14.7",
"rev": "v1.15.0",
"spdx": "MIT",
"vendorHash": "sha256-cfJG0DJJJX85ISz7dSZ+di1uhgJJd5xUH99PhqGMPgw="
"vendorHash": "sha256-7oLAF+HpL/eNN6KXYp8zA9Yu6h5S+XrWJN4dE3B9H58="
},
"exoscale": {
"hash": "sha256-93pCsHrsYLJYgg8MXHz2Gg+vaPC9gcHdLastb89/BMg=",
@ -827,11 +827,11 @@
"vendorHash": "sha256-LRIfxQGwG988HE5fftGl6JmBG7tTknvmgpm4Fu1NbWI="
},
"oci": {
"hash": "sha256-sxhykS4pXF00VJVtVd7kO2GasAqBUUMqPDPLE3BzUFI=",
"hash": "sha256-S+gHfQsqnOlegd5JcuBOUKO7fynWQAWCZGrlqjY03e0=",
"homepage": "https://registry.terraform.io/providers/oracle/oci",
"owner": "oracle",
"repo": "terraform-provider-oci",
"rev": "v5.9.0",
"rev": "v5.10.0",
"spdx": "MPL-2.0",
"vendorHash": null
},

View File

@ -13,11 +13,11 @@
stdenv.mkDerivation rec {
pname = "appflowy";
version = "0.2.6";
version = "0.3.0";
src = fetchzip {
url = "https://github.com/AppFlowy-IO/appflowy/releases/download/${version}/AppFlowy_x86_64-unknown-linux-gnu_ubuntu-20.04.tar.gz";
sha256 = "sha256-e7nzJ81rMehpxwsbOlwnMh1jzCsGwc+kAo/6+AcCiLE=";
sha256 = "sha256-05RQtvf6I4/sjGtMDfc5U4esxfFFeTwIuxFAkbr6p4A";
stripRoot = false;
};

View File

@ -0,0 +1,55 @@
{ lib
, stdenv
, fetchFromGitHub
, zlib
}:
stdenv.mkDerivation (finalAttrs: {
pname = "bwa-mem2";
version = "unstable-2023-03-18";
src = fetchFromGitHub {
owner = "bwa-mem2";
repo = "bwa-mem2";
rev = "cf4306a47dac35e7e79a9e75398a35f33900cfd0";
fetchSubmodules = true;
hash = "sha256-1AYSn7nBrDwbX7oSrdEoa1d3t6xzwKnA0S87Y/XeXJg=";
};
buildInputs = [ zlib ];
# see https://github.com/bwa-mem2/bwa-mem2/issues/93
postPatch = lib.optionalString stdenv.isDarwin ''
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/include/safe_mem_lib.h
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/safeclib/memset16_s.c
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/safeclib/memset32_s.c
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/safeclib/memset_s.c
sed -i 's/memset_s/memset8_s/g' ext/safestringlib/safeclib/wmemset_s.c
'';
buildFlags = [
(if stdenv.hostPlatform.sse4_2Support then "arch=sse42"
else if stdenv.hostPlatform.avxSupport then "arch=avx"
else if stdenv.hostPlatform.avx2Support then "arch=avx2"
else if stdenv.hostPlatform.avx512Support then "arch=avx512"
else "arch=sse41")
];
enableParallelBuilding = true;
installPhase = ''
runHook preInstall
mkdir -p $out/bin
cp bwa-mem2* $out/bin/
runHook postInstall
'';
meta = with lib; {
description = "Next version of the bwa-mem algorithm in bwa, a software package for mapping low-divergent sequences against a large reference genome";
license = licenses.mit;
homepage = "https://github.com/bwa-mem2/bwa-mem2/";
changelog = "https://github.com/bwa-mem2/bwa-mem2/blob/${finalAttrs.src.rev}/NEWS.md";
platforms = platforms.x86_64;
maintainers = with maintainers; [ alxsimon ];
};
})

View File

@ -8,16 +8,16 @@
rustPlatform.buildRustPackage rec {
pname = "gql";
version = "0.4.1";
version = "0.5.0";
src = fetchFromGitHub {
owner = "AmrDeveloper";
repo = "GQL";
rev = version;
hash = "sha256-d6uncWHq9bLDODFle7xij9YjhpiQPL7mmyFmVxmy8hY=";
hash = "sha256-UTyP9ugUXiPMzkeIvPJUtORvcJ93YOBltglmlcym3sI=";
};
cargoHash = "sha256-jR79xchMpib76oVnpy+UIbcwhDXvDPyl+jWmVPfXVog=";
cargoHash = "sha256-AIt7Ns3vNrHQxJU7cSNr+h3tFGZ9hL1OMBqPHS61YUQ=";
nativeBuildInputs = [
pkg-config

View File

@ -26,14 +26,14 @@
stdenv.mkDerivation (finalAttrs: {
pname = "qmplay2";
version = "23.06.17";
version = "23.08.22";
src = fetchFromGitHub {
owner = "zaps166";
repo = "QMPlay2";
rev = finalAttrs.version;
fetchSubmodules = true;
hash = "sha256-f4lIXB0eTyteCJdWFP0XnsnxGWc32CV+HlqpaCjmgOE=";
hash = "sha256-Ug7WAqZ+BxspQUXweL/OnVBGCsU60DOWNexbi0GpDo0=";
};
nativeBuildInputs = [
@ -79,7 +79,7 @@ stdenv.mkDerivation (finalAttrs: {
'';
changelog = "https://github.com/zaps166/QMPlay2/releases/tag/${finalAttrs.version}";
license = lib.licenses.lgpl3Plus;
maintainers = with lib.maintainers; [ AndersonTorres ];
maintainers = with lib.maintainers; [ AndersonTorres kashw2 ];
platforms = lib.platforms.linux;
};
})

View File

@ -73,6 +73,7 @@ grimshot = stdenv.mkDerivation rec {
meta = with lib; {
description = "A helper for screenshots within sway";
maintainers = with maintainers; [ evils ];
mainProgram = "grimshot";
};
};

View File

@ -244,6 +244,11 @@ let
++ lib.optionals (langD) [
"--with-target-system-zlib=yes"
]
# On mips64-unknown-linux-gnu libsanitizer defines collide with
# glibc's definitions and fail the build. It was fixed in gcc-13+.
++ lib.optionals (targetPlatform.isMips && targetPlatform.parsed.abi.name == "gnu" && lib.versions.major version == "12") [
"--disable-libsanitizer"
]
;
in configureFlags

View File

@ -6,13 +6,13 @@
stdenv.mkDerivation (finalAttrs: {
pname = "wamr";
version = "1.2.2";
version = "1.2.3";
src = fetchFromGitHub {
owner = "bytecodealliance";
repo = "wasm-micro-runtime";
rev = "WAMR-${finalAttrs.version}";
hash = "sha256-jpT42up9HAVJpo03cFrffQQk2JiHEAEepBGlU4RUfNU=";
hash = "sha256-bnia0ORC0YajO7I3XDMdpjlktDqOiXDlGcf12N1G+eg=";
};
nativeBuildInputs = [ cmake ];
@ -23,6 +23,7 @@ stdenv.mkDerivation (finalAttrs: {
description = "WebAssembly Micro Runtime";
homepage = "https://github.com/bytecodealliance/wasm-micro-runtime";
license = licenses.asl20;
mainProgram = "iwasm";
maintainers = with maintainers; [ ereslibre ];
# TODO (ereslibre): this derivation should be improved to support
# more platforms.

View File

@ -0,0 +1,42 @@
{ lib
, stdenv
, fetchFromGitHub
, cmake
, gtest
, static ? stdenv.hostPlatform.isStatic
, cxxStandard ? null
}:
stdenv.mkDerivation (finalAttrs: {
pname = "abseil-cpp";
version = "20230802.0";
src = fetchFromGitHub {
owner = "abseil";
repo = "abseil-cpp";
rev = "refs/tags/${finalAttrs.version}";
hash = "sha256-yILAsAERUDMbRWh8t4o6W74YiswvGIHSyBAIuLVbzxY=";
};
cmakeFlags = [
"-DABSL_BUILD_TEST_HELPERS=ON"
"-DABSL_USE_EXTERNAL_GOOGLETEST=ON"
"-DBUILD_SHARED_LIBS=${if static then "OFF" else "ON"}"
] ++ lib.optionals (cxxStandard != null) [
"-DCMAKE_CXX_STANDARD=${cxxStandard}"
];
strictDeps = true;
nativeBuildInputs = [ cmake ];
buildInputs = [ gtest ];
meta = with lib; {
description = "An open-source collection of C++ code designed to augment the C++ standard library";
homepage = "https://abseil.io/";
license = licenses.asl20;
platforms = platforms.all;
maintainers = [ maintainers.andersk ];
};
})

View File

@ -7,14 +7,14 @@
}:
stdenv.mkDerivation (finalAttrs: {
name = "librecast";
version = "0.7-RC3";
version = "0.7.0";
src = fetchFromGitea {
domain = "codeberg.org";
owner = "librecast";
repo = "librecast";
rev = "v${finalAttrs.version}";
hash = "sha256-AD3MpWg8Lp+VkizwYTuuS2YWM8e0xaMEavVIvwhSZRo=";
hash = "sha256-NlwYJJn1yewx92y6UKJcj6R2MnPn+XuEiKOmsR2oE3g=";
};
buildInputs = [ lcrq libsodium ];
installFlags = [ "PREFIX=$(out)" ];

View File

@ -22,13 +22,13 @@
stdenv.mkDerivation rec {
pname = "pdal";
version = "2.5.5";
version = "2.5.6";
src = fetchFromGitHub {
owner = "PDAL";
repo = "PDAL";
rev = version;
sha256 = "sha256-AhekpvWAdbDAYAr38VXPBDGE40xvP0BnEAI2ZKF3ctY=";
sha256 = "sha256-JKwa89c05EfZ/FxOkj8lYmw0o2EgSqafRDIV2mTpZ5E=";
};
nativeBuildInputs = [

View File

@ -6,7 +6,7 @@
stdenv.mkDerivation rec {
pname = "libupnp";
version = "1.14.17";
version = "1.14.18";
outputs = [ "out" "dev" ];
@ -14,7 +14,7 @@ stdenv.mkDerivation rec {
owner = "pupnp";
repo = "pupnp";
rev = "release-${version}";
sha256 = "sha256-vb540oqDn6Y+oD0LriOJckYYkI/zcHkEVc8mL/+9bps=";
sha256 = "sha256-eQKtZioZjI53J1fsoer032pzqebbK5IabOnkAXwBPos=";
};
nativeBuildInputs = [

View File

@ -5,13 +5,13 @@
stdenv.mkDerivation rec {
pname = "tbox";
version = "1.7.3";
version = "1.7.4";
src = fetchFromGitHub {
owner = "tboox";
repo = pname;
rev = "v${version}";
hash = "sha256-6SqMvwxKSiJO7Z33xx7cJoECu5AJ1gWF8ZsiERWx8DU=";
hash = "sha256-b461JNTS7jNI/qawumDjL2vfC4fAaWB7a++9PpUUDB0=";
};
configureFlags = [

View File

@ -44,6 +44,7 @@ mapAliases {
"@githubnext/github-copilot-cli" = pkgs.github-copilot-cli; # Added 2023-05-02
"@google/clasp" = pkgs.google-clasp; # Added 2023-05-07
"@maizzle/cli" = pkgs.maizzle; # added 2023-08-17
"@medable/mdctl-cli" = throw "@medable/mdctl-cli was removed because it was broken"; # added 2023-08-21
"@nestjs/cli" = pkgs.nest-cli; # Added 2023-05-06
antennas = pkgs.antennas; # added 2023-07-30
balanceofsatoshis = pkgs.balanceofsatoshis; # added 2023-07-31
@ -54,13 +55,16 @@ mapAliases {
inherit (pkgs) carto; # added 2023-08-17
castnow = pkgs.castnow; # added 2023-07-30
inherit (pkgs) clean-css-cli; # added 2023-08-18
coc-imselect = throw "coc-imselect was removed because it was broken"; # added 2023-08-21
inherit (pkgs) configurable-http-proxy; # added 2023-08-19
inherit (pkgs) cordova; # added 2023-08-18
dat = throw "dat was removed because it was broken"; # added 2023-08-21
eask = pkgs.eask; # added 2023-08-17
inherit (pkgs.elmPackages) elm-test;
eslint_d = pkgs.eslint_d; # Added 2023-05-26
inherit (pkgs) firebase-tools; # added 2023-08-18
flood = pkgs.flood; # Added 2023-07-25
git-ssb = throw "git-ssb was removed because it was broken"; # added 2023-08-21
inherit (pkgs) graphqurl; # added 2023-08-19
gtop = pkgs.gtop; # added 2023-07-31
inherit (pkgs) htmlhint; # added 2023-08-19
@ -74,9 +78,13 @@ mapAliases {
manta = pkgs.node-manta; # Added 2023-05-06
markdownlint-cli = pkgs.markdownlint-cli; # added 2023-07-29
inherit (pkgs) markdownlint-cli2; # added 2023-08-22
mdctl-cli = self."@medable/mdctl-cli"; # added 2023-08-21
node-inspector = throw "node-inspector was removed because it was broken"; # added 2023-08-21
readability-cli = pkgs.readability-cli; # Added 2023-06-12
reveal-md = pkgs.reveal-md; # added 2023-07-31
s3http = throw "s3http was removed because it was abandoned upstream"; # added 2023-08-18
ssb-server = throw "ssb-server was removed because it was broken"; # added 2023-08-21
stf = throw "stf was removed because it was broken"; # added 2023-08-21
thelounge = pkgs.thelounge; # Added 2023-05-22
triton = pkgs.triton; # Added 2023-05-06
typescript = pkgs.typescript; # Added 2023-06-21

View File

@ -17,7 +17,6 @@
"@commitlint/cli" = "commitlint";
"@forge/cli" = "forge";
"@gitbeaker/cli" = "gitbeaker";
"@medable/mdctl-cli" = "mdctl";
"@mermaid-js/mermaid-cli" = "mmdc";
"@nerdwallet/shepherd" = "shepherd";
"@prisma/language-server" = "prisma-language-server";

View File

@ -6,7 +6,6 @@
, "@commitlint/cli"
, "@commitlint/config-conventional"
, "@forge/cli"
, "@medable/mdctl-cli"
, "@mermaid-js/mermaid-cli"
, "@microsoft/rush"
, "@nerdwallet/shepherd"
@ -57,7 +56,6 @@
, "coc-haxe"
, "coc-highlight"
, "coc-html"
, "coc-imselect"
, "coc-java"
, "coc-jest"
, "coc-json"
@ -104,7 +102,6 @@
, "create-react-native-app"
, "cspell"
, "csslint"
, "dat"
, "degit"
, "dhcp"
, "diagnostic-languageserver"
@ -134,7 +131,6 @@
, "generator-code"
, "get-graphql-schema"
, "git-run"
, "git-ssb"
, "git-standup"
, "@gitbeaker/cli"
, "gitmoji-cli"
@ -202,7 +198,6 @@
, "nijs"
, "node-gyp"
, "node-gyp-build"
, "node-inspector"
, "node-pre-gyp"
, "node-red"
, "node2nix"
@ -262,9 +257,7 @@
, "socket.io"
, "speed-test"
, "sql-formatter"
, "ssb-server"
, "stackdriver-statsd-backend"
, "stf"
, "stylelint"
, "surge"
, "svelte-check"

File diff suppressed because it is too large

View File

@ -41,25 +41,6 @@ final: prev: {
];
};
"@medable/mdctl-cli" = prev."@medable/mdctl-cli".override (oldAttrs: {
nativeBuildInputs = with pkgs; with darwin.apple_sdk.frameworks; [
glib
libsecret
pkg-config
] ++ lib.optionals stdenv.isDarwin [
AppKit
Security
];
buildInputs = [
final.node-gyp-build
final.node-pre-gyp
nodejs
];
meta = oldAttrs.meta // { broken = since "16"; };
});
mdctl-cli = final."@medable/mdctl-cli";
autoprefixer = prev.autoprefixer.override {
nativeBuildInputs = [ pkgs.buildPackages.makeWrapper ];
postInstall = ''
@ -92,16 +73,6 @@ final: prev: {
'';
};
coc-imselect = prev.coc-imselect.override (oldAttrs: {
meta = oldAttrs.meta // { broken = since "10"; };
});
dat = prev.dat.override (oldAttrs: {
buildInputs = [ final.node-gyp-build pkgs.libtool pkgs.autoconf pkgs.automake ];
meta = oldAttrs.meta // { broken = since "12"; };
});
expo-cli = prev."expo-cli".override (oldAttrs: {
# The traveling-fastlane-darwin optional dependency aborts build on Linux.
dependencies = builtins.filter (d: d.packageName != "@expo/traveling-fastlane-${if stdenv.isLinux then "darwin" else "linux"}") oldAttrs.dependencies;
@ -126,11 +97,6 @@ final: prev: {
};
git-ssb = prev.git-ssb.override (oldAttrs: {
buildInputs = [ final.node-gyp-build ];
meta = oldAttrs.meta // { broken = since "10"; };
});
graphite-cli = prev."@withgraphite/graphite-cli".override {
name = "graphite-cli";
nativeBuildInputs = with pkgs; [ installShellFiles pkg-config ];
@ -260,11 +226,6 @@ final: prev: {
'';
};
node-inspector = prev.node-inspector.override (oldAttrs: {
buildInputs = [ final.node-pre-gyp ];
meta = oldAttrs.meta // { broken = since "10"; };
});
node-red = prev.node-red.override {
buildInputs = [ final.node-pre-gyp ];
};
@ -384,15 +345,6 @@ final: prev: {
name = "rush";
};
ssb-server = prev.ssb-server.override (oldAttrs: {
buildInputs = [ pkgs.automake pkgs.autoconf final.node-gyp-build ];
meta = oldAttrs.meta // { broken = since "10"; };
});
stf = prev.stf.override (oldAttrs: {
meta = oldAttrs.meta // { broken = since "10"; };
});
tailwindcss = prev.tailwindcss.override {
plugins = [ ];
nativeBuildInputs = [ pkgs.buildPackages.makeWrapper ];

View File

@ -3,6 +3,7 @@
, aresponses
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, poetry-core
, pytest-aiohttp
, pytest-asyncio
@ -27,6 +28,20 @@ buildPythonPackage rec {
hash = "sha256-ar2UGSlVukMD5EZsEn7TFfIOovaI+B3Ym+UeGo95oks=";
};
patches = [
# This patch removes references to setuptools and wheel that are no longer
# necessary and changes poetry to poetry-core, so that we don't need to add
# unnecessary nativeBuildInputs.
#
# https://github.com/bachya/aioambient/pull/295
#
(fetchpatch {
name = "clean-up-build-dependencies.patch";
url = "https://github.com/bachya/aioambient/commit/fa21a2e82678a231a73c8a1153032980926f4c35.patch";
hash = "sha256-RLRbHmaR2A8MNc96WHx0L8ccyygoBUaOulAuRJkFuUM=";
})
];
postPatch = ''
substituteInPlace pyproject.toml \
--replace 'websockets = ">=11.0.1"' 'websockets = "*"'
@ -43,6 +58,8 @@ buildPythonPackage rec {
websockets
];
__darwinAllowLocalNetworking = true;
nativeCheckInputs = [
aresponses
pytest-aiohttp

View File

@ -7,7 +7,6 @@
, hatchling
, importlib-metadata
, ipywidgets
, jupyterlab
, psygnal
, typing-extensions
, watchfiles
@ -25,10 +24,17 @@ buildPythonPackage rec {
hash = "sha256-OUKxmYceEKURJeQTVI7oLT4SdZM90V7BoZf0UykkEV4=";
};
# We do not need the jupyterlab build dependency, because we do not need to
# build any JS components; these are present already in the PyPI artifact.
#
postPatch = ''
substituteInPlace pyproject.toml \
--replace '"jupyterlab==3.*"' ""
'';
nativeBuildInputs = [
hatch-jupyter-builder
hatchling
jupyterlab
];
propagatedBuildInputs = [

View File

@ -3,6 +3,7 @@
, aresponses
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, poetry-core
, pytest-asyncio
, pytestCheckHook
@ -24,6 +25,22 @@ buildPythonPackage rec {
hash = "sha256-3f6N4b6WZPAUUQTuGeb20q0f7ZqDR+O24QRze5RpRlw=";
};
patches = [
# https://github.com/klaasnicolaas/python-autarco/pull/265
(fetchpatch {
name = "remove-setuptools-dependency.patch";
url = "https://github.com/klaasnicolaas/python-autarco/commit/bf40e8a4f64cd9c9cf72930260895537ea5b2adc.patch";
hash = "sha256-Fgijy7sd67LUIqh3qjQjyothnjdW7Zcil/bQSuVsBR8=";
})
];
postPatch = ''
# Upstream doesn't set a version for the pyproject.toml
substituteInPlace pyproject.toml \
--replace "0.0.0" "${version}" \
--replace "--cov" ""
'';
nativeBuildInputs = [
poetry-core
];
@ -33,19 +50,14 @@ buildPythonPackage rec {
yarl
];
__darwinAllowLocalNetworking = true;
nativeCheckInputs = [
aresponses
pytest-asyncio
pytestCheckHook
];
postPatch = ''
# Upstream doesn't set a version for the pyproject.toml
substituteInPlace pyproject.toml \
--replace "0.0.0" "${version}" \
--replace "--cov" ""
'';
pythonImportsCheck = [
"autarco"
];

View File

@ -13,7 +13,6 @@
buildPythonPackage rec {
pname = "bqscales";
version = "0.3.1";
format = "pyproject";
disabled = pythonOlder "3.6";
@ -22,6 +21,19 @@ buildPythonPackage rec {
hash = "sha256-C+/GLpqYpePngbn5W0MwvpdmVgFZF7aGHyKMgO5XM90=";
};
# We relax dependencies here instead of pulling in a patch because upstream
# has released a new version using hatch-jupyter-builder, but it is not yet
# trivial to upgrade to that.
#
# Per https://github.com/bqplot/bqscales/issues/76, jupyterlab is not needed
# as a build dependency right now.
#
postPatch = ''
substituteInPlace pyproject.toml \
--replace '"jupyterlab==3.*",' "" \
--replace 'jupyter_packaging~=' 'jupyter_packaging>='
'';
nativeBuildInputs = [
hatchling
jupyter-packaging

View File

@ -1,5 +1,6 @@
{ lib
, buildPythonPackage
, fetchpatch
, fetchPypi
, poetry-core
, pythonOlder
@ -18,10 +19,15 @@ buildPythonPackage rec {
hash = "sha256-1OLUJxsuxG/sCKDxKiU4i7o5HyaJdIW8rPo8UofMI28=";
};
patchPhase = ''
substituteInPlace pyproject.toml \
--replace "poetry.masonry.api" "poetry.core.masonry.api"
'';
patches = [
# remove extraneous build dependencies:
# https://github.com/sbdchd/celery-types/pull/138
(fetchpatch {
name = "clean-up-build-dependencies.patch";
url = "https://github.com/sbdchd/celery-types/commit/ff83f06a0302084e1a690e2a5a8b25f2c0dfc6e7.patch";
hash = "sha256-c68SMugg6Qk88FC842/czoxLpk0uVAVSlWsvo4NI9uo=";
})
];
propagatedBuildInputs = [
typing-extensions

View File

@ -3,6 +3,7 @@
, aresponses
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, poetry-core
, pytest-asyncio
, pytestCheckHook
@ -24,6 +25,15 @@ buildPythonPackage rec {
hash = "sha256-BorgGHxoEeIGyJKqe9mFRDpcGHhi6/8IV7ubEI8yQE4=";
};
patches = [
# https://github.com/klaasnicolaas/python-cemm/pull/360
(fetchpatch {
name = "remove-setuptools-dependency.patch";
url = "https://github.com/klaasnicolaas/python-cemm/commit/1e373dac078f18563264e6733baf6a93962cac4b.patch";
hash = "sha256-DVNn4BZwi8yNpKFmzt7YSYhzzB4vaAyrd/My8TtYzj0=";
})
];
postPatch = ''
substituteInPlace pyproject.toml \
--replace '"0.0.0"' '"${version}"' \
@ -39,6 +49,8 @@ buildPythonPackage rec {
yarl
];
__darwinAllowLocalNetworking = true;
nativeCheckInputs = [
aresponses
pytest-asyncio

View File

@ -1,5 +1,6 @@
{ lib
, buildPythonPackage
, fetchpatch
, fetchPypi
, poetry-core
, pythonOlder
@ -21,6 +22,15 @@ buildPythonPackage rec {
hash = "sha256-i6kbjugulAcmmInFb+rH4WB50dM7SDO1HNW/JgD4OTQ=";
};
patches = [
# https://github.com/cohere-ai/cohere-python/pull/289
(fetchpatch {
name = "replace-poetry-with-poetry-core.patch";
url = "https://github.com/cohere-ai/cohere-python/commit/e86480336331c0cf6f67e26b0825467dfca5b277.patch";
hash = "sha256-P1Ioq5ypzT3tx6cxrI3ep34Fi4cUx88YkfJ5ErN3VHk=";
})
];
nativeBuildInputs = [
poetry-core
];

View File

@ -1,23 +1,32 @@
{ lib, buildPythonPackage, fetchPypi, isPyPy
, pytest, pytest-cov, pytest-mock, freezegun
, pytest, pytest-cov, pytest-mock, freezegun, safety, pre-commit
, jinja2, future, binaryornot, click, jinja2-time, requests
, python-slugify
, pyyaml
, arrow
, rich
}:
buildPythonPackage rec {
pname = "cookiecutter";
version = "2.1.1";
version = "2.3.0";
# not sure why this is broken
disabled = isPyPy;
src = fetchPypi {
inherit pname version;
hash = "sha256-85gr6NnFPawSYYZAE/3sf4Ov0uQu3m9t0GnF4UnFQNU=";
hash = "sha256-lCp5SYF0f21/Q51uSdOdyRqaZBKDYUFgyTxHTHLCliE=";
};
nativeCheckInputs = [ pytest pytest-cov pytest-mock freezegun ];
nativeCheckInputs = [
pytest
pytest-cov
pytest-mock
freezegun
safety
pre-commit
];
propagatedBuildInputs = [
binaryornot
jinja2
@ -26,6 +35,8 @@ buildPythonPackage rec {
jinja2-time
python-slugify
requests
arrow
rich
];
# requires network access for cloning git repos

View File

@ -1,7 +1,6 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, isPy3k
, flask
, werkzeug
}:
@ -9,6 +8,7 @@
buildPythonPackage rec {
pname = "flask-reverse-proxy-fix";
version = "0.2.1";
format = "setuptools";
# master fixes flask import syntax and has no major changes
# new release requested: https://github.com/sublee/flask-silk/pull/6
@ -16,15 +16,17 @@ buildPythonPackage rec {
owner = "antarctica";
repo = "flask-reverse-proxy-fix";
rev = "v${version}";
sha256 = "1jbr67cmnryn0igv05qkvqjwrwj2rsajvvjnv3cdkm9bkgb4h5k5";
hash = "sha256-ZRZI1psr1dnY2FbuLZXOQvLMJd4TF7BfBNZnW9kxeck=";
};
disabled = !isPy3k;
postPatch = ''
sed -i 's@werkzeug.contrib.fixers@werkzeug.middleware.proxy_fix@g' flask_reverse_proxy_fix/middleware/__init__.py
'';
# This is needed so that setup.py does not add "devNone" to the version,
# after which setuptools throws an error for an invalid version.
env.CI_COMMIT_TAG = "v${version}";
propagatedBuildInputs = [
flask
werkzeug

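The CI_COMMIT_TAG assignment above works because attributes of the env set are exported verbatim as environment variables in the build sandbox, where setup.py (or any other build script) can read them. A minimal sketch of the mechanism, with a hypothetical variable and derivation:

{ stdenv }:

stdenv.mkDerivation {
  pname = "env-example";       # hypothetical derivation
  version = "0.0.1";
  dontUnpack = true;           # no source needed for this sketch

  # Everything under `env` becomes an environment variable in the builder.
  env.EXAMPLE_TAG = "v0.0.1";

  buildPhase = ''
    echo "building with tag $EXAMPLE_TAG"
  '';
  installPhase = ''
    mkdir -p $out
    echo "$EXAMPLE_TAG" > $out/tag
  '';
}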
View File

@ -57,6 +57,11 @@ buildPythonPackage rec {
hash = "sha256-lZzm43m30y+2qjxNddFEeg9HDlQP9afq5VtuR25zaLc=";
};
postPatch = ''
# This should be removed after updating to version 5.3.0.
sed -i '/filterwarnings =/a ignore:pkg_resources is deprecated:DeprecationWarning' pytest.ini
'';
propagatedBuildInputs = [
blinker
email-validator

View File

@ -1,6 +1,7 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, pythonOlder
, poetry-core
, grpcio
@ -20,13 +21,17 @@ buildPythonPackage rec {
owner = "d5h-foss";
repo = pname;
rev = "refs/tags/v${version}";
sha256 = "sha256-zulK0SVehzHcrmgIyH0D5sUOiAN53eIg88IoVyF6/DA=";
hash = "sha256-zulK0SVehzHcrmgIyH0D5sUOiAN53eIg88IoVyF6/DA=";
};
postPatch = ''
substituteInPlace pyproject.toml \
--replace "poetry.masonry.api" "poetry.core.masonry.api"
'';
patches = [
# https://github.com/d5h-foss/grpc-interceptor/pull/44
(fetchpatch {
name = "replace-poetry-with-poetry-core.patch";
url = "https://github.com/d5h-foss/grpc-interceptor/commit/916cb394acd8dd7abb4f5edcb4e88aee961a32d0.patch";
hash = "sha256-W2SF2zyjusTxgvCxBDLpisD03bofzDug1eyd4FLJmKs=";
})
];
nativeBuildInputs = [
poetry-core
@ -37,6 +42,8 @@ buildPythonPackage rec {
protobuf
];
__darwinAllowLocalNetworking = true;
nativeCheckInputs = [
pytest-asyncio
pytestCheckHook

View File

@ -20,6 +20,16 @@ buildPythonPackage rec {
hash = "sha256-+cOUBoG8ODgzkPjEbqXYRF1uEcbaZITDfYnfWuHawTE=";
};
# We relax dependencies here instead of pulling in a patch because upstream
# has released a new version using hatch-jupyter-builder, but it is not yet
# trivial to upgrade to that.
#
postPatch = ''
substituteInPlace pyproject.toml \
--replace '"jupyterlab==3.*",' "" \
--replace 'jupyter_packaging~=' 'jupyter_packaging>='
'';
nativeBuildInputs = [ jupyter-packaging ];
propagatedBuildInputs = [ ipywidgets numpy pillow ];

View File

@ -21,7 +21,18 @@ buildPythonPackage rec {
hash = "sha256-kym7949VI6C+62p3IOQ2QIzWnuSBcrmySb83oqUwhjI=";
};
nativeBuildInputs = [ hatchling hatch-jupyter-builder ];
# We do not need the jupyterlab build dependency, because we do not need to
# build any JS components; these are present already in the PyPI artifact.
#
postPatch = ''
substituteInPlace pyproject.toml \
--replace '"jupyterlab==3.*",' ""
'';
nativeBuildInputs = [
hatchling
hatch-jupyter-builder
];
propagatedBuildInputs = [ ipywidgets jupyter-ui-poll ];

View File

@ -28,6 +28,14 @@ buildPythonPackage rec {
hash = "sha256-o5ql75VgFwvw6a/typ/wReG5wYMsSTAzd+3Mkc6p+3c=";
};
# We do not need the jupyterlab build dependency, because we do not need to
# build any JS components; these are present already in the PyPI artifact.
#
postPatch = ''
substituteInPlace pyproject.toml \
--replace '"jupyterlab>=3.0.0,==3.*",' ""
'';
nativeBuildInputs = [
hatchling
];

View File

@ -27,6 +27,15 @@ buildPythonPackage rec {
hash = "sha256-14vIih+r/PHLxhgG29YtwuosSBLpewD2CluWpH2+pLc=";
};
# Opened https://github.com/progressivis/ipytablewidgets/issues/3 to ask if
# jupyterlab can be updated upstream. (From commits, it looks like it was
# set to this version on purpose.) In the meantime, the build still works.
#
postPatch = ''
substituteInPlace pyproject.toml \
--replace 'jupyterlab>=3.0.0,<3.7' 'jupyterlab>=3.0.0'
'';
nativeBuildInputs = [
jupyter-packaging
jupyterlab

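Relaxing an over-tight pin in pyproject.toml, as done here and in several packages above, is a plain textual substitution before the build. A minimal sketch of that postPatch idiom, assuming a hypothetical package and dependency name:

{ lib, python3Packages }:

python3Packages.buildPythonPackage rec {
  pname = "example-widget";    # hypothetical package name
  version = "0.1.0";
  format = "pyproject";

  src = python3Packages.fetchPypi {
    inherit pname version;
    hash = lib.fakeHash;       # placeholder
  };

  # Loosen an exact pin so the package builds against the dependency
  # version that nixpkgs actually ships.
  postPatch = ''
    substituteInPlace pyproject.toml \
      --replace 'somelib==1.2.3' 'somelib>=1.2.3'
  '';

  nativeBuildInputs = [ python3Packages.hatchling ];
}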
View File

@ -1,7 +1,9 @@
{ lib
, buildPythonPackage
, fetchpatch
, fetchPypi
, setuptools
, wheel
}:
buildPythonPackage rec {
@ -16,8 +18,18 @@ buildPythonPackage rec {
hash = "sha256-MeurZ6731qjeBK6HTwXYLVs6+nXF9Hf1p8/NNwxmae4=";
};
patches = [
# https://github.com/XKNX/knx-frontend/pull/96
(fetchpatch {
name = "relax-setuptools-dependency.patch";
url = "https://github.com/XKNX/knx-frontend/commit/72ac6dc42eeeb488992b0709ee58ea4a79287817.patch";
hash = "sha256-EpfgEq4pIx7ahqJZalzo30ruj8NlZYHcKHxFXCGL98w=";
})
];
nativeBuildInputs = [
setuptools
wheel
];
pythonImportsCheck = [

View File

@ -2,17 +2,18 @@
, stdenv
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, scikit-build-core
, pybind11
, cmake
, LASzip
, ninja
, pythonOlder
}:
buildPythonPackage rec {
pname = "laszip-python";
version = "0.2.3";
format = "pyproject";
disabled = pythonOlder "3.7";
@ -24,14 +25,25 @@ buildPythonPackage rec {
hash = "sha256-MiPzL9TDCf1xnCv7apwdfcpkFnBRi4PO/atTQxqL8cw=";
};
patches = [
# Removes the dependency on the cmake and ninja PyPI packages, since we can
# pass the tools in directly and scikit-build-core can use them.
# https://github.com/tmontaigu/laszip-python/pull/9
(fetchpatch {
name = "remove-cmake-ninja-pypi-dependencies.patch";
url = "https://github.com/tmontaigu/laszip-python/commit/17e648d04945fa2d095d6d74d58c790a4fcde84a.patch";
hash = "sha256-k58sS1RqVzT1WPh2OVt/D4Y045ODtj6U3bUjegd44VY=";
})
];
env.NIX_CFLAGS_COMPILE = lib.optionalString stdenv.cc.isGNU "-std=c++17";
nativeBuildInputs = [
cmake
ninja
pybind11
scikit-build-core
scikit-build-core.optional-dependencies.pyproject
];
] ++ scikit-build-core.optional-dependencies.pyproject;
dontUseCmakeConfigure = true;

View File

@ -26,6 +26,12 @@ buildPythonPackage rec {
hash = "sha256-oaNZ0US0YR/PSwAZ5GfRpAW+HRYVhdCZI83fC00rgok=";
};
postPatch = ''
# Asked in https://github.com/Project-MONAI/monai-deploy-app-sdk/issues/450
# if this patch can be incorporated upstream.
substituteInPlace pyproject.toml --replace 'versioneer-518' 'versioneer'
'';
nativeBuildInputs = [ versioneer ];
propagatedBuildInputs = [

View File

@ -1,7 +1,9 @@
{ lib
, buildPythonPackage
, fetchPypi
, oldest-supported-numpy
, setuptools-scm
, wheel
, pythonOlder
, gsl
, numpy
@ -25,8 +27,10 @@ buildPythonPackage rec {
};
nativeBuildInputs = [
setuptools-scm
gsl
oldest-supported-numpy
setuptools-scm
wheel
];
buildInputs = [

View File

@ -1,8 +1,10 @@
{ lib
, buildPythonPackage
, fetchpatch
, fetchPypi
, setuptools
, setuptools-scm
, wheel
, pytestCheckHook
}:
@ -16,9 +18,13 @@ buildPythonPackage rec {
hash = "sha256-5FZxyug4Wo5iSKmwejqDKAwtDMQxJxMFjPus3F7Jlz4=";
};
nativeBuildInputs = [
setuptools
setuptools-scm
patches = [
# https://github.com/wheerd/multiset/pull/115
(fetchpatch {
name = "relax-setuptools-scm-dependency.patch";
url = "https://github.com/wheerd/multiset/commit/296187b07691c94b783f65504afc580a355abd96.patch";
hash = "sha256-vnZR1cyM/2/JfbLuVOxJuC9oMVVVploUHpbzagmo+AE=";
})
];
postPatch = ''
@ -26,6 +32,12 @@ buildPythonPackage rec {
sed -i '/python_requires/d' setup.cfg
'';
nativeBuildInputs = [
setuptools
setuptools-scm
wheel
];
pythonImportsCheck = [
"multiset"
];

View File

@ -10,14 +10,14 @@
buildPythonPackage rec {
pname = "mypy-protobuf";
version = "3.4.0";
version = "3.5.0";
format = "pyproject";
disabled = pythonOlder "3.6";
disabled = pythonOlder "3.8";
src = fetchPypi {
inherit pname version;
hash = "sha256-fXWgeWUbEFB2d2o1pUBeP6dzuKFnEY8bcS5EPppsGKI=";
hash = "sha256-IfJw2gqXkqnax2sN9GPAJ+VhZkq2lzxZvk5NBk3+Z9w=";
};
propagatedBuildInputs = [

View File

@ -1,30 +1,44 @@
{ lib, buildPythonPackage, fetchFromGitHub, setuptools, napalm, netmiko
, pytestCheckHook }:
{ lib
, buildPythonPackage
, fetchFromGitHub
, napalm
, netmiko
, pip
, pytestCheckHook
}:
buildPythonPackage rec {
pname = "napalm-hp-procurve";
version = "0.7.0";
format = "setuptools";
src = fetchFromGitHub {
owner = "napalm-automation-community";
repo = pname;
rev = version;
sha256 = "1lspciddkd1w5lfyz35i0qwgpbn5jq9cbqkwjbsvi4kliz229vkh";
hash = "sha256-cO4kxI90krj1knzixRKWxa77OAaxjO8dLTy02VpkV9M=";
};
nativeBuildInputs = [
pip
];
# dependency installation in setup.py doesn't work
patchPhase = ''
echo -n > requirements.txt
'';
buildInputs = [ setuptools napalm ];
buildInputs = [ napalm ];
propagatedBuildInputs = [ netmiko ];
# setup.cfg seems to contain invalid pytest parameters
preCheck = ''
rm setup.cfg
'';
nativeCheckInputs = [ pytestCheckHook ];
disabledTests = [
# AssertionError: Some methods vary.
"test_method_signatures"

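The two fixups above (emptying requirements.txt so setup.py stops trying to install dependencies itself, and deleting a setup.cfg whose pytest options the shipped test suite rejects) are plain shell steps in the build phases. A minimal sketch of the same shape for a hypothetical package; it uses postPatch instead of overriding patchPhase so that the default patch hooks still run:

{ lib, python3Packages }:

python3Packages.buildPythonPackage rec {
  pname = "example-driver";    # hypothetical package name
  version = "0.1.0";
  format = "setuptools";

  src = python3Packages.fetchPypi {
    inherit pname version;
    hash = lib.fakeHash;       # placeholder
  };

  # Neutralise a requirements.txt that setup.py tries to install from.
  postPatch = ''
    echo -n > requirements.txt
  '';

  # Drop a setup.cfg whose pytest options break the test run.
  preCheck = ''
    rm setup.cfg
  '';

  nativeCheckInputs = [ python3Packages.pytestCheckHook ];
}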
View File

@ -3,7 +3,9 @@
, fetchPypi
, isPyPy
, python
, oldest-supported-numpy
, setuptools
, wheel
, numpy
, zlib
, netcdf
@ -26,7 +28,12 @@ buildPythonPackage rec {
hash = "sha256-A4KwL/aiiEGfb/7IXexA9FH0G4dVVHFUxXXd2fD0rlM=";
};
nativeBuildInputs = [ setuptools cython ];
nativeBuildInputs = [
cython
oldest-supported-numpy
setuptools
wheel
];
propagatedBuildInputs = [
cftime

View File

@ -1,6 +1,7 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, packaging
, poetry-core
, pytestCheckHook
@ -22,6 +23,15 @@ buildPythonPackage rec {
hash = "sha256-27HWMzSzyAbiOW7OUhlupRWIVJG6DrpXObXmxlCsmxU=";
};
patches = [
# https://github.com/vemel/newversion/pull/9
(fetchpatch {
name = "remove-setuptools-dependency.patch";
url = "https://github.com/vemel/newversion/commit/b50562671029dd6834bc7a8ad0dd3f9e0fbdfc1d.patch";
hash = "sha256-6dXVQ9Hk0/EfSwPbW19ZV8MAFcSx+ZRO5G94kbh23GM=";
})
];
nativeBuildInputs = [
poetry-core
];

View File

@ -5,6 +5,9 @@
, pythonOlder
, pytestCheckHook
, cython
, setuptools
, setuptools-scm
, wheel
, numpy
, scipy
, matplotlib
@ -23,11 +26,32 @@ buildPythonPackage rec {
hash = "sha256-NnoVrSt6MTTcNup1e+/1v5JoHCYcycuQH4rHLzXJt+Y=";
};
buildInputs = [ cython ];
propagatedBuildInputs = [ numpy scipy matplotlib networkx nibabel ];
# Upstream wants to build against the oldest version of numpy possible, but
# we only want to build against the most recent version.
postPatch = ''
substituteInPlace pyproject.toml \
--replace "numpy==" "numpy>="
'';
nativeBuildInputs = [
cython
setuptools
setuptools-scm
wheel
];
propagatedBuildInputs = [
numpy
scipy
matplotlib
networkx
nibabel
];
nativeCheckInputs = [ pytestCheckHook ];
doCheck = !stdenv.isDarwin; # tests hang indefinitely
pythonImportsCheck = [ "nitime" ];
meta = with lib; {

View File

@ -33,7 +33,7 @@ buildPythonPackage rec {
hatch-vcs
];
propagatedBuildInput = [
propagatedBuildInputs = [
pip
];

View File

@ -1,9 +1,11 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, pytestCheckHook
, pythonOlder
, setuptools
, wheel
}:
buildPythonPackage rec {
@ -20,6 +22,15 @@ buildPythonPackage rec {
hash = "sha256-e9ZDqcS3MaMlXi2a2JHI6NtRPqIV7rjsucGXEH6V8LA=";
};
patches = [
# https://github.com/frederickjansen/polyline/pull/15
(fetchpatch {
name = "relax-build-dependencies.patch";
url = "https://github.com/frederickjansen/polyline/commit/cb9fc80606c33dbbcaa0d94de25ae952358443b6.patch";
hash = "sha256-epg2pZAG+9QuICa1ms+/EO2DDmYEz+KEtxxnvG7rsWY=";
})
];
postPatch = ''
substituteInPlace pyproject.toml \
--replace " --cov=polyline --cov-report term-missing" ""
@ -27,6 +38,7 @@ buildPythonPackage rec {
nativeBuildInputs = [
setuptools
wheel
];
nativeCheckInputs = [

View File

@ -4,8 +4,10 @@
, buildPythonPackage
, cryptography
, fetchFromGitHub
, fetchpatch
, pythonOlder
, setuptools
, wheel
}:
buildPythonPackage rec {
@ -22,8 +24,18 @@ buildPythonPackage rec {
hash = "sha256-1jIsKQa27XNVievU02jjanRWFtJDYsHolgPBab6qpM0=";
};
patches = [
# https://github.com/emontnemery/py-dormakaba-dkey/pull/45
(fetchpatch {
name = "relax-setuptools-dependency.patch";
url = "https://github.com/emontnemery/py-dormakaba-dkey/commit/cfda4be71d39f2cfd1c0d4f7fff9018050c57f1a.patch";
hash = "sha256-JGsaLQNbUfz0uK/MeGnR2XTJDs4RnTOEg7BavfDPArg=";
})
];
nativeBuildInputs = [
setuptools
wheel
];
propagatedBuildInputs = [

View File

@ -4,6 +4,7 @@
, ipykernel
, ipywidgets
, jinja2
, jupyter
, numpy
, pandas
, pytestCheckHook
@ -26,6 +27,8 @@ buildPythonPackage rec {
};
nativeBuildInputs = [
jinja2
jupyter
setuptools
wheel
];

View File

@ -1,6 +1,7 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, pythonOlder
, pytestCheckHook
, pythonRelaxDepsHook
@ -26,10 +27,14 @@ buildPythonPackage rec {
fetchSubmodules = true;
};
postPatch = ''
substituteInPlace pyproject.toml \
--replace "poetry.masonry.api" "poetry.core.masonry.api"
'';
patches = [
# https://github.com/razorx89/pydicom-seg/pull/54
(fetchpatch {
name = "replace-poetry-with-poetry-core.patch";
url = "https://github.com/razorx89/pydicom-seg/commit/ac91eaefe3b0aecfe745869972c08de5350d2b61.patch";
hash = "sha256-xBOVjWZPjyQ8gSj6JLe9B531e11TI3FUFFtL+IelZOM=";
})
];
pythonRelaxDeps = [
"jsonschema"

View File

@ -4,6 +4,7 @@
, async-generator
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, pypubsub
, pyserial
, pyserial-asyncio
@ -11,6 +12,7 @@
, pythonOlder
, setuptools
, voluptuous
, wheel
}:
buildPythonPackage rec {
@ -27,8 +29,18 @@ buildPythonPackage rec {
hash = "sha256-KKF+XYQgdmLbbicyMFyZBG4ol69xAWCF2W/r15gH2Mo=";
};
patches = [
# https://github.com/pyinsteon/pyinsteon/pull/361
(fetchpatch {
name = "relax-setuptools-dependency.patch";
url = "https://github.com/pyinsteon/pyinsteon/commit/676bc5fff11b73a4c3fd189a6ac6d3de9ca21ae0.patch";
hash = "sha256-kTu1+IwDrcdqelyK/vfhxw8MQBis5I1jag7YTytKQhs=";
})
];
nativeBuildInputs = [
setuptools
wheel
];
propagatedBuildInputs = [

View File

@ -2,11 +2,13 @@
, stdenv
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, pytest-asyncio
, pytest-timeout
, pytestCheckHook
, pythonOlder
, setuptools
, wheel
}:
buildPythonPackage rec {
@ -23,8 +25,18 @@ buildPythonPackage rec {
hash = "sha256-Vlt4+fRULb9mB0ceRmc7MJ50DnF9DAJPHA8iCbNVvcE=";
};
patches = [
# https://github.com/alengwenus/pypck/pull/109
(fetchpatch {
name = "relax-setuptools-dependency.patch";
url = "https://github.com/alengwenus/pypck/commit/17023ebe8082120b1eec086842ca809ec6e9df2b.patch";
hash = "sha256-kTu1+IwDrcdqelyK/vfhxw8MQBis5I1jag7YTytKQhs=";
})
];
nativeBuildInputs = [
setuptools
wheel
];
nativeCheckInputs = [

View File

@ -1,6 +1,7 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, runtimeShell
# build
@ -27,14 +28,20 @@ buildPythonPackage rec {
hash = "sha256-8pXOnLNjhIv0d+BqjW8wlb6BT6CmFHSsxn5wLOv3LBQ=";
};
patches = [
# https://github.com/jedie/python-creole/pull/77
(fetchpatch {
name = "replace-poetry-with-poetry-core.patch";
url = "https://github.com/jedie/python-creole/commit/bfc46730ab4a189f3142246cead8d26005a28671.patch";
hash = "sha256-WtoEQyu/154Cfj6eSnNA+t37+o7Ij328QGMKxwcLg5k=";
})
];
nativeBuildInputs = [
poetry-core
];
postPatch = ''
substituteInPlace pyproject.toml \
--replace "poetry.masonry.api" "poetry.core.masonry.api"
substituteInPlace Makefile \
--replace "/bin/bash" "${runtimeShell}"

View File

@ -4,6 +4,7 @@
, awesomeversion
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, poetry-core
, protobuf
, pytest-asyncio
@ -25,6 +26,15 @@ buildPythonPackage rec {
hash = "sha256-XTSnIL/hBL1Rsyv/tBce/WCvA3n7mZern0v3i6gTOeA=";
};
patches = [
# https://github.com/DCSBL/python-homewizard-energy/pull/235
(fetchpatch {
name = "remove-setuptools-dependency.patch";
url = "https://github.com/DCSBL/python-homewizard-energy/commit/b006b0bc1f3d0b4a7569654a1afa90dd4cffaf18.patch";
hash = "sha256-WQeepxiYnBfFcQAmrc3pavBz5j1Qo0HmUcOxsK/pr50=";
})
];
nativeBuildInputs = [
poetry-core
];
@ -34,6 +44,8 @@ buildPythonPackage rec {
aiohttp
];
__darwinAllowLocalNetworking = true;
nativeCheckInputs = [
aresponses
pytest-asyncio

View File

@ -4,6 +4,7 @@
, buildPythonPackage
, docstring-to-markdown
, fetchFromGitHub
, fetchpatch
, flake8
, flaky
, jedi
@ -28,6 +29,7 @@
, ujson
, websockets
, whatthepatch
, wheel
, yapf
}:
@ -45,7 +47,14 @@ buildPythonPackage rec {
hash = "sha256-plciPUROFileVULGBZpwUTkW2NZVHy4Nuf4+fSjd8nM=";
};
SETUPTOOLS_SCM_PRETEND_VERSION = version;
patches = [
# https://github.com/python-lsp/python-lsp-server/pull/416
(fetchpatch {
name = "bump-jedi-upper-pin-to-0.20.patch";
url = "https://github.com/python-lsp/python-lsp-server/commit/f33a93afc8c3a0f16751f9e1f6601a37967fd7df.patch";
hash = "sha256-lBpzXxjlQp2ig0z2DRJw+jQZ5eRLIOJYjGrzfgvknDA=";
})
];
postPatch = ''
substituteInPlace pyproject.toml \
@ -53,6 +62,8 @@ buildPythonPackage rec {
--replace "--cov pylsp --cov test" ""
'';
env.SETUPTOOLS_SCM_PRETEND_VERSION = version;
pythonRelaxDeps = [
"autopep8"
"flake8"
@ -65,6 +76,7 @@ buildPythonPackage rec {
nativeBuildInputs = [
pythonRelaxDepsHook
setuptools-scm
wheel
];
propagatedBuildInputs = [

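The pythonRelaxDeps list above works together with pythonRelaxDepsHook, which rewrites the requirement pins in the built wheel's metadata instead of patching pyproject.toml by hand. A minimal sketch with hypothetical dependency names:

{ lib, python3Packages }:

python3Packages.buildPythonPackage rec {
  pname = "example-server";    # hypothetical package name
  version = "2.0.0";
  format = "pyproject";

  src = python3Packages.fetchPypi {
    inherit pname version;
    hash = lib.fakeHash;       # placeholder
  };

  nativeBuildInputs = [
    python3Packages.pythonRelaxDepsHook
    python3Packages.setuptools
  ];

  # Drop the upper bounds on these requirements in the wheel metadata.
  pythonRelaxDeps = [ "somelib" "otherlib" ];
  # Remove a requirement that is not needed at runtime at all.
  pythonRemoveDeps = [ "unneeded-plugin" ];
}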
View File

@ -4,10 +4,13 @@
, buildPythonPackage
, cryptography
, fetchFromGitHub
, fetchpatch
, pytest-asyncio
, pytestCheckHook
, pythonOlder
, setuptools
, voluptuous
, wheel
}:
buildPythonPackage rec {
@ -24,8 +27,18 @@ buildPythonPackage rec {
hash = "sha256-bPN2h60ypjlKpXs1xDS7bZcGRXvatA3EdlAX/HLxxTM=";
};
patches = [
# https://github.com/home-assistant-libs/python-otbr-api/pull/68
(fetchpatch {
name = "relax-setuptools-dependency.patch";
url = "https://github.com/home-assistant-libs/python-otbr-api/commit/37eb19c12d17ac7d040ded035d8401def872fbda.patch";
hash = "sha256-JGsaLQNbUfz0uK/MeGnR2XTJDs4RnTOEg7BavfDPArg=";
})
];
nativeBuildInputs = [
setuptools
wheel
];
propagatedBuildInputs = [
@ -36,6 +49,7 @@ buildPythonPackage rec {
];
nativeCheckInputs = [
pytest-asyncio
pytestCheckHook
];

View File

@ -1,21 +1,29 @@
{ lib
, buildPythonPackage
, fetchPypi
, fetchFromGitHub
, setuptools
, setuptools-scm
, wheel
}:
buildPythonPackage rec {
version = "1.0.0";
version = "1.1.0";
pname = "python-vagrant";
format = "pyproject";
src = fetchPypi {
inherit pname version;
hash = "sha256-qP6TzPL/N+zJXsL0nqdKkabOc6TbShapjdJtOXz9CeU=";
src = fetchFromGitHub {
owner = "pycontribs";
repo = "python-vagrant";
rev = "refs/tags/v${version}";
hash = "sha256-apvYzH0IY6ZyUP/FiOVbGN3dXejgN7gn7Mq2tlEaTww=";
};
env.SETUPTOOLS_SCM_PRETEND_VERSION = version;
nativeBuildInputs = [
setuptools
setuptools-scm
wheel
];
# The tests try to connect to qemu

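Sources fetched as GitHub tarballs carry no git metadata, so setuptools-scm cannot derive a version on its own; exporting SETUPTOOLS_SCM_PRETEND_VERSION supplies it explicitly, as done above. A minimal sketch with a hypothetical package:

{ lib, python3Packages, fetchFromGitHub }:

python3Packages.buildPythonPackage rec {
  pname = "example-scm-pkg";   # hypothetical package name
  version = "1.1.0";
  format = "pyproject";

  src = fetchFromGitHub {
    owner = "example";         # placeholder owner/repo
    repo = "example-scm-pkg";
    rev = "refs/tags/v${version}";
    hash = lib.fakeHash;       # placeholder
  };

  # setuptools-scm normally reads the version from git tags, which the
  # fetched tarball does not contain; pretend the version instead.
  env.SETUPTOOLS_SCM_PRETEND_VERSION = version;

  nativeBuildInputs = with python3Packages; [
    setuptools
    setuptools-scm
    wheel
  ];
}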
View File

@ -7,6 +7,9 @@
, stringparser
, typing-extensions
, pytestCheckHook
, setuptools
, setuptools-scm
, wheel
}:
buildPythonPackage rec {
@ -19,9 +22,15 @@ buildPythonPackage rec {
src = fetchPypi {
pname = "PyVISA-sim";
inherit version;
sha256 = "sha256-vWxW941/1e58pqL/Rzq+eoZJpwsvLphgIe48SuJtohY=";
hash = "sha256-vWxW941/1e58pqL/Rzq+eoZJpwsvLphgIe48SuJtohY=";
};
nativeBuildInputs = [
setuptools
setuptools-scm
wheel
];
propagatedBuildInputs = [
pyvisa
pyyaml

View File

@ -1,20 +1,31 @@
{ lib
, buildPythonPackage
, fetchPypi
, pytestCheckHook
, zope_interface
, zope_testrunner
, sphinx
}:
buildPythonPackage rec {
pname = "repoze.sphinx.autointerface";
version = "1.0.0";
format = "setuptools";
src = fetchPypi {
inherit pname version;
hash = "sha256-SGvxQjpGlrkVPkiM750ybElv/Bbd6xSwyYh7RsYOKKE=";
};
propagatedBuildInputs = [ zope_interface sphinx ];
propagatedBuildInputs = [
zope_interface
sphinx
];
nativeCheckInputs = [
pytestCheckHook
zope_testrunner
];
meta = with lib; {
homepage = "https://github.com/repoze/repoze.sphinx.autointerface";

View File

@ -1,5 +1,6 @@
{ buildPythonPackage
, fetchFromGitHub
, fetchpatch
, lib
, cerberus
, pyyaml
@ -9,6 +10,7 @@
buildPythonPackage rec {
pname = "riscv-config";
version = "3.5.2";
format = "setuptools";
src = fetchFromGitHub {
owner = "riscv-software-src";
@ -17,6 +19,15 @@ buildPythonPackage rec {
hash = "sha256-K7W6yyqy/2c4WHyOojuvw2P/v7bND5K6WFfTujkofBw=";
};
patches = [
# Remove when updating to v3.8.0+
(fetchpatch {
name = "remove-dangling-pip-import.patch";
url = "https://github.com/riscv-software-src/riscv-config/commit/f75e7e13fe600b71254b0391be015ec533d3c3ef.patch";
hash = "sha256-oVRynBIJevq3UzlMDRh2rVuBJZoEwEYhDma3Bb/QV2E=";
})
];
propagatedBuildInputs = [ cerberus pyyaml ruamel-yaml ];
meta = with lib; {

View File

@ -2,6 +2,8 @@
, buildPythonPackage
, fetchPypi
, setuptools
, setuptools-scm
, wheel
, docutils
, importlib-metadata
, jinja2
@ -19,18 +21,19 @@
buildPythonPackage rec {
pname = "rst2pdf";
version = "0.101";
format = "pyproject";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-AF8FssEIFHmeY2oVrAPNe85pbmgKWO52yD6ycNNzTSg=";
hash = "sha256-AF8FssEIFHmeY2oVrAPNe85pbmgKWO52yD6ycNNzTSg=";
};
outputs = [ "out" "man" ];
nativeBuildInputs = [
setuptools
setuptools-scm
wheel
];
propagatedBuildInputs = [

View File

@ -8,6 +8,8 @@
, pkg-config
, numpy
, openblas
, setuptools
, wheel
}:
buildPythonPackage rec {
@ -23,6 +25,9 @@ buildPythonPackage rec {
postPatch = ''
patchShebangs .
substituteInPlace pyproject.toml \
--replace 'numpy==' 'numpy>='
'';
nativeBuildInputs = [
@ -30,7 +35,10 @@ buildPythonPackage rec {
gfortran
git
meson-python
numpy
pkg-config
setuptools
wheel
];
buildInputs = [

View File

@ -1,6 +1,7 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, poetry-core
, django
, sly
@ -20,6 +21,15 @@ buildPythonPackage rec {
hash = "sha256-QEPTYpWlRPWO6Evyt4zoqUST4ousF67GmiOpD7WUqcI=";
};
patches = [
# https://github.com/15five/scim2-filter-parser/pull/43
(fetchpatch {
name = "replace-poetry-with-poetry-core.patch";
url = "https://github.com/15five/scim2-filter-parser/commit/675d85f3a3ff338e96a408827d64d9e893fa5255.patch";
hash = "sha256-PjJH1S5CDe/BMI0+mB34KdpNNcHfexBFYBmHolsWH4o=";
})
];
nativeBuildInputs = [
poetry-core
];

View File

@ -4,10 +4,12 @@
, git
, mock
, pep440
, pip
, pytestCheckHook
, pythonOlder
, setuptools-scm
, six
, wheel
}:
buildPythonPackage rec {
@ -30,12 +32,14 @@ buildPythonPackage rec {
nativeBuildInputs = [
setuptools-scm
wheel
];
nativeCheckInputs = [
git
mock
pep440
pip
pytestCheckHook
six
];

View File

@ -5,7 +5,9 @@
, importlib-metadata
, sphinx
, pyenchant
, pbr
, setuptools
, setuptools-scm
, wheel
}:
buildPythonPackage rec {
@ -21,7 +23,9 @@ buildPythonPackage rec {
};
nativeBuildInputs = [
pbr
setuptools
setuptools-scm
wheel
];
propagatedBuildInputs = [

View File

@ -3,6 +3,8 @@
, fetchPypi
, nose
, setuptools
, setuptools-scm
, wheel
, arrow
, requests
, units
@ -22,6 +24,12 @@ buildPythonPackage rec {
hash = "sha256-P00oxUz0oVQB969c/N2wpKLe09wtvQWPH4DH4EZUaxc=";
};
nativeBuildInputs = [
setuptools
setuptools-scm
wheel
];
nativeCheckInputs = [
nose
];
@ -33,7 +41,6 @@ buildPythonPackage rec {
pint
pydantic
pytz
setuptools
six
];

View File

@ -21,6 +21,11 @@ buildPythonPackage rec {
hash = "sha256-wgrY5ajaMYxznyNvlD0ul1PFr3W8oV9I/OVsStlZEBM=";
};
postPatch = ''
sed -i '/\[project\]/a version = "${version}"' pyproject.toml
sed -i '/\[project\]/a name = "tweedledum"' pyproject.toml
'';
nativeBuildInputs = [ cmake ninja scikit-build ];
dontUseCmakeConfigure = true;

View File

@ -7,14 +7,15 @@
buildPythonPackage rec {
pname = "u-msgpack-python";
version = "2.7.2";
version = "2.8.0";
format = "setuptools";
src = fetchPypi {
inherit pname version;
hash = "sha256-6G96xqoO9MbEnwBLT9Q1vOmcI+LdXXMAPz+YFgJMK9g=";
hash = "sha256-uAGoPW7XXm30HkRRi08qnCIdwtpLzVOA46D+2lILxho=";
};
LC_ALL="en_US.UTF-8";
env.LC_ALL="en_US.UTF-8";
buildInputs = [ glibcLocales ];
@ -23,7 +24,7 @@ buildPythonPackage rec {
meta = {
description = "A portable, lightweight MessagePack serializer and deserializer written in pure Python";
homepage = "https://github.com/vsergeev/u-msgpack-python";
changelog = "https://github.com/vsergeev/u-msgpack-python/blob/v${version}/CHANGELOG.md";
license = lib.licenses.mit;
};
}

View File

@ -2,6 +2,8 @@
, buildPythonPackage
, fetchPypi
, setuptools
, setuptools-scm
, wheel
}:
buildPythonPackage rec {
@ -16,6 +18,8 @@ buildPythonPackage rec {
nativeBuildInputs = [
setuptools
setuptools-scm
wheel
];
meta = with lib; {

View File

@ -1,6 +1,7 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, fetchpatch
, lxml
, poetry-core
, pythonOlder
@ -10,16 +11,25 @@
buildPythonPackage rec {
pname = "xpath-expressions";
version = "1.1.0";
disabled = pythonOlder "3.5";
format = "pyproject";
disabled = pythonOlder "3.5";
src = fetchFromGitHub {
owner = "orf";
repo = pname;
rev = "v${version}";
sha256 = "0l289iw2zmzxyfi3g2z7b917vmsaz47h5jp871zvykpmpigc632h";
hash = "sha256-UAzDXrz1Tr9/OOjKAg/5Std9Qlrnizei8/3XL3hMSFA=";
};
patches = [
# https://github.com/orf/xpath-expressions/pull/4
(fetchpatch {
name = "replace-poetry-with-poetry-core.patch";
url = "https://github.com/orf/xpath-expressions/commit/3c5900fd6b2d08dd9468707f35ab42072cf75bd3.patch";
hash = "sha256-IeV6ncJyt/w2s5TPpbM5a3pljNT6Bp5PIiqgTg2iTRA=";
})
];
nativeBuildInputs = [
poetry-core
];
@ -29,12 +39,6 @@ buildPythonPackage rec {
pytestCheckHook
];
postPatch = ''
# Was fixed upstream but not released
substituteInPlace pyproject.toml \
--replace "poetry.masonry.api" "poetry.core.masonry.api"
'';
pythonImportsCheck = [ "xpath" ];
meta = with lib; {

View File

@ -22,14 +22,14 @@ with py.pkgs;
buildPythonApplication rec {
pname = "checkov";
version = "2.4.6";
version = "2.4.7";
format = "setuptools";
src = fetchFromGitHub {
owner = "bridgecrewio";
repo = pname;
rev = "refs/tags/${version}";
hash = "sha256-1o8l/c6DeT35GjEV/7+9+LLJwoCpWuq0LBkyr08mWaE=";
hash = "sha256-NveRGWf0Aghu0fxVAVSukrH9zFl+QJ2rWNYm5JIHIjk=";
};
patches = [

View File

@ -4,13 +4,13 @@
}:
buildGoModule rec {
pname = "litestream";
version = "0.3.9";
version = "0.3.11";
src = fetchFromGitHub {
owner = "benbjohnson";
repo = pname;
rev = "v${version}";
sha256 = "sha256-zs+Li8ylw+zexxuEkXX4qk7qslk23BLBcoHXRIuQNmU=";
sha256 = "sha256-03gGGx8RZEK2RrToN30gkIlHss/e3UcSi3AmMh9twDU=";
};
ldflags = [
@ -19,7 +19,7 @@ buildGoModule rec {
"-X main.Version=${version}"
];
vendorSha256 = "sha256-GiCvifdbWz+hH6aHACzlBpppNC5p24MHRWlbtKLIFhE=";
vendorHash = "sha256-sYIY3Z3VrCqbjEbQtEY7q6Jljg8jMoa2qWEB/IkDjzM=";
meta = with lib; {
description = "Streaming replication for SQLite";

View File

@ -2,7 +2,7 @@
buildGoModule rec {
pname = "doctl";
version = "1.97.1";
version = "1.98.0";
vendorHash = null;
@ -31,7 +31,7 @@ buildGoModule rec {
owner = "digitalocean";
repo = "doctl";
rev = "v${version}";
sha256 = "sha256-qEoSq4sLobsYYdwR8vp5WpugeQdLbXDtBVBTAztxPkY=";
sha256 = "sha256-M9kSQoYcJudL/y/Yc6enVT/rJusd+oe3BdjkaLRQ0gU=";
};
meta = with lib; {

File diff suppressed because it is too large

View File

@ -6,24 +6,16 @@
rustPlatform.buildRustPackage rec {
pname = "postgres-lsp";
version = "unstable-2023-08-08";
version = "unstable-2023-08-23";
src = fetchFromGitHub {
owner = "supabase";
repo = "postgres_lsp";
rev = "1250f5ed14a0e86b2b7fa581214284c67b960621";
hash = "sha256-Y43sTgKNcAI3h6McDc0g6o9CX6jOKBfURLWyjJhvmwk=";
rev = "47dd0132b12661ab6c97f5fba892e567a5109c84";
hash = "sha256-aV3QAp6DkNrHiDe1Ytiu6UyTWrelV6vO83Baiv4ONLg=";
};
cargoLock = {
lockFile = ./Cargo.lock;
};
# Cargo.lock is ignored
# https://github.com/supabase/postgres_lsp/pull/28
postPatch = ''
ln -s ${./Cargo.lock} Cargo.lock
'';
cargoHash = "sha256-9d/KiQ7IXhmYvTb97FKJh/cGTdnxAgCXSx4+V74b+RE=";
nativeBuildInputs = [
protobuf

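rustPlatform.buildRustPackage can pin crate dependencies either from a fixed-output vendor tarball (cargoHash) when upstream ships Cargo.lock, or from a lockfile kept in the nixpkgs tree (cargoLock.lockFile) while upstream still gitignores it. A minimal sketch of both options, assuming hypothetical names and placeholder hashes:

{ lib, rustPlatform, fetchFromGitHub }:

rustPlatform.buildRustPackage rec {
  pname = "example-tool";      # hypothetical package name
  version = "0.1.0";

  src = fetchFromGitHub {
    owner = "example";         # placeholder owner/repo
    repo = "example-tool";
    rev = "v${version}";
    hash = lib.fakeHash;       # placeholder
  };

  # Option A: upstream commits Cargo.lock, so a single fixed-output
  # vendor tarball is enough.
  cargoHash = lib.fakeHash;    # placeholder; fill in after the first build

  # Option B (use instead of cargoHash): upstream gitignores Cargo.lock,
  # so keep a copy next to this file and point the build at it.
  # cargoLock.lockFile = ./Cargo.lock;
}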
View File

@ -2,12 +2,12 @@
stdenv.mkDerivation rec {
pname = "opengrok";
version = "1.12.12";
version = "1.12.13";
# binary distribution
src = fetchurl {
url = "https://github.com/oracle/opengrok/releases/download/${version}/${pname}-${version}.tar.gz";
hash = "sha256-0ppkexmVchHL+lCfB+xtLcDCZ24Sv0Opr5RNLILKg2M=";
hash = "sha256-DfSLQj6cbgDT56MwwnlC6hK/y3Hce2Ueprw0o3NURW0=";
};
nativeBuildInputs = [ makeWrapper ];

View File

@ -6,13 +6,13 @@
buildGoModule rec {
pname = "oh-my-posh";
version = "18.3.3";
version = "18.3.5";
src = fetchFromGitHub {
owner = "jandedobbeleer";
repo = pname;
rev = "refs/tags/v${version}";
hash = "sha256-AJw+NNTbksYSW2VqUzxLwxwd3OjM9uK/ou2CVS2zNvw=";
hash = "sha256-5wcKG97NXTTTBJOD9kOsOp1MuHazAPlE4yLPWJoeCA8=";
};
vendorHash = "sha256-xkguBWk2Nh8w7C7tKbvaP0tRgZO4z08AEsdjNlJYC6Q=";

View File

@ -13,14 +13,14 @@
rustPlatform.buildRustPackage rec {
pname = "rust-analyzer-unwrapped";
version = "2023-08-14";
cargoSha256 = "sha256-sau5lno9jqC4NVDY62aNlyRMW/T/xEHUtzyL5wIE6yQ=";
version = "2023-08-21";
cargoSha256 = "sha256-aQFBNUXkoEsm5qKsMasqTIKoC0V7UUgmlukgOr5Vqpc=";
src = fetchFromGitHub {
owner = "rust-lang";
repo = "rust-analyzer";
rev = version;
sha256 = "sha256-KxbpMaIH7GkLecWCQsoDtpql1N869RIIfZcLDRcuB5k=";
sha256 = "sha256-ribQkxEbMMb8vcBMKvcrPHFftMmlaF3HIAbJty9fDeY=";
};
cargoBuildFlags = [ "--bin" "rust-analyzer" "--bin" "rust-analyzer-proc-macro-srv" ];

View File

@ -14,13 +14,13 @@
stdenv.mkDerivation rec {
pname = "r2modman";
version = "3.1.42";
version = "3.1.43";
src = fetchFromGitHub {
owner = "ebkr";
repo = "r2modmanPlus";
rev = "v${version}";
hash = "sha256-16sE706iivYoI40JJUkqVmtxkYsgAFBg+0tXOc6scqc=";
hash = "sha256-qZeBF58VB/wW0N2MZgZfiIJdDqHUdfruAoCuDEFeCPA=";
};
offlineCache = fetchYarnDeps {

View File

@ -26,5 +26,6 @@ stdenv.mkDerivation rec {
license = licenses.gpl2;
maintainers = [ ];
platforms = platforms.linux;
mainProgram = "vlock";
};
}

Some files were not shown because too many files have changed in this diff