Merge remote-tracking branch 'upstream/staging-next' into staging

Conflicts:
	pkgs/development/python-modules/pynetdicom/default.nix
commit 6df4f7b89b by annalee, 2024-04-05 00:09:07 +00:00
76 changed files with 1935 additions and 1215 deletions

View File

@ -19,7 +19,7 @@ For new packages please briefly describe the package or provide a link to its ho
- [ ] `sandbox = true`
- [ ] Tested, as applicable:
  - [NixOS test(s)](https://nixos.org/manual/nixos/unstable/index.html#sec-nixos-tests) (look inside [nixos/tests](https://github.com/NixOS/nixpkgs/blob/master/nixos/tests))
-   - and/or [package tests](https://nixos.org/manual/nixpkgs/unstable/#sec-package-tests)
+   - and/or [package tests](https://github.com/NixOS/nixpkgs/blob/master/pkgs/README.md#package-tests)
  - or, for functions and "core" functionality, tests in [lib/tests](https://github.com/NixOS/nixpkgs/blob/master/lib/tests) or [pkgs/test](https://github.com/NixOS/nixpkgs/blob/master/pkgs/test)
  - made sure NixOS tests are [linked](https://nixos.org/manual/nixpkgs/unstable/#ssec-nixos-tests-linking) to the relevant packages
- [ ] Tested compilation of all packages that depend on this change using `nix-shell -p nixpkgs-review --run "nixpkgs-review rev HEAD"`. Note: all changes have to be committed, also see [nixpkgs-review usage](https://github.com/Mic92/nixpkgs-review#usage)

View File

@ -69,7 +69,7 @@ let
      hasAttr head isAttrs isBool isInt isList isPath isString length
      lessThan listToAttrs pathExists readFile replaceStrings seq
      stringLength sub substring tail trace;
-    inherit (self.trivial) id const pipe concat or and bitAnd bitOr bitXor
+    inherit (self.trivial) id const pipe concat or and xor bitAnd bitOr bitXor
      bitNot boolToString mergeAttrs flip mapNullable inNixShell isFloat min max
      importJSON importTOML warn warnIf warnIfNot throwIf throwIfNot checkListOfEnum
      info showWarnings nixpkgsVersion version isInOldestRelease

View File

@ -106,6 +106,7 @@ let
      types
      updateManyAttrsByPath
      versions
xor
      ;
  testingThrow = expr: {
@ -214,6 +215,21 @@ runTests {
    expected = false;
  };
testXor = {
expr = [
(xor true false)
(xor true true)
(xor false false)
(xor false true)
];
expected = [
true
false
false
true
];
};
  testFix = {
    expr = fix (x: {a = if x ? a then "a" else "b";});
    expected = {a = "a";};

View File

@ -199,6 +199,24 @@ in {
  */
  and = x: y: x && y;
/**
boolean exclusive or
# Inputs
`x`
: 1\. Function argument
`y`
: 2\. Function argument
*/
# We explicitly invert the arguments purely as a type assertion.
# This is invariant under XOR, so it does not affect the result.
xor = x: y: (!x) != (!y);
  /**
    bitwise not
  */
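
The new helper is tiny but easy to misread at a glance; a quick sanity check in nix repl (a hypothetical session against a nixpkgs checkout that already includes this change) shows the truth table, and `lib.xor` is also what the users-groups and kea hunks below switch to:

  nix-repl> lib = import ./lib
  nix-repl> lib.xor true false
  true
  nix-repl> lib.xor true true
  false
  nix-repl> lib.trivial.xor (1 == 1) (2 == 2)
  false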

View File

@ -17200,6 +17200,12 @@
    githubId = 2660;
    name = "Russell Sim";
  };
rutherther = {
name = "Rutherther";
email = "rutherther@proton.me";
github = "rutherther";
githubId = 12197024;
};
  ruuda = {
    email = "dev+nix@veniogames.com";
    github = "ruuda";

View File

@ -111,6 +111,8 @@ Use `services.pipewire.extraConfig` or `services.pipewire.configPackages` for Pi
- [Anki Sync Server](https://docs.ankiweb.net/sync-server.html), the official sync server built into recent versions of Anki. Available as [services.anki-sync-server](#opt-services.anki-sync-server.enable).
  The pre-existing [services.ankisyncd](#opt-services.ankisyncd.enable) has been marked deprecated and will be dropped after 24.05 due to lack of maintenance of the anki-sync-server software.
- [mautrix-meta](https://github.com/mautrix/meta), a Matrix <-> Facebook and Matrix <-> Instagram hybrid puppeting/relaybot bridge. Available as `services.mautrix-meta`.
- [transfer-sh](https://github.com/dutchcoders/transfer.sh), a tool that supports easy and fast file sharing from the command-line. Available as [services.transfer-sh](#opt-services.transfer-sh.enable).
- [MollySocket](https://github.com/mollyim/mollysocket) which allows getting Signal notifications via UnifiedPush.
@ -139,6 +141,8 @@ The pre-existing [services.ankisyncd](#opt-services.ankisyncd.enable) has been m
- [TuxClocker](https://github.com/Lurkki14/tuxclocker), a hardware control and monitoring program. Available as [programs.tuxclocker](#opt-programs.tuxclocker.enable).
- A binfmt option for appimage-run, to support running [AppImage](https://appimage.org/)s seamlessly on NixOS. Available as [programs.appimage.binfmt](#opt-programs.appimage.binfmt).
- [ALVR](https://github.com/alvr-org/alvr), a VR desktop streamer. Available as [programs.alvr](#opt-programs.alvr.enable)
- [RustDesk](https://rustdesk.com), a full-featured open source remote control alternative for self-hosting and security with minimal configuration. Alternative to TeamViewer.

View File

@ -871,7 +871,6 @@ in {
      }
      {
        assertion = let
-         xor = a: b: a && !b || b && !a;
          isEffectivelySystemUser = user.isSystemUser || (user.uid != null && user.uid < 1000);
        in xor isEffectivelySystemUser user.isNormalUser;
        message = ''

View File

@ -143,6 +143,7 @@
  ./programs/adb.nix
  ./programs/alvr.nix
  ./programs/appgate-sdp.nix
./programs/appimage.nix
  ./programs/atop.nix
  ./programs/ausweisapp.nix
  ./programs/autojump.nix
@ -651,6 +652,7 @@
  ./services/matrix/hebbot.nix
  ./services/matrix/maubot.nix
  ./services/matrix/mautrix-facebook.nix
./services/matrix/mautrix-meta.nix
  ./services/matrix/mautrix-telegram.nix
  ./services/matrix/mautrix-whatsapp.nix
  ./services/matrix/mjolnir.nix
@ -1155,6 +1157,7 @@
  ./services/networking/tayga.nix
  ./services/networking/tcpcrypt.nix
  ./services/networking/teamspeak3.nix
./services/networking/technitium-dns-server.nix
  ./services/networking/teleport.nix
  ./services/networking/tetrd.nix
  ./services/networking/tftpd.nix

View File

@ -0,0 +1,33 @@
{ lib, config, pkgs, ... }:
let
cfg = config.programs.appimage;
in
{
options.programs.appimage = {
enable = lib.mkEnableOption "appimage-run wrapper script for executing appimages on NixOS";
binfmt = lib.mkEnableOption "binfmt registration to run appimages via appimage-run seamlessly";
package = lib.mkPackageOption pkgs "appimage-run" {
example = ''
pkgs.appimage-run.override {
extraPkgs = pkgs: [ pkgs.ffmpeg pkgs.imagemagick ];
}
'';
};
};
config = lib.mkIf cfg.enable {
boot.binfmt.registrations.appimage = lib.mkIf cfg.binfmt {
wrapInterpreterInShell = false;
interpreter = lib.getExe cfg.package;
recognitionType = "magic";
offset = 0;
mask = ''\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff'';
magicOrExtension = ''\x7fELF....AI\x02'';
};
environment.systemPackages = [ cfg.package ];
};
meta.maintainers = with lib.maintainers; [ jopejoe1 atemu ];
}
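
For context, a minimal sketch of how the new module is meant to be used from a host configuration (a hypothetical configuration.nix fragment; the override mirrors the `package` example above and is optional):

  { pkgs, ... }:
  {
    programs.appimage = {
      enable = true;
      # Register the binfmt handler so an AppImage can be executed directly.
      binfmt = true;
      # Optionally extend appimage-run with extra libraries some AppImages need.
      package = pkgs.appimage-run.override {
        extraPkgs = pkgs: [ pkgs.ffmpeg ];
      };
    };
  }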

View File

@ -0,0 +1,562 @@
{ config, pkgs, lib, ... }:
let
settingsFormat = pkgs.formats.yaml {};
upperConfig = config;
cfg = config.services.mautrix-meta;
upperCfg = cfg;
fullDataDir = cfg: "/var/lib/${cfg.dataDir}";
settingsFile = cfg: "${fullDataDir cfg}/config.yaml";
settingsFileUnsubstituted = cfg: settingsFormat.generate "mautrix-meta-config.yaml" cfg.settings;
metaName = name: "mautrix-meta-${name}";
enabledInstances = lib.filterAttrs (name: config: config.enable) config.services.mautrix-meta.instances;
registerToSynapseInstances = lib.filterAttrs (name: config: config.enable && config.registerToSynapse) config.services.mautrix-meta.instances;
in {
options = {
services.mautrix-meta = {
package = lib.mkPackageOption pkgs "mautrix-meta" { };
instances = lib.mkOption {
type = lib.types.attrsOf (lib.types.submodule ({ config, name, ... }: {
options = {
enable = lib.mkEnableOption "Mautrix-Meta, a Matrix <-> Facebook and Matrix <-> Instagram hybrid puppeting/relaybot bridge";
dataDir = lib.mkOption {
type = lib.types.str;
default = metaName name;
description = ''
Path to the directory with database, registration, and other data for the bridge service.
This path is relative to `/var/lib`; it cannot start with `../` (i.e. it cannot point outside of `/var/lib`).
'';
};
registrationFile = lib.mkOption {
type = lib.types.path;
readOnly = true;
description = ''
Path to the yaml registration file of the appservice.
'';
};
registerToSynapse = lib.mkOption {
type = lib.types.bool;
default = true;
description = ''
Whether to add the registration file to `services.matrix-synapse.settings.app_service_config_files` and
to make Synapse wait for the registration service.
'';
};
settings = lib.mkOption rec {
apply = lib.recursiveUpdate default;
inherit (settingsFormat) type;
default = {
homeserver = {
software = "standard";
domain = "";
address = "";
};
appservice = {
id = "";
database = {
type = "sqlite3-fk-wal";
uri = "file:${fullDataDir config}/mautrix-meta.db?_txlock=immediate";
};
bot = {
username = "";
};
hostname = "localhost";
port = 29319;
address = "http://${config.settings.appservice.hostname}:${toString config.settings.appservice.port}";
};
meta = {
mode = "";
};
bridge = {
# Enable encryption by default to make the bridge more secure
encryption = {
allow = true;
default = true;
require = true;
# Recommended options from mautrix documentation
# for additional security.
delete_keys = {
dont_store_outbound = true;
ratchet_on_decrypt = true;
delete_fully_used_on_decrypt = true;
delete_prev_on_new_session = true;
delete_on_device_delete = true;
periodically_delete_expired = true;
delete_outdated_inbound = true;
};
verification_levels = {
receive = "cross-signed-tofu";
send = "cross-signed-tofu";
share = "cross-signed-tofu";
};
};
permissions = {};
};
logging = {
min_level = "info";
writers = lib.singleton {
type = "stdout";
format = "pretty-colored";
time_format = " ";
};
};
};
defaultText = ''
{
homeserver = {
software = "standard";
address = "https://''${config.settings.homeserver.domain}";
};
appservice = {
database = {
type = "sqlite3-fk-wal";
uri = "file:''${fullDataDir config}/mautrix-meta.db?_txlock=immediate";
};
hostname = "localhost";
port = 29319;
address = "http://''${config.settings.appservice.hostname}:''${toString config.settings.appservice.port}";
};
bridge = {
# Require encryption by default to make the bridge more secure
encryption = {
allow = true;
default = true;
require = true;
# Recommended options from mautrix documentation
# for optimal security.
delete_keys = {
dont_store_outbound = true;
ratchet_on_decrypt = true;
delete_fully_used_on_decrypt = true;
delete_prev_on_new_session = true;
delete_on_device_delete = true;
periodically_delete_expired = true;
delete_outdated_inbound = true;
};
verification_levels = {
receive = "cross-signed-tofu";
send = "cross-signed-tofu";
share = "cross-signed-tofu";
};
};
};
logging = {
min_level = "info";
writers = lib.singleton {
type = "stdout";
format = "pretty-colored";
time_format = " ";
};
};
};
'';
description = ''
{file}`config.yaml` configuration as a Nix attribute set.
Configuration options should match those described in
[example-config.yaml](https://github.com/mautrix/meta/blob/main/example-config.yaml).
Secret tokens should be specified using {option}`environmentFile`
instead.
'';
};
environmentFile = lib.mkOption {
type = lib.types.nullOr lib.types.path;
default = null;
description = ''
File containing environment variables to substitute when copying the configuration
out of the Nix store into the instance's `dataDir`.
Can be used to store secrets without making them available in the Nix store.
For example, you can set `settings.appservice.as_token = "$MAUTRIX_META_APPSERVICE_AS_TOKEN"`
and then specify `MAUTRIX_META_APPSERVICE_AS_TOKEN="{token}"` in the environment file.
This value will be substituted into the configuration file as the appservice `as_token`.
'';
};
serviceDependencies = lib.mkOption {
type = lib.types.listOf lib.types.str;
default =
[ config.registrationServiceUnit ] ++
(lib.lists.optional upperConfig.services.matrix-synapse.enable upperConfig.services.matrix-synapse.serviceUnit) ++
(lib.lists.optional upperConfig.services.matrix-conduit.enable "matrix-conduit.service") ++
(lib.lists.optional upperConfig.services.dendrite.enable "dendrite.service");
defaultText = ''
[ config.registrationServiceUnit ] ++
(lib.lists.optional upperConfig.services.matrix-synapse.enable upperConfig.services.matrix-synapse.serviceUnit) ++
(lib.lists.optional upperConfig.services.matrix-conduit.enable "matrix-conduit.service") ++
(lib.lists.optional upperConfig.services.dendrite.enable "dendrite.service");
'';
description = ''
List of Systemd services to require and wait for when starting the application service.
'';
};
serviceUnit = lib.mkOption {
type = lib.types.str;
readOnly = true;
description = ''
The systemd unit (a service or a target) for other services to depend on if they
need to be started after this mautrix-meta bridge instance.
'';
};
registrationServiceUnit = lib.mkOption {
type = lib.types.str;
readOnly = true;
description = ''
The registration service that generates the registration file.
The systemd unit (a service or a target) for other services to depend on if they
need to be started after the mautrix-meta registration service.
'';
};
};
config = {
serviceUnit = (metaName name) + ".service";
registrationServiceUnit = (metaName name) + "-registration.service";
registrationFile = (fullDataDir config) + "/meta-registration.yaml";
};
}));
description = ''
Configuration of multiple `mautrix-meta` instances.
`services.mautrix-meta.instances.facebook` and `services.mautrix-meta.instances.instagram`
come preconfigured with meta.mode, appservice.id, bot username, display name and avatar.
'';
example = ''
{
facebook = {
enable = true;
settings = {
homeserver.domain = "example.com";
};
};
instagram = {
enable = true;
settings = {
homeserver.domain = "example.com";
};
};
messenger = {
enable = true;
settings = {
meta.mode = "messenger";
homeserver.domain = "example.com";
appservice = {
id = "messenger";
bot = {
username = "messengerbot";
displayname = "Messenger bridge bot";
avatar = "mxc://maunium.net/ygtkteZsXnGJLJHRchUwYWak";
};
};
};
};
}
'';
};
};
};
config = lib.mkMerge [
(lib.mkIf (enabledInstances != {}) {
assertions = lib.mkMerge (lib.attrValues (lib.mapAttrs (name: cfg: [
{
assertion = cfg.settings.homeserver.domain != "" && cfg.settings.homeserver.address != "";
message = ''
The options with information about the homeserver:
`services.mautrix-meta.instances.${name}.settings.homeserver.domain` and
`services.mautrix-meta.instances.${name}.settings.homeserver.address` have to be set.
'';
}
{
assertion = builtins.elem cfg.settings.meta.mode [ "facebook" "facebook-tor" "messenger" "instagram" ];
message = ''
The option `services.mautrix-meta.instances.${name}.settings.meta.mode` has to be set
to one of: facebook, facebook-tor, messenger, instagram.
This configures the mode of the bridge.
'';
}
{
assertion = cfg.settings.bridge.permissions != {};
message = ''
The option `services.mautrix-meta.instances.${name}.settings.bridge.permissions` has to be set.
'';
}
{
assertion = cfg.settings.appservice.id != "";
message = ''
The option `services.mautrix-meta.instances.${name}.settings.appservice.id` has to be set.
'';
}
{
assertion = cfg.settings.appservice.bot.username != "";
message = ''
The option `services.mautrix-meta.instances.${name}.settings.appservice.bot.username` has to be set.
'';
}
]) enabledInstances));
users.users = lib.mapAttrs' (name: cfg: lib.nameValuePair "mautrix-meta-${name}" {
isSystemUser = true;
group = "mautrix-meta";
extraGroups = [ "mautrix-meta-registration" ];
description = "Mautrix-Meta-${name} bridge user";
}) enabledInstances;
users.groups.mautrix-meta = {};
users.groups.mautrix-meta-registration = {
members = lib.lists.optional config.services.matrix-synapse.enable "matrix-synapse";
};
services.matrix-synapse = lib.mkIf (config.services.matrix-synapse.enable) (let
registrationFiles = lib.attrValues
(lib.mapAttrs (name: cfg: cfg.registrationFile) registerToSynapseInstances);
in {
settings.app_service_config_files = registrationFiles;
});
systemd.services = lib.mkMerge [
{
matrix-synapse = lib.mkIf (config.services.matrix-synapse.enable) (let
registrationServices = lib.attrValues
(lib.mapAttrs (name: cfg: cfg.registrationServiceUnit) registerToSynapseInstances);
in {
wants = registrationServices;
after = registrationServices;
});
}
(lib.mapAttrs' (name: cfg: lib.nameValuePair "${metaName name}-registration" {
description = "Mautrix-Meta registration generation service - ${metaName name}";
path = [
pkgs.yq
pkgs.envsubst
upperCfg.package
];
script = ''
# Substitute environment variables (in this case read from EnvironmentFile)
# into the settings file
rm -f '${settingsFile cfg}'
old_umask=$(umask)
umask 0177
envsubst \
-o '${settingsFile cfg}' \
-i '${settingsFileUnsubstituted cfg}'
config_has_tokens=$(yq '.appservice | has("as_token") and has("hs_token")' '${settingsFile cfg}')
registration_already_exists=$([[ -f '${cfg.registrationFile}' ]] && echo "true" || echo "false")
echo "There are tokens in the config: $config_has_tokens"
echo "Registration already existed: $registration_already_exists"
# If tokens are not configured in the config/environment file but a registration
# file already exists, copy the tokens from it into the config so they are not lost
if [[ $config_has_tokens == "false" && $registration_already_exists == "true" ]]; then
echo "Copying as_token, hs_token from registration into configuration"
yq -sY '.[0].appservice.as_token = .[1].as_token
| .[0].appservice.hs_token = .[1].hs_token
| .[0]' '${settingsFile cfg}' '${cfg.registrationFile}' \
> '${settingsFile cfg}.tmp'
mv '${settingsFile cfg}.tmp' '${settingsFile cfg}'
fi
# make sure --generate-registration does not affect config.yaml
cp '${settingsFile cfg}' '${settingsFile cfg}.tmp'
echo "Generating registration file"
mautrix-meta \
--generate-registration \
--config='${settingsFile cfg}.tmp' \
--registration='${cfg.registrationFile}'
rm '${settingsFile cfg}.tmp'
# No tokens configured, and new ones were just generated by --generate-registration for the first time
if [[ $config_has_tokens == "false" && $registration_already_exists == "false" ]]; then
echo "Copying newly generated as_token, hs_token from registration into configuration"
yq -sY '.[0].appservice.as_token = .[1].as_token
| .[0].appservice.hs_token = .[1].hs_token
| .[0]' '${settingsFile cfg}' '${cfg.registrationFile}' \
> '${settingsFile cfg}.tmp'
mv '${settingsFile cfg}.tmp' '${settingsFile cfg}'
fi
# Make sure correct tokens are in the registration file
if [[ $config_has_tokens == "true" || $registration_already_exists == "true" ]]; then
echo "Copying as_token, hs_token from configuration to the registration file"
yq -sY '.[1].as_token = .[0].appservice.as_token
| .[1].hs_token = .[0].appservice.hs_token
| .[1]' '${settingsFile cfg}' '${cfg.registrationFile}' \
> '${cfg.registrationFile}.tmp'
mv '${cfg.registrationFile}.tmp' '${cfg.registrationFile}'
fi
umask $old_umask
chown :mautrix-meta-registration '${cfg.registrationFile}'
chmod 640 '${cfg.registrationFile}'
'';
serviceConfig = {
Type = "oneshot";
UMask = 0027;
User = "mautrix-meta-${name}";
Group = "mautrix-meta";
SystemCallFilter = [ "@system-service" ];
ProtectSystem = "strict";
ProtectHome = true;
ReadWritePaths = fullDataDir cfg;
StateDirectory = cfg.dataDir;
EnvironmentFile = cfg.environmentFile;
};
restartTriggers = [ (settingsFileUnsubstituted cfg) ];
}) enabledInstances)
(lib.mapAttrs' (name: cfg: lib.nameValuePair "${metaName name}" {
description = "Mautrix-Meta bridge - ${metaName name}";
wantedBy = [ "multi-user.target" ];
wants = [ "network-online.target" ] ++ cfg.serviceDependencies;
after = [ "network-online.target" ] ++ cfg.serviceDependencies;
serviceConfig = {
Type = "simple";
User = "mautrix-meta-${name}";
Group = "mautrix-meta";
PrivateUsers = true;
LockPersonality = true;
MemoryDenyWriteExecute = true;
NoNewPrivileges = true;
PrivateDevices = true;
PrivateTmp = true;
ProtectClock = true;
ProtectControlGroups = true;
ProtectHome = true;
ProtectHostname = true;
ProtectKernelLogs = true;
ProtectKernelModules = true;
ProtectKernelTunables = true;
ProtectSystem = "strict";
Restart = "on-failure";
RestartSec = "30s";
RestrictRealtime = true;
RestrictSUIDSGID = true;
SystemCallArchitectures = "native";
SystemCallErrorNumber = "EPERM";
SystemCallFilter = ["@system-service"];
UMask = 0027;
WorkingDirectory = fullDataDir cfg;
ReadWritePaths = fullDataDir cfg;
StateDirectory = cfg.dataDir;
EnvironmentFile = cfg.environmentFile;
ExecStart = lib.escapeShellArgs [
(lib.getExe upperCfg.package)
"--config=${settingsFile cfg}"
];
};
restartTriggers = [ (settingsFileUnsubstituted cfg) ];
}) enabledInstances)
];
})
{
services.mautrix-meta.instances = let
inherit (lib.modules) mkDefault;
in {
instagram = {
settings = {
meta.mode = mkDefault "instagram";
bridge = {
username_template = mkDefault "instagram_{{.}}";
};
appservice = {
id = mkDefault "instagram";
port = mkDefault 29320;
bot = {
username = mkDefault "instagrambot";
displayname = mkDefault "Instagram bridge bot";
avatar = mkDefault "mxc://maunium.net/JxjlbZUlCPULEeHZSwleUXQv";
};
};
};
};
facebook = {
settings = {
meta.mode = mkDefault "facebook";
bridge = {
username_template = mkDefault "facebook_{{.}}";
};
appservice = {
id = mkDefault "facebook";
port = mkDefault 29321;
bot = {
username = mkDefault "facebookbot";
displayname = mkDefault "Facebook bridge bot";
avatar = mkDefault "mxc://maunium.net/ygtkteZsXnGJLJHRchUwYWak";
};
};
};
};
};
}
];
meta.maintainers = with lib.maintainers; [ rutherther ];
}
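
To complement the `example` above, a sketch of how the `environmentFile` substitution described in the option documentation is intended to be wired up (hypothetical values; the variable names are simply whatever you reference from `settings`, as in the tests below):

  {
    services.mautrix-meta.instances.facebook = {
      enable = true;
      # Kept outside the Nix store, e.g. provisioned by a secrets manager.
      environmentFile = "/run/secrets/mautrix-meta-facebook.env";
      settings = {
        homeserver = {
          domain = "example.com";
          address = "https://matrix.example.com";
        };
        appservice = {
          # Substituted from the environment file by the registration service.
          as_token = "$AS_TOKEN";
          hs_token = "$HS_TOKEN";
        };
        bridge.permissions."@alice:example.com" = "user";
      };
    };
  }

The environment file would then contain `AS_TOKEN=...` and `HS_TOKEN=...` lines; if it is omitted, the registration service generates the tokens on first start and copies them back into config.yaml, as the script above does.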

View File

@ -9,7 +9,6 @@ with lib;
let
  cfg = config.services.kea;
- xor = x: y: (!x && y) || (x && !y);
  format = pkgs.formats.json {};
  chooseNotNull = x: y: if x != null then x else y;

View File

@ -0,0 +1,109 @@
{
config,
lib,
pkgs,
...
}:
let
cfg = config.services.technitium-dns-server;
stateDir = "/var/lib/technitium-dns-server";
inherit (lib)
mkEnableOption
mkPackageOption
mkOption
mkIf
types
;
in
{
options.services.technitium-dns-server = {
enable = mkEnableOption "Technitium DNS Server";
package = mkPackageOption pkgs "technitium-dns-server" { };
openFirewall = mkOption {
type = types.bool;
default = false;
description = ''
Whether to open ports in the firewall.
Standard ports are 53 (UDP and TCP, for DNS), 5380 and 53443 (TCP, HTTP and HTTPS for web interface).
Specify different or additional ports in the `firewallUDPPorts` and `firewallTCPPorts` options if necessary.
'';
};
firewallUDPPorts = mkOption {
type = with types; listOf int;
default = [ 53 ];
description = ''
List of UDP ports to open in firewall.
'';
};
firewallTCPPorts = mkOption {
type = with types; listOf int;
default = [
53
5380 # web interface HTTP
53443 # web interface HTTPS
];
description = ''
List of TCP ports to open in firewall.
You might want to open ports 443 and 853 if you intend to use DNS over HTTPS or DNS over TLS.
'';
};
};
config = mkIf cfg.enable {
systemd.services.technitium-dns-server = {
description = "Technitium DNS Server";
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
serviceConfig = {
ExecStart = "${cfg.package}/bin/technitium-dns-server ${stateDir}";
DynamicUser = true;
StateDirectory = "technitium-dns-server";
WorkingDirectory = stateDir;
BindPaths = stateDir;
Restart = "always";
RestartSec = 10;
TimeoutStopSec = 10;
KillSignal = "SIGINT";
# Harden the service
LockPersonality = true;
NoNewPrivileges = true;
PrivateDevices = true;
PrivateMounts = true;
PrivateTmp = true;
ProtectClock = true;
ProtectControlGroups = true;
ProtectHome = true;
ProtectHostname = true;
ProtectKernelLogs = true;
ProtectKernelModules = true;
ProtectKernelTunables = true;
ProtectSystem = "strict";
RemoveIPC = true;
RestrictAddressFamilies = "AF_INET AF_INET6 AF_UNIX AF_NETLINK";
RestrictNamespaces = true;
RestrictRealtime = true;
RestrictSUIDSGID = true;
AmbientCapabilities = [ "CAP_NET_BIND_SERVICE" ];
CapabilityBoundingSet = [ "CAP_NET_BIND_SERVICE" ];
};
};
networking.firewall = mkIf cfg.openFirewall {
allowedUDPPorts = cfg.firewallUDPPorts;
allowedTCPPorts = cfg.firewallTCPPorts;
};
};
meta.maintainers = with lib.maintainers; [ fabianrig ];
}
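
A minimal sketch of enabling the new service (hypothetical host configuration; 53, 5380 and 53443 are the module's default firewall ports, and 443/853 are the optional DNS-over-HTTPS/DNS-over-TLS ports mentioned in the option description):

  {
    services.technitium-dns-server = {
      enable = true;
      openFirewall = true;
      # Also expose DNS-over-HTTPS and DNS-over-TLS in addition to the defaults.
      firewallTCPPorts = [ 53 443 853 5380 53443 ];
    };
  }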

View File

@ -522,6 +522,8 @@ in {
  matrix-conduit = handleTest ./matrix/conduit.nix {};
  matrix-synapse = handleTest ./matrix/synapse.nix {};
  matrix-synapse-workers = handleTest ./matrix/synapse-workers.nix {};
mautrix-meta-postgres = handleTest ./matrix/mautrix-meta-postgres.nix {};
mautrix-meta-sqlite = handleTest ./matrix/mautrix-meta-sqlite.nix {};
  mattermost = handleTest ./mattermost.nix {};
  mealie = handleTest ./mealie.nix {};
  mediamtx = handleTest ./mediamtx.nix {};
@ -916,6 +918,7 @@ in {
  tang = handleTest ./tang.nix {};
  taskserver = handleTest ./taskserver.nix {};
  tayga = handleTest ./tayga.nix {};
technitium-dns-server = handleTest ./technitium-dns-server.nix {};
  teeworlds = handleTest ./teeworlds.nix {};
  telegraf = handleTest ./telegraf.nix {};
  teleport = handleTest ./teleport.nix {};

View File

@ -108,6 +108,12 @@ let
    assert "BEGIN PGP PUBLIC KEY BLOCK" in server.succeed("curl http://localhost:3000/api/v1/signing-key.gpg")
api_version = json.loads(server.succeed("curl http://localhost:3000/api/forgejo/v1/version")).get("version")
assert "development" != api_version and "-gitea-" in api_version, (
"/api/forgejo/v1/version should not return 'development' "
+ f"but should contain a gitea compatibility version string. Got '{api_version}' instead."
)
    server.succeed(
        "curl --fail http://localhost:3000/user/sign_up | grep 'Registration is disabled. "
        + "Please contact your site administrator.'"

View File

@ -31,7 +31,6 @@ let
    linux_5_15_hardened
    linux_6_1_hardened
    linux_6_6_hardened
-   linux_6_7_hardened
    linux_rt_5_4
    linux_rt_5_10
    linux_rt_5_15

View File

@ -0,0 +1,221 @@
import ../make-test-python.nix ({ pkgs, ... }:
let
homeserverDomain = "server";
homeserverUrl = "http://server:8008";
userName = "alice";
botUserName = "instagrambot";
asToken = "this-is-my-totally-randomly-generated-as-token";
hsToken = "this-is-my-totally-randomly-generated-hs-token";
in
{
name = "mautrix-meta-postgres";
meta.maintainers = pkgs.mautrix-meta.meta.maintainers;
nodes = {
server = { config, pkgs, ... }: {
services.postgresql = {
enable = true;
ensureUsers = [
{
name = "mautrix-meta-instagram";
ensureDBOwnership = true;
}
];
ensureDatabases = [
"mautrix-meta-instagram"
];
};
systemd.services.mautrix-meta-instagram = {
wants = [ "postgres.service" ];
after = [ "postgres.service" ];
};
services.matrix-synapse = {
enable = true;
settings = {
database.name = "sqlite3";
enable_registration = true;
# don't use this in production, always use some form of verification
enable_registration_without_verification = true;
listeners = [ {
# The default but tls=false
bind_addresses = [
"0.0.0.0"
];
port = 8008;
resources = [ {
"compress" = true;
"names" = [ "client" ];
} {
"compress" = false;
"names" = [ "federation" ];
} ];
tls = false;
type = "http";
} ];
};
};
services.mautrix-meta.instances.instagram = {
enable = true;
environmentFile = pkgs.writeText ''my-secrets'' ''
AS_TOKEN=${asToken}
HS_TOKEN=${hsToken}
'';
settings = {
homeserver = {
address = homeserverUrl;
domain = homeserverDomain;
};
appservice = {
port = 8009;
as_token = "$AS_TOKEN";
hs_token = "$HS_TOKEN";
database = {
type = "postgres";
uri = "postgres:///mautrix-meta-instagram?host=/var/run/postgresql";
};
bot.username = botUserName;
};
bridge.permissions."@${userName}:server" = "user";
};
};
networking.firewall.allowedTCPPorts = [ 8008 8009 ];
};
client = { pkgs, ... }: {
environment.systemPackages = [
(pkgs.writers.writePython3Bin "do_test"
{
libraries = [ pkgs.python3Packages.matrix-nio ];
flakeIgnore = [
# We don't live in the dark ages anymore.
# Languages like Python that are whitespace heavy will overrun
# 79 characters..
"E501"
];
} ''
import sys
import functools
import asyncio
from nio import AsyncClient, RoomMessageNotice, RoomCreateResponse, RoomInviteResponse
async def message_callback(matrix: AsyncClient, msg: str, _r, e):
print("Received matrix text message: ", e)
assert msg in e.body
exit(0) # Success!
async def run(homeserver: str):
matrix = AsyncClient(homeserver)
response = await matrix.register("${userName}", "foobar")
print("Matrix register response: ", response)
# Open a DM with the bridge bot
response = await matrix.room_create()
print("Matrix create room response:", response)
assert isinstance(response, RoomCreateResponse)
room_id = response.room_id
response = await matrix.room_invite(room_id, "@${botUserName}:${homeserverDomain}")
assert isinstance(response, RoomInviteResponse)
callback = functools.partial(
message_callback, matrix, "Hello, I'm an Instagram bridge bot."
)
matrix.add_event_callback(callback, RoomMessageNotice)
print("Waiting for matrix message...")
await matrix.sync_forever(timeout=30000)
if __name__ == "__main__":
asyncio.run(run(sys.argv[1]))
''
)
];
};
};
testScript = ''
def extract_token(data):
stdout = data[1]
stdout = stdout.strip()
line = stdout.split('\n')[-1]
return line.split(':')[-1].strip("\" '\n")
def get_token_from(token, file):
data = server.execute(f"cat {file} | grep {token}")
return extract_token(data)
def get_as_token_from(file):
return get_token_from("as_token", file)
def get_hs_token_from(file):
return get_token_from("hs_token", file)
config_yaml = "/var/lib/mautrix-meta-instagram/config.yaml"
registration_yaml = "/var/lib/mautrix-meta-instagram/meta-registration.yaml"
expected_as_token = "${asToken}"
expected_hs_token = "${hsToken}"
start_all()
with subtest("start the server"):
# bridge
server.wait_for_unit("mautrix-meta-instagram.service")
# homeserver
server.wait_for_unit("matrix-synapse.service")
server.wait_for_open_port(8008)
# Bridge only opens the port after it contacts the homeserver
server.wait_for_open_port(8009)
with subtest("ensure messages can be exchanged"):
client.succeed("do_test ${homeserverUrl} >&2")
with subtest("ensure as_token, hs_token match from environment file"):
as_token = get_as_token_from(config_yaml)
hs_token = get_hs_token_from(config_yaml)
as_token_registration = get_as_token_from(registration_yaml)
hs_token_registration = get_hs_token_from(registration_yaml)
assert as_token == expected_as_token, f"as_token in config should match the one specified (is: {as_token}, expected: {expected_as_token})"
assert hs_token == expected_hs_token, f"hs_token in config should match the one specified (is: {hs_token}, expected: {expected_hs_token})"
assert as_token_registration == expected_as_token, f"as_token in registration should match the one specified (is: {as_token_registration}, expected: {expected_as_token})"
assert hs_token_registration == expected_hs_token, f"hs_token in registration should match the one specified (is: {hs_token_registration}, expected: {expected_hs_token})"
with subtest("ensure as_token and hs_token stays same after restart"):
server.systemctl("restart mautrix-meta-instagram")
server.wait_for_open_port(8009)
as_token = get_as_token_from(config_yaml)
hs_token = get_hs_token_from(config_yaml)
as_token_registration = get_as_token_from(registration_yaml)
hs_token_registration = get_hs_token_from(registration_yaml)
assert as_token == expected_as_token, f"as_token in config should match the one specified (is: {as_token}, expected: {expected_as_token})"
assert hs_token == expected_hs_token, f"hs_token in config should match the one specified (is: {hs_token}, expected: {expected_hs_token})"
assert as_token_registration == expected_as_token, f"as_token in registration should match the one specified (is: {as_token_registration}, expected: {expected_as_token})"
assert hs_token_registration == expected_hs_token, f"hs_token in registration should match the one specified (is: {hs_token_registration}, expected: {expected_hs_token})"
'';
})

View File

@ -0,0 +1,247 @@
import ../make-test-python.nix ({ pkgs, ... }:
let
homeserverDomain = "server";
homeserverUrl = "http://server:8008";
username = "alice";
instagramBotUsername = "instagrambot";
facebookBotUsername = "facebookbot";
in
{
name = "mautrix-meta-sqlite";
meta.maintainers = pkgs.mautrix-meta.meta.maintainers;
nodes = {
server = { config, pkgs, ... }: {
services.matrix-synapse = {
enable = true;
settings = {
database.name = "sqlite3";
enable_registration = true;
# don't use this in production, always use some form of verification
enable_registration_without_verification = true;
listeners = [ {
# The default but tls=false
bind_addresses = [
"0.0.0.0"
];
port = 8008;
resources = [ {
"compress" = true;
"names" = [ "client" ];
} {
"compress" = false;
"names" = [ "federation" ];
} ];
tls = false;
type = "http";
} ];
};
};
services.mautrix-meta.instances.facebook = {
enable = true;
settings = {
homeserver = {
address = homeserverUrl;
domain = homeserverDomain;
};
appservice = {
port = 8009;
bot.username = facebookBotUsername;
};
bridge.permissions."@${username}:server" = "user";
};
};
services.mautrix-meta.instances.instagram = {
enable = true;
settings = {
homeserver = {
address = homeserverUrl;
domain = homeserverDomain;
};
appservice = {
port = 8010;
bot.username = instagramBotUsername;
};
bridge.permissions."@${username}:server" = "user";
};
};
networking.firewall.allowedTCPPorts = [ 8008 ];
};
client = { pkgs, ... }: {
environment.systemPackages = [
(pkgs.writers.writePython3Bin "register_user"
{
libraries = [ pkgs.python3Packages.matrix-nio ];
flakeIgnore = [
# We don't live in the dark ages anymore.
# Languages like Python that are whitespace heavy will overrun
# 79 characters..
"E501"
];
} ''
import sys
import asyncio
from nio import AsyncClient
async def run(username: str, homeserver: str):
matrix = AsyncClient(homeserver)
response = await matrix.register(username, "foobar")
print("Matrix register response: ", response)
if __name__ == "__main__":
asyncio.run(run(sys.argv[1], sys.argv[2]))
''
)
(pkgs.writers.writePython3Bin "do_test"
{
libraries = [ pkgs.python3Packages.matrix-nio ];
flakeIgnore = [
# We don't live in the dark ages anymore.
# Languages like Python that are whitespace heavy will overrun
# 79 characters..
"E501"
];
} ''
import sys
import functools
import asyncio
from nio import AsyncClient, RoomMessageNotice, RoomCreateResponse, RoomInviteResponse
async def message_callback(matrix: AsyncClient, msg: str, _r, e):
print("Received matrix text message: ", e)
assert msg in e.body
exit(0) # Success!
async def run(username: str, bot_username: str, homeserver: str):
matrix = AsyncClient(homeserver, f"@{username}:${homeserverDomain}")
response = await matrix.login("foobar")
print("Matrix login response: ", response)
# Open a DM with the bridge bot
response = await matrix.room_create()
print("Matrix create room response:", response)
assert isinstance(response, RoomCreateResponse)
room_id = response.room_id
response = await matrix.room_invite(room_id, f"@{bot_username}:${homeserverDomain}")
assert isinstance(response, RoomInviteResponse)
callback = functools.partial(
message_callback, matrix, "Hello, I'm an Instagram bridge bot."
)
matrix.add_event_callback(callback, RoomMessageNotice)
print("Waiting for matrix message...")
await matrix.sync_forever(timeout=30000)
if __name__ == "__main__":
asyncio.run(run(sys.argv[1], sys.argv[2], sys.argv[3]))
''
)
];
};
};
testScript = ''
def extract_token(data):
stdout = data[1]
stdout = stdout.strip()
line = stdout.split('\n')[-1]
return line.split(':')[-1].strip("\" '\n")
def get_token_from(token, file):
data = server.execute(f"cat {file} | grep {token}")
return extract_token(data)
def get_as_token_from(file):
return get_token_from("as_token", file)
def get_hs_token_from(file):
return get_token_from("hs_token", file)
config_yaml = "/var/lib/mautrix-meta-facebook/config.yaml"
registration_yaml = "/var/lib/mautrix-meta-facebook/meta-registration.yaml"
start_all()
with subtest("wait for bridges and homeserver"):
# bridge
server.wait_for_unit("mautrix-meta-facebook.service")
server.wait_for_unit("mautrix-meta-instagram.service")
# homeserver
server.wait_for_unit("matrix-synapse.service")
server.wait_for_open_port(8008)
# Bridges only open the port after they contact the homeserver
server.wait_for_open_port(8009)
server.wait_for_open_port(8010)
with subtest("register user"):
client.succeed("register_user ${username} ${homeserverUrl} >&2")
with subtest("ensure messages can be exchanged"):
client.succeed("do_test ${username} ${facebookBotUsername} ${homeserverUrl} >&2")
client.succeed("do_test ${username} ${instagramBotUsername} ${homeserverUrl} >&2")
with subtest("ensure as_token and hs_token stays same after restart"):
generated_as_token_facebook = get_as_token_from(config_yaml)
generated_hs_token_facebook = get_hs_token_from(config_yaml)
generated_as_token_facebook_registration = get_as_token_from(registration_yaml)
generated_hs_token_facebook_registration = get_hs_token_from(registration_yaml)
# Indirectly checks the as token is not set to something like empty string or "null"
assert len(generated_as_token_facebook) > 20, f"as_token ({generated_as_token_facebook}) is too short, something went wrong"
assert len(generated_hs_token_facebook) > 20, f"hs_token ({generated_hs_token_facebook}) is too short, something went wrong"
assert generated_as_token_facebook == generated_as_token_facebook_registration, f"as_token should be the same in registration ({generated_as_token_facebook_registration}) and configuration ({generated_as_token_facebook}) files"
assert generated_hs_token_facebook == generated_hs_token_facebook_registration, f"hs_token should be the same in registration ({generated_hs_token_facebook_registration}) and configuration ({generated_hs_token_facebook}) files"
server.systemctl("restart mautrix-meta-facebook")
server.systemctl("restart mautrix-meta-instagram")
server.wait_for_open_port(8009)
server.wait_for_open_port(8010)
new_as_token_facebook = get_as_token_from(config_yaml)
new_hs_token_facebook = get_hs_token_from(config_yaml)
assert generated_as_token_facebook == new_as_token_facebook, f"as_token should stay the same after restart inside the configuration file (is: {new_as_token_facebook}, was: {generated_as_token_facebook})"
assert generated_hs_token_facebook == new_hs_token_facebook, f"hs_token should stay the same after restart inside the configuration file (is: {new_hs_token_facebook}, was: {generated_hs_token_facebook})"
new_as_token_facebook = get_as_token_from(registration_yaml)
new_hs_token_facebook = get_hs_token_from(registration_yaml)
assert generated_as_token_facebook == new_as_token_facebook, f"as_token should stay the same after restart inside the registration file (is: {new_as_token_facebook}, was: {generated_as_token_facebook})"
assert generated_hs_token_facebook == new_hs_token_facebook, f"hs_token should stay the same after restart inside the registration file (is: {new_hs_token_facebook}, was: {generated_hs_token_facebook})"
with subtest("ensure messages can be exchanged after restart"):
client.succeed("do_test ${username} ${instagramBotUsername} ${homeserverUrl} >&2")
client.succeed("do_test ${username} ${facebookBotUsername} ${homeserverUrl} >&2")
'';
})

View File

@ -0,0 +1,21 @@
import ./make-test-python.nix ({pkgs, lib, ...}:
{
name = "technitium-dns-server";
nodes = {
machine = {pkgs, ...}: {
services.technitium-dns-server = {
enable = true;
openFirewall = true;
};
};
};
testScript = ''
start_all()
machine.wait_for_unit("technitium-dns-server.service")
machine.wait_for_open_port(53)
'';
meta.maintainers = with lib.maintainers; [ fabianrig ];
})

View File

@ -2,7 +2,7 @@
let
  pname = "erigon";
-  version = "2.59.2";
+  version = "2.59.3";
in
buildGoModule {
  inherit pname version;
@ -11,7 +11,7 @@ buildGoModule {
    owner = "ledgerwatch";
    repo = pname;
    rev = "v${version}";
-    hash = "sha256-gSoaPoyPyryC1yzYaafnPXKpMNzI9fw9Yd0nKzziAKw=";
+    hash = "sha256-pkcT9KFX4rz6WXUm9cG+6x9k+jGmLPGgl/4VnS7TNVE=";
    fetchSubmodules = true;
  };

View File

@ -49,9 +49,9 @@ stdenv.mkDerivation rec {
    })
  ];
-  buildInputs = [ neon libusb1 openssl udev avahi freeipmi libmodbus i2c-tools net-snmp gd ];
-  nativeBuildInputs = [ autoreconfHook libtool pkg-config makeWrapper ];
+  buildInputs = [ neon libusb1 openssl udev avahi freeipmi libmodbus libtool i2c-tools net-snmp gd ];
+  nativeBuildInputs = [ autoreconfHook pkg-config makeWrapper ];
  configureFlags =
    [ "--with-all"

View File

@ -27,29 +27,29 @@
"vendorHash": "sha256-jK7JuARpoxq7hvq5+vTtUwcYot0YqlOZdtDwq4IqKvk=" "vendorHash": "sha256-jK7JuARpoxq7hvq5+vTtUwcYot0YqlOZdtDwq4IqKvk="
}, },
"aiven": { "aiven": {
"hash": "sha256-ap2UuJojGx7+OZB2RmIZlHbawZi4lqa1iGUr2NLSPGk=", "hash": "sha256-6FcHqSXszJZkIX9wytkNU8+rKgBu34k2Xnfq6fcqkHs=",
"homepage": "https://registry.terraform.io/providers/aiven/aiven", "homepage": "https://registry.terraform.io/providers/aiven/aiven",
"owner": "aiven", "owner": "aiven",
"repo": "terraform-provider-aiven", "repo": "terraform-provider-aiven",
"rev": "v4.14.0", "rev": "v4.15.0",
"spdx": "MIT", "spdx": "MIT",
"vendorHash": "sha256-PSErY3yFDTjtK+FVlJEEBfZAz1BybjiPK7nDulrrbdY=" "vendorHash": "sha256-SNpsdbNvgLOS8pSSvz58xThTqzCOOPZMggJb7HetzAw="
}, },
"akamai": { "akamai": {
"hash": "sha256-j1UTi4ygixwSfu9Wp//JzKe58xSV/tZM3kRo1ikBo3Y=", "hash": "sha256-WOLEKdY8GbvAREbWQqAdITGVb4erHmIMp9GT2CsKvTk=",
"homepage": "https://registry.terraform.io/providers/akamai/akamai", "homepage": "https://registry.terraform.io/providers/akamai/akamai",
"owner": "akamai", "owner": "akamai",
"repo": "terraform-provider-akamai", "repo": "terraform-provider-akamai",
"rev": "v5.6.0", "rev": "v6.0.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-/gW1vxaDaUMpm0QSghd/Glo3S/XVa5t9x3QrIs4Bqyk=" "vendorHash": "sha256-g3U0w+gvrOzS8W3Cu+wpOlWo8JHUlBpxRkDDl6wzcXM="
}, },
"alicloud": { "alicloud": {
"hash": "sha256-Zi4oymePLOW6NgEE8aHlEo7rStz2GPNFSSUl9LUr7OU=", "hash": "sha256-+3MgqAMcDfwhVW3zGSsjLfVWmVNWyrjtUqB9KYzdYRk=",
"homepage": "https://registry.terraform.io/providers/aliyun/alicloud", "homepage": "https://registry.terraform.io/providers/aliyun/alicloud",
"owner": "aliyun", "owner": "aliyun",
"repo": "terraform-provider-alicloud", "repo": "terraform-provider-alicloud",
"rev": "v1.219.0", "rev": "v1.220.1",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": null "vendorHash": null
}, },
@ -72,20 +72,20 @@
"vendorHash": "sha256-buKYDNVCIcSDLCrCL4ZAKNQ7HqkH3+/7RHjyyR4dLmU=" "vendorHash": "sha256-buKYDNVCIcSDLCrCL4ZAKNQ7HqkH3+/7RHjyyR4dLmU="
}, },
"argocd": { "argocd": {
"hash": "sha256-nJrXbeI/07LlKngEkAnqPG6CiOLFTFugmZMVl2FEvIo=", "hash": "sha256-dHIvMFz5XIxxBvBFsEw8lqi6yVoYM9E4tLIoTY+mdiQ=",
"homepage": "https://registry.terraform.io/providers/oboukili/argocd", "homepage": "https://registry.terraform.io/providers/oboukili/argocd",
"owner": "oboukili", "owner": "oboukili",
"repo": "terraform-provider-argocd", "repo": "terraform-provider-argocd",
"rev": "v6.0.3", "rev": "v6.1.1",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-q9PO9tMbaXTs3nBLElwU05GcDZMZqNmLVVGDmiSRSfo=" "vendorHash": "sha256-yyTU+D4zMDcJPZ9j7a2ZuPjGBCHvED5R0rvevCEaoAI="
}, },
"artifactory": { "artifactory": {
"hash": "sha256-udgRoN1YoVaJpNS6MkZAThcuWGOL9Jc3lf3NAKS9WH8=", "hash": "sha256-qlmAOc4wSxQ9Xnr4zBB98OSFW4HB7w7yStsUNfsylsE=",
"homepage": "https://registry.terraform.io/providers/jfrog/artifactory", "homepage": "https://registry.terraform.io/providers/jfrog/artifactory",
"owner": "jfrog", "owner": "jfrog",
"repo": "terraform-provider-artifactory", "repo": "terraform-provider-artifactory",
"rev": "v10.4.0", "rev": "v10.4.3",
"spdx": "Apache-2.0", "spdx": "Apache-2.0",
"vendorHash": "sha256-P5L2Q8t9TxJnu5cjOwEKek1KNKAw78fqZoOSAo6AvzQ=" "vendorHash": "sha256-P5L2Q8t9TxJnu5cjOwEKek1KNKAw78fqZoOSAo6AvzQ="
}, },
@ -117,13 +117,13 @@
"vendorHash": null "vendorHash": null
}, },
"aws": { "aws": {
"hash": "sha256-+daAkFF6nSTe6yxOdW58BRzBYI4tUMhNoG6vnG1cXTA=", "hash": "sha256-YykNKCDFPQCtES2vAbCbqbHbkx1EmVM0bTEylr84bqs=",
"homepage": "https://registry.terraform.io/providers/hashicorp/aws", "homepage": "https://registry.terraform.io/providers/hashicorp/aws",
"owner": "hashicorp", "owner": "hashicorp",
"repo": "terraform-provider-aws", "repo": "terraform-provider-aws",
"rev": "v5.41.0", "rev": "v5.43.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-JmMp9AqjWZGVvCsCCTYl3o4BT9yxzA3A16ESrpciCLE=" "vendorHash": "sha256-2KQEX1QwPrN32gFMKF7QPnisLdBC/bn74wX1f8uiC+0="
}, },
"azuread": { "azuread": {
"hash": "sha256-lumXl3orK5Jq5+qnRfiIA94NjK2bCjd3LhRzHmW1h8I=", "hash": "sha256-lumXl3orK5Jq5+qnRfiIA94NjK2bCjd3LhRzHmW1h8I=",
@ -135,11 +135,11 @@
"vendorHash": null "vendorHash": null
}, },
"azurerm": { "azurerm": {
"hash": "sha256-5uA+P29yLCXyOB+98Nx9dPNKONmgDAkMEb8cNRB4MW8=", "hash": "sha256-kJ6snlePBGRqE8mS95ROzskz+b4cnPLz/OO1Vk+i56Q=",
"homepage": "https://registry.terraform.io/providers/hashicorp/azurerm", "homepage": "https://registry.terraform.io/providers/hashicorp/azurerm",
"owner": "hashicorp", "owner": "hashicorp",
"repo": "terraform-provider-azurerm", "repo": "terraform-provider-azurerm",
"rev": "v3.96.0", "rev": "v3.97.1",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": null "vendorHash": null
}, },
@ -154,20 +154,20 @@
}, },
"baiducloud": { "baiducloud": {
"deleteVendor": true, "deleteVendor": true,
"hash": "sha256-ymTKRxbFUT99qxAS8lb4QAAWXX7yopPo8Ac93mpGEHo=", "hash": "sha256-qLjAHoBnb6//QYxYZyN13RxWOuEjxwSOiPyfR1qMtro=",
"homepage": "https://registry.terraform.io/providers/baidubce/baiducloud", "homepage": "https://registry.terraform.io/providers/baidubce/baiducloud",
"owner": "baidubce", "owner": "baidubce",
"repo": "terraform-provider-baiducloud", "repo": "terraform-provider-baiducloud",
"rev": "v1.19.39", "rev": "v1.19.40",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-puTQKvIvyBRgdZZTZCXEAdc8HYNgtoSmzjpqHCIEAKk=" "vendorHash": "sha256-hd64VJOkl+BtMXR+VcGam8ycKdfuwmaj67cBxx6rS8w="
}, },
"bigip": { "bigip": {
"hash": "sha256-GrHd9plKhe7BdCBgsnTv+CM82F7oDPWamtXxOpiwKPE=", "hash": "sha256-cGFlVcu8G7xpiHk1dhgLIkZHc6srOn/eLyQk9xETpnI=",
"homepage": "https://registry.terraform.io/providers/F5Networks/bigip", "homepage": "https://registry.terraform.io/providers/F5Networks/bigip",
"owner": "F5Networks", "owner": "F5Networks",
"repo": "terraform-provider-bigip", "repo": "terraform-provider-bigip",
"rev": "v1.21.0", "rev": "v1.22.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": null "vendorHash": null
}, },
@ -190,13 +190,13 @@
"vendorHash": "sha256-/dOiXO2aPkuZaFiwv/6AXJdIADgx8T7eOwvJfBBoqg8=" "vendorHash": "sha256-/dOiXO2aPkuZaFiwv/6AXJdIADgx8T7eOwvJfBBoqg8="
}, },
"buildkite": { "buildkite": {
"hash": "sha256-zhltbz9mlHVJI4R8RSS6UyyfeopgK62BJzQfl3VtIfE=", "hash": "sha256-DIquKLQB27deYYG3vMlhBoO/EZ5WK04zAR7qPrRQ38k=",
"homepage": "https://registry.terraform.io/providers/buildkite/buildkite", "homepage": "https://registry.terraform.io/providers/buildkite/buildkite",
"owner": "buildkite", "owner": "buildkite",
"repo": "terraform-provider-buildkite", "repo": "terraform-provider-buildkite",
"rev": "v1.5.2", "rev": "v1.6.0",
"spdx": "MIT", "spdx": "MIT",
"vendorHash": "sha256-LKATx/5jjQCyaOUDFQNka3tWMH5DbEKNhrfYlyzDPKc=" "vendorHash": "sha256-1HYJ1k3ZK9rK/cDOXnJz556qqBkyikRxCBbOeJhl3Ks="
}, },
"checkly": { "checkly": {
"hash": "sha256-Wxw87/9BG/bTDGqgKdle6WF38oDoHkrc0HIKjJlaQOQ=", "hash": "sha256-Wxw87/9BG/bTDGqgKdle6WF38oDoHkrc0HIKjJlaQOQ=",
@ -226,13 +226,13 @@
"vendorHash": "sha256-cI3brJwN+7FTceOMwR0HMbZCNHhwvm31OXqjAEvrzrs=" "vendorHash": "sha256-cI3brJwN+7FTceOMwR0HMbZCNHhwvm31OXqjAEvrzrs="
}, },
"cloudflare": { "cloudflare": {
"hash": "sha256-veqaQQaZz05lom2X03+bav2JBVv/enBCA1lcyKmAlZk=", "hash": "sha256-p4jRSFNalEzIs8k1QZBVSUERmdK0qVYdk4oUoQRtQww=",
"homepage": "https://registry.terraform.io/providers/cloudflare/cloudflare", "homepage": "https://registry.terraform.io/providers/cloudflare/cloudflare",
"owner": "cloudflare", "owner": "cloudflare",
"repo": "terraform-provider-cloudflare", "repo": "terraform-provider-cloudflare",
"rev": "v4.26.0", "rev": "v4.29.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-8MvwvBDUs0OVptgtbg/tAEEBgD9Tk5mWKnnW4p0Rk20=" "vendorHash": "sha256-HgTjVtC5Z7/3b86fFHTcZE4vUukWtBdWFdwgWGQW9ZQ="
}, },
"cloudfoundry": { "cloudfoundry": {
"hash": "sha256-1nYncJLVU/f9WD6Quh9IieIXgixPzbPk4zbtI1zmf9g=", "hash": "sha256-1nYncJLVU/f9WD6Quh9IieIXgixPzbPk4zbtI1zmf9g=",
@ -364,11 +364,11 @@
"vendorHash": "sha256-XxltOTtCgmJ9wZX8Yw39HkwVVZb58kZjAH7jfKPhjKM=" "vendorHash": "sha256-XxltOTtCgmJ9wZX8Yw39HkwVVZb58kZjAH7jfKPhjKM="
}, },
"doppler": { "doppler": {
"hash": "sha256-FJS1lPYieTWI/AX7pBbFmGtJw5kPD7MCZp+LWWIsnus=", "hash": "sha256-PfqFf3V+zH4SPvciLNnB9KWCVm8M94q8rzedUuXABAg=",
"homepage": "https://registry.terraform.io/providers/DopplerHQ/doppler", "homepage": "https://registry.terraform.io/providers/DopplerHQ/doppler",
"owner": "DopplerHQ", "owner": "DopplerHQ",
"repo": "terraform-provider-doppler", "repo": "terraform-provider-doppler",
"rev": "v1.6.2", "rev": "v1.7.0",
"spdx": "Apache-2.0", "spdx": "Apache-2.0",
"vendorHash": "sha256-qJ1mOuMyJ/f2/yCns7qY8zUt2lgDuBgzN0w1HCKBk7E=" "vendorHash": "sha256-qJ1mOuMyJ/f2/yCns7qY8zUt2lgDuBgzN0w1HCKBk7E="
}, },
@ -382,20 +382,20 @@
"vendorHash": "sha256-oVTanZpCWs05HwyIKW2ajiBPz1HXOFzBAt5Us+EtTRw=" "vendorHash": "sha256-oVTanZpCWs05HwyIKW2ajiBPz1HXOFzBAt5Us+EtTRw="
}, },
"equinix": { "equinix": {
"hash": "sha256-LF9S0jqMeXSci6uAFW+3C7IA9PGmSUgFrVG13/i0hZc=", "hash": "sha256-Wtt4vkHcDBTzObfpOSdH4RAfoT/1+B58PytJ1NSkyQk=",
"homepage": "https://registry.terraform.io/providers/equinix/equinix", "homepage": "https://registry.terraform.io/providers/equinix/equinix",
"owner": "equinix", "owner": "equinix",
"repo": "terraform-provider-equinix", "repo": "terraform-provider-equinix",
"rev": "v1.33.0", "rev": "v1.34.0",
"spdx": "MIT", "spdx": "MIT",
"vendorHash": "sha256-TC1vPWe1rFofz0SdKpV9qAmknLROQH2MglPDrA62nO0=" "vendorHash": "sha256-9+cytMN5VK0nwLiR58lzE+UzlvqI677/rOxzd8D5k30="
}, },
"exoscale": { "exoscale": {
"hash": "sha256-t1yZmayoZkDImcIr+VkNhQRzlfteGuvgcjSDOmmCF5I=", "hash": "sha256-0PsMSEbMoeuoa17AZvzbSOoY48IeQ4CPGWknkc0tDQQ=",
"homepage": "https://registry.terraform.io/providers/exoscale/exoscale", "homepage": "https://registry.terraform.io/providers/exoscale/exoscale",
"owner": "exoscale", "owner": "exoscale",
"repo": "terraform-provider-exoscale", "repo": "terraform-provider-exoscale",
"rev": "v0.56.0", "rev": "v0.57.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": null "vendorHash": null
}, },
@ -446,40 +446,40 @@
"vendorHash": "sha256-EiTWJ4bw8IwsRTD9Lt28Up2DXH0oVneO2IaO8VqWtkw=" "vendorHash": "sha256-EiTWJ4bw8IwsRTD9Lt28Up2DXH0oVneO2IaO8VqWtkw="
}, },
"github": { "github": {
"hash": "sha256-0tnqXynYPct9HAZdhJ42bzJbcsC5QVz4bOszEO+tjSc=", "hash": "sha256-K3/taXnlIroiWQYyZB2LElAcF5fQm2aaEp3OXqKCJ+E=",
"homepage": "https://registry.terraform.io/providers/integrations/github", "homepage": "https://registry.terraform.io/providers/integrations/github",
"owner": "integrations", "owner": "integrations",
"repo": "terraform-provider-github", "repo": "terraform-provider-github",
"rev": "v6.2.0", "rev": "v6.2.1",
"spdx": "MIT", "spdx": "MIT",
"vendorHash": null "vendorHash": null
}, },
"gitlab": { "gitlab": {
"hash": "sha256-RphUUJOMx9p1fTys68C+bWxgS8zjrWLe4VgMXwKa8SE=", "hash": "sha256-WquY33Dx5E+OgnAMZ6dhgwrixhHhAYRUa4l6TuzGzmw=",
"homepage": "https://registry.terraform.io/providers/gitlabhq/gitlab", "homepage": "https://registry.terraform.io/providers/gitlabhq/gitlab",
"owner": "gitlabhq", "owner": "gitlabhq",
"repo": "terraform-provider-gitlab", "repo": "terraform-provider-gitlab",
"rev": "v16.9.1", "rev": "v16.10.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-mr4ZEQobsFBRU/RUV4joqsWQTuAaSioB1GO09wQJy7M=" "vendorHash": "sha256-7hIThIq3uU803aK+paR5KdTdfVmSZu7Spf9UepaVgvc="
}, },
"google": { "google": {
"hash": "sha256-CbOy5kExsXHQTMteNpqnr0SHsQIjKSiJuwJD9Wcy5Ag=", "hash": "sha256-LV3/4X/4jA+mZrkbzmYqiQDOVkH3zie2YCc6M9voJpA=",
"homepage": "https://registry.terraform.io/providers/hashicorp/google", "homepage": "https://registry.terraform.io/providers/hashicorp/google",
"owner": "hashicorp", "owner": "hashicorp",
"repo": "terraform-provider-google", "repo": "terraform-provider-google",
"rev": "v5.21.0", "rev": "v5.23.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-VL03n3rEMccHuYmFMgoX01hzpEA7WHIyxa8GnfVLLSo=" "vendorHash": "sha256-SUYBSggBAl63plYz1kf+BBL6yPKbHAG8cjJE1JquSQ4="
}, },
"google-beta": { "google-beta": {
"hash": "sha256-fn4JrTU/TX8jJ6vYxzWYFpGFmgSDEt6txOF/jsX2BcU=", "hash": "sha256-0eiMpPuia7M9IPhKHBWXiBXH3LenUonsbs2QdP4V1e4=",
"homepage": "https://registry.terraform.io/providers/hashicorp/google-beta", "homepage": "https://registry.terraform.io/providers/hashicorp/google-beta",
"owner": "hashicorp", "owner": "hashicorp",
"repo": "terraform-provider-google-beta", "repo": "terraform-provider-google-beta",
"rev": "v5.21.0", "rev": "v5.23.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-bUJJNnnmF7PXwXUomE5uuk21rpHsy7W5ESkj0DDiY04=" "vendorHash": "sha256-y9m+rOuQWSooeZBMuM1irx0CtdHtOqmhKv7+f8GNO6g="
}, },
"googleworkspace": { "googleworkspace": {
"hash": "sha256-dedYnsKHizxJZibuvJOMbJoux0W6zgKaK5fxIofKqCY=", "hash": "sha256-dedYnsKHizxJZibuvJOMbJoux0W6zgKaK5fxIofKqCY=",
@ -491,20 +491,20 @@
"vendorHash": "sha256-fqVBnAivVekV+4tpkl+E6eNA3wi8mhLevJRCs3W7L2g=" "vendorHash": "sha256-fqVBnAivVekV+4tpkl+E6eNA3wi8mhLevJRCs3W7L2g="
}, },
"grafana": { "grafana": {
"hash": "sha256-8YE+bi44c55hDH+NlEsuocT1d6PugF/QfwvOTD693YE=", "hash": "sha256-7Hv0jAYnTh8B2xpxIlMdQL3mVANSRYRyG5OTHZLp4wA=",
"homepage": "https://registry.terraform.io/providers/grafana/grafana", "homepage": "https://registry.terraform.io/providers/grafana/grafana",
"owner": "grafana", "owner": "grafana",
"repo": "terraform-provider-grafana", "repo": "terraform-provider-grafana",
"rev": "v2.14.2", "rev": "v2.14.3",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-HVPCLtE1DVM5Rq/myNoJwFrSQVG6utX0LOmR7yklRu8=" "vendorHash": "sha256-NAUFTk868XhzLu5boP09JeZbMs1exqOmHkFa9MUJFns="
}, },
"gridscale": { "gridscale": {
"hash": "sha256-5gidBMUfJ4DPKuRx/pF5Rlff7DPkIXBJ7qzCIy6bZm8=", "hash": "sha256-gytjUn1xy8HTgItYrxrhm80qrbrjdDQvEcGLZ49VC+0=",
"homepage": "https://registry.terraform.io/providers/gridscale/gridscale", "homepage": "https://registry.terraform.io/providers/gridscale/gridscale",
"owner": "gridscale", "owner": "gridscale",
"repo": "terraform-provider-gridscale", "repo": "terraform-provider-gridscale",
"rev": "v1.23.2", "rev": "v1.24.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": null "vendorHash": null
}, },
@ -564,11 +564,11 @@
"vendorHash": "sha256-GDeuiT3PV92t3CsD60CAmN8ED9j8UzDbRlk59SSCVCM=" "vendorHash": "sha256-GDeuiT3PV92t3CsD60CAmN8ED9j8UzDbRlk59SSCVCM="
}, },
"huaweicloud": { "huaweicloud": {
"hash": "sha256-vOaLOGLp+V+IYYa56rpiv1yx89incw796cTUgUXHtdM=", "hash": "sha256-yYMI1UuOU/DbGYqReI//zhBmlD96KYot7h987k4Cl6o=",
"homepage": "https://registry.terraform.io/providers/huaweicloud/huaweicloud", "homepage": "https://registry.terraform.io/providers/huaweicloud/huaweicloud",
"owner": "huaweicloud", "owner": "huaweicloud",
"repo": "terraform-provider-huaweicloud", "repo": "terraform-provider-huaweicloud",
"rev": "v1.62.1", "rev": "v1.63.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": null "vendorHash": null
}, },
@ -591,13 +591,13 @@
"vendorHash": null "vendorHash": null
}, },
"ibm": { "ibm": {
"hash": "sha256-dYH6D5VKh2wNh8L4SyXELy1zL+fORLeOgXG92XDg4GY=", "hash": "sha256-5esd44JgaarCJK38QyYv+fxMz0+zzivMZD8rqyqrdbo=",
"homepage": "https://registry.terraform.io/providers/IBM-Cloud/ibm", "homepage": "https://registry.terraform.io/providers/IBM-Cloud/ibm",
"owner": "IBM-Cloud", "owner": "IBM-Cloud",
"repo": "terraform-provider-ibm", "repo": "terraform-provider-ibm",
"rev": "v1.63.0", "rev": "v1.64.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-SlUzByF0tke5YtMflOzpYfguZlNe8qeqJqvxCh/TVoY=" "vendorHash": "sha256-YFxD7hvKTXdaQ/+/Oiws/i6TipbFaAQ0Ah1arGK7JUo="
}, },
"icinga2": { "icinga2": {
"hash": "sha256-Y/Oq0aTzP+oSKPhHiHY9Leal4HJJm7TNDpcdqkUsCmk=", "hash": "sha256-Y/Oq0aTzP+oSKPhHiHY9Leal4HJJm7TNDpcdqkUsCmk=",
@ -636,13 +636,13 @@
"vendorHash": "sha256-NEGjgtrn6ZowqSF6NAK1NnSjYVUvfWuH/4R5ZPdTZSs=" "vendorHash": "sha256-NEGjgtrn6ZowqSF6NAK1NnSjYVUvfWuH/4R5ZPdTZSs="
}, },
"kafka": { "kafka": {
"hash": "sha256-BS15vAQeWAYPaF7i4xpFPv7Ni+tF4LFu8k/woVvQNF4=", "hash": "sha256-bkZfgA/PgLWC3YXrIgoF2YRgOFQhoT+Seeifg1GvVFY=",
"homepage": "https://registry.terraform.io/providers/Mongey/kafka", "homepage": "https://registry.terraform.io/providers/Mongey/kafka",
"owner": "Mongey", "owner": "Mongey",
"repo": "terraform-provider-kafka", "repo": "terraform-provider-kafka",
"rev": "v0.7.0", "rev": "v0.7.1",
"spdx": "MIT", "spdx": "MIT",
"vendorHash": "sha256-H35qqnWovPgf1t9DlxnPhDg2uWEKTWR3KcLtDum/Qc4=" "vendorHash": "sha256-Adfz3r3xWY7a4u9/m6a1rvQYGq+E8Q5pAuS/uMgZRQM="
}, },
"kafka-connect": { "kafka-connect": {
"hash": "sha256-PiSVfzNPEXAgONb/eaVAN4yPudn5glcHL0BLqE5PWsw=", "hash": "sha256-PiSVfzNPEXAgONb/eaVAN4yPudn5glcHL0BLqE5PWsw=",
@ -681,13 +681,13 @@
"vendorHash": "sha256-mVC3Uf+4zWM7lXHXOfVI+okXI8gP1W5VyZyH+qVNX7o=" "vendorHash": "sha256-mVC3Uf+4zWM7lXHXOfVI+okXI8gP1W5VyZyH+qVNX7o="
}, },
"launchdarkly": { "launchdarkly": {
"hash": "sha256-IuoFMp0NViuwwgOlfvoReodPhOJR0+YyJDI/vjN52jQ=", "hash": "sha256-C+RGrw+XAaAekKkwvf5gtoiXghSJuByUJvyKgUt/DSE=",
"homepage": "https://registry.terraform.io/providers/launchdarkly/launchdarkly", "homepage": "https://registry.terraform.io/providers/launchdarkly/launchdarkly",
"owner": "launchdarkly", "owner": "launchdarkly",
"repo": "terraform-provider-launchdarkly", "repo": "terraform-provider-launchdarkly",
"rev": "v2.18.1", "rev": "v2.18.2",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-JbrecA5pNIifikBHwqFL72hRfRFHHl29mFKE4nDdbkY=" "vendorHash": "sha256-NCyEU30hw1aWB7T8sM3lrJEUtPdWlbCGdWDaRgjeJdM="
}, },
"libvirt": { "libvirt": {
"hash": "sha256-yGlNBbixrQxjh7zgZoK3YXpUmr1vrLiLZhKpXvQULYg=", "hash": "sha256-yGlNBbixrQxjh7zgZoK3YXpUmr1vrLiLZhKpXvQULYg=",
@ -699,13 +699,13 @@
"vendorHash": "sha256-K/PH8DAi6Wj+isPx9xefQcLPKnrimfItZFSPfktTias=" "vendorHash": "sha256-K/PH8DAi6Wj+isPx9xefQcLPKnrimfItZFSPfktTias="
}, },
"linode": { "linode": {
"hash": "sha256-rk1fUC+++pXmYVL1IgR5rT77pere+j51n9kdzaDWKgc=", "hash": "sha256-BZoMNx0a+dXMpY/YaYKEL3dQonGELlNzDVtOq7Z4Yfk=",
"homepage": "https://registry.terraform.io/providers/linode/linode", "homepage": "https://registry.terraform.io/providers/linode/linode",
"owner": "linode", "owner": "linode",
"repo": "terraform-provider-linode", "repo": "terraform-provider-linode",
"rev": "v2.17.0", "rev": "v2.18.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-8vmorWsrZLJo3lKN74Bt+V8xKPOe389FZ2SjvxYfvtI=" "vendorHash": "sha256-1okvZTEycuGymzcKtUNrxZpiPVoc2ykMxMUbq8mgEOw="
}, },
"linuxbox": { "linuxbox": {
"hash": "sha256-MzasMVtXO7ZeZ+qEx2Z+7881fOIA0SFzSvXVHeEROtg=", "hash": "sha256-MzasMVtXO7ZeZ+qEx2Z+7881fOIA0SFzSvXVHeEROtg=",
@ -744,13 +744,13 @@
"vendorHash": "sha256-yUXxq8NTOv8ZmWp0WiIID2cRU6AZiItIs99uGZpt9dc=" "vendorHash": "sha256-yUXxq8NTOv8ZmWp0WiIID2cRU6AZiItIs99uGZpt9dc="
}, },
"matchbox": { "matchbox": {
"hash": "sha256-vWhdStfwReeD1PHTihBoj4GoKnP85nzNzIV/Tjfcz1M=", "hash": "sha256-B1PxdbqXrB1ioB5utI4LI6rkhwHmaAiYkSxRAcjJnAA=",
"homepage": "https://registry.terraform.io/providers/poseidon/matchbox", "homepage": "https://registry.terraform.io/providers/poseidon/matchbox",
"owner": "poseidon", "owner": "poseidon",
"repo": "terraform-provider-matchbox", "repo": "terraform-provider-matchbox",
"rev": "v0.5.2", "rev": "v0.5.4",
"spdx": "Apache-2.0", "spdx": "Apache-2.0",
"vendorHash": "sha256-coARdDQVs38dVdUH/fsoGVlwh3wYr3aTxKp/FpUzhis=" "vendorHash": "sha256-L1wufPa7LPPyOPTL+jFQgiWzJoJYS+fCdw3N0KZqKtc="
}, },
"metal": { "metal": {
"hash": "sha256-1HTSDVMk2VhoYRLInrBK3bDuYU0SwyhBV1p5A2tlU/I=", "hash": "sha256-1HTSDVMk2VhoYRLInrBK3bDuYU0SwyhBV1p5A2tlU/I=",
@ -762,13 +762,13 @@
"vendorHash": "sha256-QxbZv6YMa5/I4bTeQBNdmG3EKtLEmstnH7HMiZzFJrI=" "vendorHash": "sha256-QxbZv6YMa5/I4bTeQBNdmG3EKtLEmstnH7HMiZzFJrI="
}, },
"migadu": { "migadu": {
"hash": "sha256-jLOXQmsAAG78eNAlpo6Ge5fdhUHeGevVm079H1gE5/s=", "hash": "sha256-qP862jjbYks+6DR5eGzVlCvYyfupejqaxD2CgwSZxdQ=",
"homepage": "https://registry.terraform.io/providers/metio/migadu", "homepage": "https://registry.terraform.io/providers/metio/migadu",
"owner": "metio", "owner": "metio",
"repo": "terraform-provider-migadu", "repo": "terraform-provider-migadu",
"rev": "2024.3.21", "rev": "2024.4.4",
"spdx": "0BSD", "spdx": "0BSD",
"vendorHash": "sha256-ecoy0nJPuBsoVkYXNkrURgmDiaZEplkD1Zv4TEMuyU0=" "vendorHash": "sha256-r5fOj6YsW8ggoEbfyLvJDmD9P8WQ1J5K7ztg/NYm6y4="
}, },
"minio": { "minio": {
"hash": "sha256-dgMK61jFXnOvE11FIoIJfFN1zb+N9HrFZ/WtQqwktbw=", "hash": "sha256-dgMK61jFXnOvE11FIoIJfFN1zb+N9HrFZ/WtQqwktbw=",
@ -780,13 +780,13 @@
"vendorHash": "sha256-Uxexx5sK6D+EEEPWLnWFE0HPG1RKUsYnSJ/1bV9JBkw=" "vendorHash": "sha256-Uxexx5sK6D+EEEPWLnWFE0HPG1RKUsYnSJ/1bV9JBkw="
}, },
"mongodbatlas": { "mongodbatlas": {
"hash": "sha256-1IHiwMvME+kTbOSBNHBpDifzORf4li8WUxvtMu2uQiI=", "hash": "sha256-xdPR0wyDEsyJCzRcGuDNhD4K+19KhZXsGxygoDadfvY=",
"homepage": "https://registry.terraform.io/providers/mongodb/mongodbatlas", "homepage": "https://registry.terraform.io/providers/mongodb/mongodbatlas",
"owner": "mongodb", "owner": "mongodb",
"repo": "terraform-provider-mongodbatlas", "repo": "terraform-provider-mongodbatlas",
"rev": "v1.15.2", "rev": "v1.15.3",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-fVDjhXRbYt845ZhFY85lCpXubKINBeMZg0U3K5RbnDk=" "vendorHash": "sha256-1cbmarVuMIkLuEBo2+O14wHqV0hbT/Jxh0sWRvfnDoE="
}, },
"namecheap": { "namecheap": {
"hash": "sha256-g3i7jZBOl2umsyRk1z7Radv8a9Ry6oQ8oorv3YbY7Xo=", "hash": "sha256-g3i7jZBOl2umsyRk1z7Radv8a9Ry6oQ8oorv3YbY7Xo=",
@ -807,13 +807,13 @@
"vendorHash": null "vendorHash": null
}, },
"newrelic": { "newrelic": {
"hash": "sha256-4/MFR8AJanto5OuY0J3Yce3zI62D5bx2UklrTccpvP0=", "hash": "sha256-npQn5eN3dGXy7bgVXpobn1HyVemrdxD5sASubbpY7qs=",
"homepage": "https://registry.terraform.io/providers/newrelic/newrelic", "homepage": "https://registry.terraform.io/providers/newrelic/newrelic",
"owner": "newrelic", "owner": "newrelic",
"repo": "terraform-provider-newrelic", "repo": "terraform-provider-newrelic",
"rev": "v3.32.0", "rev": "v3.34.1",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-QluXNbTBc/EXCO3PmnBOSwSERK3t5NhCS4Jnz5hU97k=" "vendorHash": "sha256-Riutsej7+FISe0wFH9lGghyR5D6Jv3WbP3a33pqVBzU="
}, },
"nomad": { "nomad": {
"hash": "sha256-+S78qH7xMvJEGvgTRlxADNZI24PNgqCj1xgmIl4Oif4=", "hash": "sha256-+S78qH7xMvJEGvgTRlxADNZI24PNgqCj1xgmIl4Oif4=",
@ -825,13 +825,13 @@
"vendorHash": "sha256-f/L9ZkirFIb+Yu2H4wz9wCb65NCC0TsmEnZPCI4Z6gw=" "vendorHash": "sha256-f/L9ZkirFIb+Yu2H4wz9wCb65NCC0TsmEnZPCI4Z6gw="
}, },
"ns1": { "ns1": {
"hash": "sha256-qk+JfmWjaK29KqUVN2K01AEU+zJAQGeJhsnu3BBNHqI=", "hash": "sha256-ZGqHIzK7tv7WeKHE8w11lOfDeWZqhi/88DOHcDaYHNg=",
"homepage": "https://registry.terraform.io/providers/ns1-terraform/ns1", "homepage": "https://registry.terraform.io/providers/ns1-terraform/ns1",
"owner": "ns1-terraform", "owner": "ns1-terraform",
"repo": "terraform-provider-ns1", "repo": "terraform-provider-ns1",
"rev": "v2.2.0", "rev": "v2.2.1",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-Fh4RP2Yu3EWD/I8r3I2nEkyQBZdM5SmdX+IcK5B8cb0=" "vendorHash": "sha256-mOxhYVsq/yl1BqMyRWmLM/YrncMTlqiUo4GdAObH3ZU="
}, },
"null": { "null": {
"hash": "sha256-KOwJXGvMc9Xgq4Kbr72aW6RDwzldUrU1C3aDxpKO3qE=", "hash": "sha256-KOwJXGvMc9Xgq4Kbr72aW6RDwzldUrU1C3aDxpKO3qE=",
@ -853,11 +853,11 @@
"vendorHash": "sha256-LRIfxQGwG988HE5fftGl6JmBG7tTknvmgpm4Fu1NbWI=" "vendorHash": "sha256-LRIfxQGwG988HE5fftGl6JmBG7tTknvmgpm4Fu1NbWI="
}, },
"oci": { "oci": {
"hash": "sha256-V3A22EUSmVjglnytaxRL2CCG5DtzKl0J+Xalk96z99o=", "hash": "sha256-dGFpk83154mQlH2iM52mo+208MAU4BYBOOBH529QmRI=",
"homepage": "https://registry.terraform.io/providers/oracle/oci", "homepage": "https://registry.terraform.io/providers/oracle/oci",
"owner": "oracle", "owner": "oracle",
"repo": "terraform-provider-oci", "repo": "terraform-provider-oci",
"rev": "v5.34.0", "rev": "v5.36.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": null "vendorHash": null
}, },
@ -898,13 +898,13 @@
"vendorHash": "sha256-WHsYDcvLE1i+wCHGNF6eE8yVpPbP5SLG7ZK1AL7xMXI=" "vendorHash": "sha256-WHsYDcvLE1i+wCHGNF6eE8yVpPbP5SLG7ZK1AL7xMXI="
}, },
"opentelekomcloud": { "opentelekomcloud": {
"hash": "sha256-rifK2xVnzYQZnDzF4glkpA4w1/rbvuxkas8npJRXqvM=", "hash": "sha256-wRKwrxZbT2z71gVE+O8bLjZmRGsZDhdn4raShedV4kc=",
"homepage": "https://registry.terraform.io/providers/opentelekomcloud/opentelekomcloud", "homepage": "https://registry.terraform.io/providers/opentelekomcloud/opentelekomcloud",
"owner": "opentelekomcloud", "owner": "opentelekomcloud",
"repo": "terraform-provider-opentelekomcloud", "repo": "terraform-provider-opentelekomcloud",
"rev": "v1.36.4", "rev": "v1.36.5",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-4kO4pl1Ssj+lCmImiJQq59J/6rpfuYt/NBDBxJopQdE=" "vendorHash": "sha256-Xlba/lceTt1DPtHZxeIQqbscl+pZl7hw6xJXleXd0r0="
}, },
"opsgenie": { "opsgenie": {
"hash": "sha256-ZssKhfwFrzCjvlebEmKAHWBInN5daVqxbmVFoA92dv8=", "hash": "sha256-ZssKhfwFrzCjvlebEmKAHWBInN5daVqxbmVFoA92dv8=",
@ -925,11 +925,11 @@
"vendorHash": null "vendorHash": null
}, },
"pagerduty": { "pagerduty": {
"hash": "sha256-D1tYsPiozT9FdTL+DKDkjxAByXueyKwBkka3P9xDJLc=", "hash": "sha256-joR7uucMEBbQIuec29m+t3w5W/omgzexg70+Sh2MeBY=",
"homepage": "https://registry.terraform.io/providers/PagerDuty/pagerduty", "homepage": "https://registry.terraform.io/providers/PagerDuty/pagerduty",
"owner": "PagerDuty", "owner": "PagerDuty",
"repo": "terraform-provider-pagerduty", "repo": "terraform-provider-pagerduty",
"rev": "v3.10.0", "rev": "v3.11.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": null "vendorHash": null
}, },
@ -970,13 +970,13 @@
"vendorHash": null "vendorHash": null
}, },
"project": { "project": {
"hash": "sha256-eXieWiwDzTkOVvrjjnG8i8ke7mMTjluq5zEtiZqfiOA=", "hash": "sha256-U3hm1BqqePffuLBg+U4hgzcZpk+VCLhE5GsnRRYKT30=",
"homepage": "https://registry.terraform.io/providers/jfrog/project", "homepage": "https://registry.terraform.io/providers/jfrog/project",
"owner": "jfrog", "owner": "jfrog",
"repo": "terraform-provider-project", "repo": "terraform-provider-project",
"rev": "v1.5.1", "rev": "v1.5.2",
"spdx": "Apache-2.0", "spdx": "Apache-2.0",
"vendorHash": "sha256-bJ6+i7fZ6PsUcwjwJKiMC10I44bojIifI7eWUhdT1Bw=" "vendorHash": "sha256-2gVJpNRIEO/mTBg3m5CoxpeC2U09hnV9bPi5537f1Mk="
}, },
"proxmox": { "proxmox": {
"hash": "sha256-ikXLLNoAjrnGGGI3fHTKFXm8YwqNazE/U39JTjOBsW4=", "hash": "sha256-ikXLLNoAjrnGGGI3fHTKFXm8YwqNazE/U39JTjOBsW4=",
@ -1033,13 +1033,13 @@
"vendorHash": null "vendorHash": null
}, },
"scaleway": { "scaleway": {
"hash": "sha256-3K1BGar+D45nCSQNodJYTp+kP0EdoBzQTOEJ3PQa3t8=", "hash": "sha256-TRmBSATsynbvRg9TC6kYPbzV3Y2X9Kr/3FjyMe7Kj6c=",
"homepage": "https://registry.terraform.io/providers/scaleway/scaleway", "homepage": "https://registry.terraform.io/providers/scaleway/scaleway",
"owner": "scaleway", "owner": "scaleway",
"repo": "terraform-provider-scaleway", "repo": "terraform-provider-scaleway",
"rev": "v2.38.2", "rev": "v2.38.3",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-5otz+3S1o3V+V1SZaFP611AwyCvoPCxCwR2SE3DEw5o=" "vendorHash": "sha256-hUueCEaNuTQFD17eyne7LMUr5dXdycLFJ7/d9AJh3F4="
}, },
"secret": { "secret": {
"hash": "sha256-MmAnA/4SAPqLY/gYcJSTnEttQTsDd2kEdkQjQj6Bb+A=", "hash": "sha256-MmAnA/4SAPqLY/gYcJSTnEttQTsDd2kEdkQjQj6Bb+A=",
@ -1051,13 +1051,13 @@
"vendorHash": null "vendorHash": null
}, },
"selectel": { "selectel": {
"hash": "sha256-p9XH9/sIVyY2f957/8KI91y5GCn1/MEGY+QBsArvYJA=", "hash": "sha256-HgHgZEo6fKxunbE5W8LUVukvmGpEc0bqbpO/ZcdwmTE=",
"homepage": "https://registry.terraform.io/providers/selectel/selectel", "homepage": "https://registry.terraform.io/providers/selectel/selectel",
"owner": "selectel", "owner": "selectel",
"repo": "terraform-provider-selectel", "repo": "terraform-provider-selectel",
"rev": "v4.0.2", "rev": "v4.1.1",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-FjJosTjFRJnBW22IB9UHfZe9KWrT1h12InyUl0q7a28=" "vendorHash": "sha256-VT9A7x7CAKJjuCXLI42s6WDy/XSME0UYLU6k1YcreU0="
}, },
"sentry": { "sentry": {
"hash": "sha256-sUXOH0cbD5Zf3e4KHLUYM8vu2knJdfIWZ+fq9HMfJ54=", "hash": "sha256-sUXOH0cbD5Zf3e4KHLUYM8vu2knJdfIWZ+fq9HMfJ54=",
@ -1123,13 +1123,13 @@
"vendorHash": "sha256-8W1PK4T98iK1N6EB6AVjvr1P9Ja51+kSOmYAEosxrh8=" "vendorHash": "sha256-8W1PK4T98iK1N6EB6AVjvr1P9Ja51+kSOmYAEosxrh8="
}, },
"spotinst": { "spotinst": {
"hash": "sha256-3/dMhB5SRc1pEsoflaMcNmPn3MjEUZ95aruqwD/Ro0M=", "hash": "sha256-frnDZx02Kmp2C0djkZYfeZ6WsGc9mFUNmpajsfx8FCI=",
"homepage": "https://registry.terraform.io/providers/spotinst/spotinst", "homepage": "https://registry.terraform.io/providers/spotinst/spotinst",
"owner": "spotinst", "owner": "spotinst",
"repo": "terraform-provider-spotinst", "repo": "terraform-provider-spotinst",
"rev": "v1.165.0", "rev": "v1.168.1",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-aKp9DDUU1cZye24jtFqpxA43KJj8CjXFE/+hl1PBH6c=" "vendorHash": "sha256-pf9FsI11CKANee0dM0djLVF1xwztKVlb0bVAkp/6zbc="
}, },
"ssh": { "ssh": {
"hash": "sha256-1UN5QJyjCuxs2vQYlSuz2jsu/HgGTxOoWWRcv4qcwow=", "hash": "sha256-1UN5QJyjCuxs2vQYlSuz2jsu/HgGTxOoWWRcv4qcwow=",
@ -1195,11 +1195,11 @@
"vendorHash": "sha256-2rYaxDDIPH46gXNILnTcHRsChpEd406r4pzWdnHMLNM=" "vendorHash": "sha256-2rYaxDDIPH46gXNILnTcHRsChpEd406r4pzWdnHMLNM="
}, },
"tencentcloud": { "tencentcloud": {
"hash": "sha256-Vk1Jc1zSTKoFlNATlx9i5Pn4EzD/uS+RgmUCooMQVx8=", "hash": "sha256-nYQVrWpCiDTXJ6BA9dwXkslGF/dvlT+E8WBD7By91Cw=",
"homepage": "https://registry.terraform.io/providers/tencentcloudstack/tencentcloud", "homepage": "https://registry.terraform.io/providers/tencentcloudstack/tencentcloud",
"owner": "tencentcloudstack", "owner": "tencentcloudstack",
"repo": "terraform-provider-tencentcloud", "repo": "terraform-provider-tencentcloud",
"rev": "v1.81.83", "rev": "v1.81.86",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": null "vendorHash": null
}, },
@ -1268,31 +1268,31 @@
"vendorHash": null "vendorHash": null
}, },
"utils": { "utils": {
"hash": "sha256-DW2O1tou+HfOgzSca/SS3tFeo0efGt1ATVs5SmwUvmk=", "hash": "sha256-LGjH/nQj18v8qjwJEU1CkrauGqSizsTpl80Q5jNIwUE=",
"homepage": "https://registry.terraform.io/providers/cloudposse/utils", "homepage": "https://registry.terraform.io/providers/cloudposse/utils",
"owner": "cloudposse", "owner": "cloudposse",
"repo": "terraform-provider-utils", "repo": "terraform-provider-utils",
"rev": "1.18.0", "rev": "1.19.2",
"spdx": "Apache-2.0", "spdx": "Apache-2.0",
"vendorHash": "sha256-srhu8iepW/JmPrJ7PuXyk0GEWMwzpNpkny33z7ZdrdM=" "vendorHash": "sha256-QC5EWVSkBDMfLR2f0u0K2LOn6FaHCeFkNJeq8vWQp+o="
}, },
"vault": { "vault": {
"hash": "sha256-jwVc1x2+i4V/0mWRg5+Xpk0ONHC1T55Hof9JOUVAo/s=", "hash": "sha256-Pdh2rudUwOgY292JPPrvxIABHC+/dBYK5iNTaHZ9ed0=",
"homepage": "https://registry.terraform.io/providers/hashicorp/vault", "homepage": "https://registry.terraform.io/providers/hashicorp/vault",
"owner": "hashicorp", "owner": "hashicorp",
"repo": "terraform-provider-vault", "repo": "terraform-provider-vault",
"rev": "v4.1.0", "rev": "v4.2.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-b/1g/1hFbIfzYJ0lQKNzalLkD95LLRgoftuoeDicalE=" "vendorHash": "sha256-b/1g/1hFbIfzYJ0lQKNzalLkD95LLRgoftuoeDicalE="
}, },
"vcd": { "vcd": {
"hash": "sha256-TP9COMofx4c2GZ0dQkfopn4iq8ddfV3WwuNjTu6yQnU=", "hash": "sha256-yywk60Ae1Ch+kuOqoKfAqrOhUAbJVQxA0wJW+CfZ4CY=",
"homepage": "https://registry.terraform.io/providers/vmware/vcd", "homepage": "https://registry.terraform.io/providers/vmware/vcd",
"owner": "vmware", "owner": "vmware",
"repo": "terraform-provider-vcd", "repo": "terraform-provider-vcd",
"rev": "v3.11.0", "rev": "v3.12.0",
"spdx": "MPL-2.0", "spdx": "MPL-2.0",
"vendorHash": "sha256-IqmmlLr+bwfSRJtKbK/fiBdbf2vX61+6h6rZizD1vw8=" "vendorHash": "sha256-53Cj5ooDsg91iiuYzjAt9u9S40g6plcz6lqlnawNFNM="
}, },
"venafi": { "venafi": {
"hash": "sha256-GkbBD6oDtHy18utI2dsDWmVIUiU8bILg6rsXEX7gfbI=", "hash": "sha256-GkbBD6oDtHy18utI2dsDWmVIUiU8bILg6rsXEX7gfbI=",

View File

@ -15,13 +15,13 @@
buildNpmPackage rec { buildNpmPackage rec {
pname = "jitsi-meet-electron"; pname = "jitsi-meet-electron";
version = "2023.11.3"; version = "2024.3.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "jitsi"; owner = "jitsi";
repo = "jitsi-meet-electron"; repo = "jitsi-meet-electron";
rev = "v${version}"; rev = "v${version}";
hash = "sha256-gE5CP0l3SrAHGNS6Hr5/MefTtE86JTmc85CwOmylEpg="; hash = "sha256-BGN+t9Caw5n/NN1E5Oi/ruMLjoVh0jUlpzYR6vodHbw=";
}; };
nativeBuildInputs = [ nativeBuildInputs = [
@ -38,7 +38,7 @@ buildNpmPackage rec {
zlib zlib
]; ];
npmDepsHash = "sha256-JZVJcKzG4X7YIUvIRWZsDQnHx+dNqCj6kFm8mZaSH2k="; npmDepsHash = "sha256-KanG8y+tYzswCCXjSkOlk+p9XKaouP2Z7IhsD5bDtRk=";
makeCacheWritable = true; makeCacheWritable = true;

View File

@ -63,14 +63,14 @@ let
in in
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "telegram-desktop"; pname = "telegram-desktop";
version = "4.16.0"; version = "4.16.1";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "telegramdesktop"; owner = "telegramdesktop";
repo = "tdesktop"; repo = "tdesktop";
rev = "v${version}"; rev = "v${version}";
fetchSubmodules = true; fetchSubmodules = true;
hash = "sha256-llrHN/XCMKwAvbyUZ/92OUjAEOPJKPbDfldVChLZo5k="; hash = "sha256-sb7BpEIjSJS4ntv8s0RSJAj4BhTgHF7fEei5QXl60mA=";
}; };
patches = [ patches = [

View File

@ -24,7 +24,7 @@ let
pname = "forgejo-frontend"; pname = "forgejo-frontend";
inherit (forgejo) src version; inherit (forgejo) src version;
npmDepsHash = "sha256-I7eq9PB2Od7aaji+VrZj05VVCsGtCiXEMy88xrA8Ktg="; npmDepsHash = "sha256-uMPy4cqMDNZTpF+pk7YibXEJO1zxVfwlCeFzGgJBiU0=";
patches = [ patches = [
./package-json-npm-build-frontend.patch ./package-json-npm-build-frontend.patch
@ -39,17 +39,33 @@ let
in in
buildGoModule rec { buildGoModule rec {
pname = "forgejo"; pname = "forgejo";
version = "1.21.8-0"; version = "1.21.10-0";
src = fetchFromGitea { src = fetchFromGitea {
domain = "codeberg.org"; domain = "codeberg.org";
owner = "forgejo"; owner = "forgejo";
repo = "forgejo"; repo = "forgejo";
rev = "v${version}"; rev = "v${version}";
hash = "sha256-nufhGsibpPrGWpVg75Z6qdzlc1K+p36mMjlS2MtsuAI="; hash = "sha256-uCRAT9RiU9S+tP9alNshSQwbUgLmU9wE5HIQ4FPmXVE=";
# Forgejo has multiple different version strings that need to be provided
# via ldflags. main.ForgejoVersion for example is a combination of a
# hardcoded gitea compatibility version string (in the Makefile) and
# git describe and is easiest to get by invoking the Makefile.
# So we do that, store it in the src FOD and then extend the ldflags array
# in preConfigure.
# The `echo -e >> Makefile` is temporary and already part of the next
# major release. Furthermore, the ldflags will change in the next major
# release and need to be updated accordingly.
leaveDotGit = true;
postFetch = ''
cd "$out"
echo -e 'show-version-full:\n\t@echo ''${FORGEJO_VERSION}' >> Makefile
make show-version-full > FULL_VERSION
find "$out" -name .git -print0 | xargs -0 rm -rf
'';
}; };
vendorHash = "sha256-+1apPnqbIfp2Nu1ieI2DdHo4gndZObmcq/Td+ZtkILM="; vendorHash = "sha256-pgUSmM2CxYO8DralWoeR2groQxpxo9WtRcToYeaHXGk=";
subPackages = [ "." ]; subPackages = [ "." ];
@ -76,6 +92,10 @@ buildGoModule rec {
"-X 'main.Tags=${lib.concatStringsSep " " tags}'" "-X 'main.Tags=${lib.concatStringsSep " " tags}'"
]; ];
preConfigure = ''
export ldflags+=" -X code.gitea.io/gitea/routers/api/forgejo/v1.ForgejoVersion=$(cat FULL_VERSION) -X main.ForgejoVersion=$(cat FULL_VERSION)"
'';
preBuild = '' preBuild = ''
go run build/merge-forgejo-locales.go go run build/merge-forgejo-locales.go
''; '';
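
Put together, the version plumbing above amounts to roughly the following shell flow (a sketch only; the version string shown is an invented placeholder, while the Makefile target and the ldflags assignment are the ones from postFetch and preConfigure above):

    # In postFetch, after the helper target has been appended to the Makefile,
    # the fetched source prints its composite version and stores it in the FOD:
    make show-version-full > FULL_VERSION   # e.g. 1.21.10-0+gitea-... (illustrative placeholder)

    # In preConfigure, that stored value is injected into the Go linker flags:
    export ldflags+=" -X code.gitea.io/gitea/routers/api/forgejo/v1.ForgejoVersion=$(cat FULL_VERSION) -X main.ForgejoVersion=$(cat FULL_VERSION)"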

View File

@ -0,0 +1,29 @@
{ lib, buildGoModule, fetchFromGitHub, olm, config }:
buildGoModule rec {
pname = "mautrix-meta";
version = "0.2.0";
subPackages = [ "." ];
src = fetchFromGitHub {
owner = "mautrix";
repo = "meta";
rev = "v${version}";
hash = "sha256-n0FpEHgnMdg6W5wahIT5HaF9AP/QYlLuUWJS+VrElgg=";
};
buildInputs = [ olm ];
vendorHash = "sha256-GkgIang3/1u0ybznHgK1l84bEiCj6u4qf8G+HgLGr90=";
doCheck = false;
meta = {
homepage = "https://github.com/mautrix/meta";
description = "Matrix <-> Facebook and Mautrix <-> Instagram hybrid puppeting/relaybot bridge";
license = lib.licenses.agpl3Plus;
maintainers = with lib.maintainers; [ rutherther ];
mainProgram = "mautrix-meta";
};
}

View File

@ -6,17 +6,19 @@
python3.pkgs.buildPythonApplication rec { python3.pkgs.buildPythonApplication rec {
pname = "prowler"; pname = "prowler";
version = "3.13.0"; version = "3.14.0";
pyproject = true; pyproject = true;
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "prowler-cloud"; owner = "prowler-cloud";
repo = "prowler"; repo = "prowler";
rev = "refs/tags/${version}"; rev = "refs/tags/${version}";
hash = "sha256-19B6b+xR+f7dIu/6eINsxs7UxuV96QdsNncodC8/N3Q="; hash = "sha256-hQVrKhBgucuZQ2CZKG6VJMsHUGkWNch9em2dRCbEA+A=";
}; };
pythonRelaxDeps = [ pythonRelaxDeps = [
"azure-mgmt-compute"
"azure-mgmt-network"
"azure-mgmt-security" "azure-mgmt-security"
"azure-storage-blob" "azure-storage-blob"
"boto3" "boto3"
@ -24,8 +26,8 @@ python3.pkgs.buildPythonApplication rec {
"google-api-python-client" "google-api-python-client"
"jsonschema" "jsonschema"
"pydantic" "pydantic"
"slack-sdk"
"pydantic" "pydantic"
"slack-sdk"
]; ];
nativeBuildInputs = with python3.pkgs; [ nativeBuildInputs = with python3.pkgs; [
@ -42,7 +44,9 @@ python3.pkgs.buildPythonApplication rec {
azure-identity azure-identity
azure-mgmt-applicationinsights azure-mgmt-applicationinsights
azure-mgmt-authorization azure-mgmt-authorization
azure-mgmt-compute
azure-mgmt-cosmosdb azure-mgmt-cosmosdb
azure-mgmt-network
azure-mgmt-rdbms azure-mgmt-rdbms
azure-mgmt-security azure-mgmt-security
azure-mgmt-sql azure-mgmt-sql

View File

@ -2,7 +2,7 @@
python3Packages.buildPythonApplication rec { python3Packages.buildPythonApplication rec {
pname = "pyprland"; pname = "pyprland";
version = "2.1.1"; version = "2.1.4";
format = "pyproject"; format = "pyproject";
disabled = python3Packages.pythonOlder "3.10"; disabled = python3Packages.pythonOlder "3.10";
@ -11,7 +11,7 @@ python3Packages.buildPythonApplication rec {
owner = "hyprland-community"; owner = "hyprland-community";
repo = "pyprland"; repo = "pyprland";
rev = "refs/tags/${version}"; rev = "refs/tags/${version}";
hash = "sha256-S1kNA70kxLK4ZdhJDXp1RhKsGVTS0k9wLxAtndv/iCo="; hash = "sha256-vko8SY5d537bKnpVeJWM3D4WeYCXAvF6tCzlFjKIZRU=";
}; };
nativeBuildInputs = with python3Packages; [ poetry-core ]; nativeBuildInputs = with python3Packages; [ poetry-core ];

View File

@ -1,26 +1,38 @@
{ lib, python3Packages, fetchPypi }: {
lib,
python3,
fetchPypi,
}:
python3Packages.buildPythonPackage rec { python3.pkgs.buildPythonPackage rec {
pname = "rst2html5"; pname = "rst2html5";
version = "2.0"; version = "2.0.1";
pyproject = true;
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit pname version;
hash = "sha256-Ejjja/fm6wXTf9YtjCYZsNDB8X5oAtyPoUIsYFDuZfc="; hash = "sha256-MJmYyF+rAo8vywGizNyIbbCvxDmCYueVoC6pxNDzKuk=";
}; };
buildInputs = with python3Packages; [ build-system = with python3.pkgs; [ poetry-core ];
dependencies = with python3.pkgs; [
beautifulsoup4 beautifulsoup4
docutils docutils
genshi genshi
pygments pygments
]; ];
meta = with lib;{ # Tests are not shipped as PyPI releases
homepage = "https://rst2html5.readthedocs.io/en/latest/"; doCheck = false;
pythonImportsCheck = [ "rst2html5" ];
meta = with lib; {
description = "Converts ReSTructuredText to (X)HTML5"; description = "Converts ReSTructuredText to (X)HTML5";
mainProgram = "rst2html5"; homepage = "https://rst2html5.readthedocs.io/";
license = licenses.mit; license = licenses.mit;
maintainers = with maintainers; [ AndersonTorres ]; maintainers = with maintainers; [ AndersonTorres ];
mainProgram = "rst2html5";
}; };
} }

View File

@ -4,6 +4,7 @@
fetchurl, fetchurl,
makeWrapper, makeWrapper,
dotnet-sdk_8, dotnet-sdk_8,
nixosTests,
}: }:
stdenvNoCC.mkDerivation rec { stdenvNoCC.mkDerivation rec {
pname = "technitium-dns-server"; pname = "technitium-dns-server";
@ -35,6 +36,10 @@ stdenvNoCC.mkDerivation rec {
runHook postInstall runHook postInstall
''; '';
passthru.tests = {
inherit (nixosTests) technitium-dns-server;
};
meta = { meta = {
changelog = "https://github.com/TechnitiumSoftware/DnsServer/blob/master/CHANGELOG.md"; changelog = "https://github.com/TechnitiumSoftware/DnsServer/blob/master/CHANGELOG.md";
description = "Authorative and Recursive DNS server for Privacy and Security"; description = "Authorative and Recursive DNS server for Privacy and Security";

View File

@ -42,12 +42,9 @@
}: }:
assert let assert
int = a: if a then 1 else 0;
xor = a: b: ((builtins.bitXor (int a) (int b)) == 1);
in
lib.assertMsg lib.assertMsg
(xor (lib.xor
(gitRelease != null) (gitRelease != null)
(officialRelease != null)) (officialRelease != null))
("must specify `gitRelease` or `officialRelease`" + ("must specify `gitRelease` or `officialRelease`" +

View File

@ -41,12 +41,9 @@
, monorepoSrc ? null , monorepoSrc ? null
}: }:
assert let assert
int = a: if a then 1 else 0;
xor = a: b: ((builtins.bitXor (int a) (int b)) == 1);
in
lib.assertMsg lib.assertMsg
(xor (lib.xor
(gitRelease != null) (gitRelease != null)
(officialRelease != null)) (officialRelease != null))
("must specify `gitRelease` or `officialRelease`" + ("must specify `gitRelease` or `officialRelease`" +

View File

@ -41,12 +41,9 @@
, monorepoSrc ? null , monorepoSrc ? null
}: }:
assert let assert
int = a: if a then 1 else 0;
xor = a: b: ((builtins.bitXor (int a) (int b)) == 1);
in
lib.assertMsg lib.assertMsg
(xor (lib.xor
(gitRelease != null) (gitRelease != null)
(officialRelease != null)) (officialRelease != null))
("must specify `gitRelease` or `officialRelease`" + ("must specify `gitRelease` or `officialRelease`" +

View File

@ -41,12 +41,9 @@
, monorepoSrc ? null , monorepoSrc ? null
}: }:
assert let assert
int = a: if a then 1 else 0;
xor = a: b: ((builtins.bitXor (int a) (int b)) == 1);
in
lib.assertMsg lib.assertMsg
(xor (lib.xor
(gitRelease != null) (gitRelease != null)
(officialRelease != null)) (officialRelease != null))
("must specify `gitRelease` or `officialRelease`" + ("must specify `gitRelease` or `officialRelease`" +

View File

@ -41,12 +41,9 @@
, monorepoSrc ? null , monorepoSrc ? null
}: }:
assert let assert
int = a: if a then 1 else 0;
xor = a: b: ((builtins.bitXor (int a) (int b)) == 1);
in
lib.assertMsg lib.assertMsg
(xor (lib.xor
(gitRelease != null) (gitRelease != null)
(officialRelease != null)) (officialRelease != null))
("must specify `gitRelease` or `officialRelease`" + ("must specify `gitRelease` or `officialRelease`" +

View File

@ -41,12 +41,9 @@
, monorepoSrc ? null , monorepoSrc ? null
}: }:
assert let assert
int = a: if a then 1 else 0;
xor = a: b: ((builtins.bitXor (int a) (int b)) == 1);
in
lib.assertMsg lib.assertMsg
(xor (lib.xor
(gitRelease != null) (gitRelease != null)
(officialRelease != null)) (officialRelease != null))
("must specify `gitRelease` or `officialRelease`" + ("must specify `gitRelease` or `officialRelease`" +

View File

@ -46,12 +46,9 @@
, monorepoSrc ? null , monorepoSrc ? null
}: }:
assert let assert
int = a: if a then 1 else 0;
xor = a: b: ((builtins.bitXor (int a) (int b)) == 1);
in
lib.assertMsg lib.assertMsg
(xor (lib.xor
(gitRelease != null) (gitRelease != null)
(officialRelease != null)) (officialRelease != null))
("must specify `gitRelease` or `officialRelease`" + ("must specify `gitRelease` or `officialRelease`" +

View File

@ -31,7 +31,7 @@
stdenv.mkDerivation (finalAttrs: { stdenv.mkDerivation (finalAttrs: {
pname = "xdg-desktop-portal"; pname = "xdg-desktop-portal";
version = "1.18.2"; version = "1.18.3";
outputs = [ "out" "installedTests" ]; outputs = [ "out" "installedTests" ];
@ -39,7 +39,7 @@ stdenv.mkDerivation (finalAttrs: {
owner = "flatpak"; owner = "flatpak";
repo = "xdg-desktop-portal"; repo = "xdg-desktop-portal";
rev = finalAttrs.version; rev = finalAttrs.version;
hash = "sha256-Pd5IKrVp/OOE10Ozy4R3XbubVc6iz0znG+YB0Uu+68E="; hash = "sha256-VqIQLUAf/n5m1tHCvnlxi0eaLOuG1R44tMFI/Hc992A=";
}; };
patches = [ patches = [

View File

@ -1,20 +1,21 @@
{ lib {
, buildPythonPackage lib,
, fetchPypi buildPythonPackage,
, pythonOlder cython_3,
, libssh expandvars,
, cython fetchPypi,
, wheel libssh,
, setuptools pythonOlder,
, setuptools-scm setuptools,
, toml setuptools-scm,
, expandvars toml,
wheel,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "ansible-pylibssh"; pname = "ansible-pylibssh";
version = "1.1.0"; version = "1.1.0";
format = "pyproject"; pyproject = true;
disabled = pythonOlder "3.6"; disabled = pythonOlder "3.6";
@ -23,31 +24,28 @@ buildPythonPackage rec {
hash = "sha256-spaGux6dYvtUtpOdU6oN7SEn8IgBof2NpQSPvr+Zplg="; hash = "sha256-spaGux6dYvtUtpOdU6oN7SEn8IgBof2NpQSPvr+Zplg=";
}; };
# remove after https://github.com/ansible/pylibssh/pull/502 is merged # Remove after https://github.com/ansible/pylibssh/pull/502 is merged
postPatch = '' postPatch = ''
sed -i "/setuptools_scm_git_archive/d" pyproject.toml sed -i "/setuptools_scm_git_archive/d" pyproject.toml
''; '';
nativeBuildInputs = [ build-system = [
cython cython_3
wheel expandvars
setuptools setuptools
setuptools-scm setuptools-scm
toml toml
expandvars wheel
]; ];
propagatedBuildInputs = [ dependencies = [ libssh ];
libssh
];
pythonImportsCheck = [ pythonImportsCheck = [ "pylibsshext" ];
"pylibsshext"
];
meta = with lib; { meta = with lib; {
description = "Python bindings to client functionality of libssh specific to Ansible use case"; description = "Python bindings to client functionality of libssh specific to Ansible use case";
homepage = "https://github.com/ansible/pylibssh"; homepage = "https://github.com/ansible/pylibssh";
changelog = "https://github.com/ansible/pylibssh/releases/tag/v${version}";
license = licenses.lgpl21Plus; license = licenses.lgpl21Plus;
maintainers = with maintainers; [ geluk ]; maintainers = with maintainers; [ geluk ];
}; };

View File

@ -1,46 +1,35 @@
{ lib {
, buildPythonPackage lib,
, pythonOlder buildPythonPackage,
, fetchPypi crc,
, setuptools-scm fetchPypi,
, numpy pythonOlder,
, pandas setuptools-scm,
, pillow
, crcmod
, openpyxl
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "apycula"; pname = "apycula";
version = "0.11.1"; version = "0.12";
format = "setuptools"; pyproject = true;
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.8";
src = fetchPypi { src = fetchPypi {
inherit version; inherit version;
pname = "Apycula"; pname = "Apycula";
hash = "sha256-yuDyW1JXavI6U3B3hx3kdHBuVCQd2rJJqgZ0z15ahaw="; hash = "sha256-aF/JVm4d6c631y+RdsCk3pAVSroRBY+lW2wBRvgcQH8=";
}; };
nativeBuildInputs = [ build-system = [ setuptools-scm ];
setuptools-scm
dependencies = [
crc
]; ];
propagatedBuildInputs = [ # Tests require a physical FPGA
numpy
pandas
pillow
crcmod
openpyxl
];
# tests require a physical FPGA
doCheck = false; doCheck = false;
pythonImportsCheck = [ pythonImportsCheck = [ "apycula" ];
"apycula"
];
meta = with lib; { meta = with lib; {
description = "Open Source tools for Gowin FPGAs"; description = "Open Source tools for Gowin FPGAs";

View File

@ -365,14 +365,14 @@
buildPythonPackage rec { buildPythonPackage rec {
pname = "boto3-stubs"; pname = "boto3-stubs";
version = "1.34.76"; version = "1.34.78";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.7"; disabled = pythonOlder "3.7";
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit pname version;
hash = "sha256-gE/mLhn08SlXhP4ihboCigWy1RC862UyPTfUnt4YPQc="; hash = "sha256-bjW+/VCh1dgpL1VYnHDFvGzOjnUxCoX5FJMRJxqdkU8=";
}; };
nativeBuildInputs = [ nativeBuildInputs = [

View File

@ -1,8 +1,9 @@
{ lib {
, buildPythonPackage lib,
, fetchFromGitHub buildPythonPackage,
, pythonOlder fetchFromGitHub,
, setuptools pythonOlder,
setuptools,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
@ -19,16 +20,12 @@ buildPythonPackage rec {
hash = "sha256-n1Ps0CybeLeDR5E4UnqmSkbFe0SXyplomEGDchAweSY="; hash = "sha256-n1Ps0CybeLeDR5E4UnqmSkbFe0SXyplomEGDchAweSY=";
}; };
nativeBuildInputs = [ build-system = [ setuptools ];
setuptools
];
# avoid circular dependency with xmlschema which directly depends on this # avoid circular dependency with xmlschema which directly depends on this
doCheck = false; doCheck = false;
pythonImportsCheck = [ pythonImportsCheck = [ "elementpath" ];
"elementpath"
];
meta = with lib; { meta = with lib; {
description = "XPath 1.0/2.0 parsers and selectors for ElementTree and lxml"; description = "XPath 1.0/2.0 parsers and selectors for ElementTree and lxml";

View File

@ -1,23 +1,24 @@
{ lib {
, buildPythonPackage lib,
, fetchPypi buildPythonPackage,
, poetry-core fetchPypi,
, pythonOlder poetry-core,
, aiohttp pythonOlder,
, dataclasses-json aiohttp,
, langchain-core dataclasses-json,
, langsmith langchain-core,
, numpy langsmith,
, pyyaml numpy,
, requests pyyaml,
, sqlalchemy requests,
, tenacity sqlalchemy,
, typer tenacity,
typer,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "langchain-community"; pname = "langchain-community";
version = "0.0.29"; version = "0.0.31";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.8";
@ -25,14 +26,12 @@ buildPythonPackage rec {
src = fetchPypi { src = fetchPypi {
pname = "langchain_community"; pname = "langchain_community";
inherit version; inherit version;
hash = "sha256-2IEH+vqf4sVzPaljDGjZ7lHNM7HIiklQ56LZo49+eqM="; hash = "sha256-mpcLwrtZu0wgS2ltjGLCU09t2zEAUAXMG31/k05Ypfw=";
}; };
nativeBuildInputs = [ build-system = [ poetry-core ];
poetry-core
];
propagatedBuildInputs = [ dependencies = [
aiohttp aiohttp
dataclasses-json dataclasses-json
langchain-core langchain-core
@ -45,9 +44,7 @@ buildPythonPackage rec {
]; ];
passthru.optional-dependencies = { passthru.optional-dependencies = {
cli = [ cli = [ typer ];
typer
];
}; };
pythonImportsCheck = [ "langchain_community" ]; pythonImportsCheck = [ "langchain_community" ];

View File

@ -1,22 +1,23 @@
{ lib {
, buildPythonPackage lib,
, fetchPypi anyio,
, pythonOlder buildPythonPackage,
, poetry-core fetchPypi,
, anyio jsonpatch,
, jsonpatch langsmith,
, langsmith packaging,
, packaging poetry-core,
, pydantic pydantic,
, pythonRelaxDepsHook pythonOlder,
, pyyaml pythonRelaxDepsHook,
, requests pyyaml,
, tenacity requests,
tenacity,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "langchain-core"; pname = "langchain-core";
version = "0.1.36"; version = "0.1.40";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.8";
@ -24,7 +25,7 @@ buildPythonPackage rec {
src = fetchPypi { src = fetchPypi {
pname = "langchain_core"; pname = "langchain_core";
inherit version; inherit version;
hash = "sha256-qiQyNwyj0qXW3RSoEKpkiL8vYi/3oKPcMPbg7Z1/X6g="; hash = "sha256-NMBvwObTU0tzjGP4VANEa0vnEWFmW34JH5uxnJFOwQA=";
}; };
pythonRelaxDeps = [ pythonRelaxDeps = [
@ -32,12 +33,11 @@ buildPythonPackage rec {
"packaging" "packaging"
]; ];
nativeBuildInputs = [ build-system = [ poetry-core ];
poetry-core
pythonRelaxDepsHook
];
propagatedBuildInputs = [ nativeBuildInputs = [ pythonRelaxDepsHook ];
dependencies = [
anyio anyio
jsonpatch jsonpatch
langsmith langsmith
@ -48,9 +48,7 @@ buildPythonPackage rec {
tenacity tenacity
]; ];
pythonImportsCheck = [ pythonImportsCheck = [ "langchain_core" ];
"langchain_core"
];
# PyPI source does not have tests # PyPI source does not have tests
doCheck = false; doCheck = false;

View File

@ -1,58 +1,57 @@
{ lib {
, bash lib,
, buildPythonPackage aiohttp,
, fetchFromGitHub async-timeout,
, pythonOlder azure-core,
, poetry-core azure-cosmos,
, aiohttp azure-identity,
, async-timeout bash,
, dataclasses-json buildPythonPackage,
, jsonpatch chardet,
, langsmith clarifai,
, langchain-core cohere,
, langchain-community dataclasses-json,
, langchain-text-splitters esprima,
, numpy fetchFromGitHub,
, pydantic freezegun,
, pyyaml huggingface-hub,
, requests jsonpatch,
, sqlalchemy langchain-community,
, tenacity langchain-core,
# optional dependencies langchain-text-splitters,
, azure-core langsmith,
, azure-cosmos lark,
, azure-identity manifest-ml,
, chardet nlpcloud,
, clarifai numpy,
, cohere openai,
, esprima pandas,
, huggingface-hub poetry-core,
, lark pydantic,
, manifest-ml pytest-asyncio,
, nlpcloud pytest-mock,
, openai pytest-socket,
, qdrant-client pytestCheckHook,
, sentence-transformers pythonOlder,
, tiktoken pyyaml,
, torch qdrant-client,
, transformers requests-mock,
, typer requests,
# test dependencies responses,
, freezegun sentence-transformers,
, pandas sqlalchemy,
, pytest-asyncio syrupy,
, pytest-mock tenacity,
, pytest-socket tiktoken,
, pytestCheckHook toml,
, requests-mock torch,
, responses transformers,
, syrupy typer,
, toml
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "langchain"; pname = "langchain";
version = "0.1.13"; version = "0.1.14";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.8";
@ -61,20 +60,16 @@ buildPythonPackage rec {
owner = "langchain-ai"; owner = "langchain-ai";
repo = "langchain"; repo = "langchain";
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-tBEO0GOY1cqO5FOYnBXAOSupSRhcoI9u4Nu4FieId74="; hash = "sha256-wV6QFeJ/kV0nDVlA2qsJ9p1n3Yxy8Q/NZ1IX8cFtzcg=";
}; };
sourceRoot = "${src.name}/libs/langchain"; sourceRoot = "${src.name}/libs/langchain";
nativeBuildInputs = [ build-system = [ poetry-core ];
poetry-core
];
buildInputs = [ buildInputs = [ bash ];
bash
];
propagatedBuildInputs = [ dependencies = [
aiohttp aiohttp
dataclasses-json dataclasses-json
jsonpatch jsonpatch
@ -88,9 +83,7 @@ buildPythonPackage rec {
requests requests
sqlalchemy sqlalchemy
tenacity tenacity
] ++ lib.optionals (pythonOlder "3.11") [ ] ++ lib.optionals (pythonOlder "3.11") [ async-timeout ];
async-timeout
];
passthru.optional-dependencies = { passthru.optional-dependencies = {
llms = [ llms = [
@ -104,31 +97,19 @@ buildPythonPackage rec {
torch torch
transformers transformers
]; ];
qdrant = [ qdrant = [ qdrant-client ];
qdrant-client
];
openai = [ openai = [
openai openai
tiktoken tiktoken
]; ];
text_helpers = [ text_helpers = [ chardet ];
chardet clarifai = [ clarifai ];
]; cohere = [ cohere ];
clarifai = [
clarifai
];
cohere = [
cohere
];
docarray = [ docarray = [
# docarray # docarray
]; ];
embeddings = [ embeddings = [ sentence-transformers ];
sentence-transformers javascript = [ esprima ];
];
javascript = [
esprima
];
azure = [ azure = [
azure-identity azure-identity
azure-cosmos azure-cosmos
@ -140,11 +121,8 @@ buildPythonPackage rec {
# azure-search-documents # azure-search-documents
# azure-ai-textanalytics # azure-ai-textanalytics
]; ];
all = [ all = [ ];
]; cli = [ typer ];
cli = [
typer
];
}; };
nativeCheckInputs = [ nativeCheckInputs = [
@ -162,33 +140,34 @@ buildPythonPackage rec {
]; ];
pytestFlagsArray = [ pytestFlagsArray = [
# integration_tests have many network, db access and require `OPENAI_API_KEY`, etc. # integration_tests require network access, database access and require `OPENAI_API_KEY`, etc.
"tests/unit_tests" "tests/unit_tests"
"--only-core" "--only-core"
]; ];
disabledTests = [ disabledTests = [
# these tests have db access # These tests have database access
"test_table_info" "test_table_info"
"test_sql_database_run" "test_sql_database_run"
# These tests have network access
# these tests have network access
"test_socket_disabled" "test_socket_disabled"
"test_openai_agent_with_streaming"
# this test may require a specific version of langchain-community "test_openai_agent_tools_agent"
# This test may require a specific version of langchain-community
"test_compatible_vectorstore_documentation" "test_compatible_vectorstore_documentation"
# AssertionErrors
"test_callback_handlers"
"test_generic_fake_chat_model"
]; ];
pythonImportsCheck = [ pythonImportsCheck = [ "langchain" ];
"langchain"
];
meta = with lib; { meta = with lib; {
description = "Building applications with LLMs through composability"; description = "Building applications with LLMs through composability";
mainProgram = "langchain-server";
homepage = "https://github.com/langchain-ai/langchain"; homepage = "https://github.com/langchain-ai/langchain";
changelog = "https://github.com/langchain-ai/langchain/releases/tag/v${version}"; changelog = "https://github.com/langchain-ai/langchain/releases/tag/v${version}";
license = licenses.mit; license = licenses.mit;
maintainers = with maintainers; [ natsukium ]; maintainers = with maintainers; [ natsukium ];
mainProgram = "langchain-server";
}; };
} }

View File

@ -1,21 +1,22 @@
{ lib {
, attr lib,
, buildPythonPackage attr,
, fetchFromGitHub buildPythonPackage,
, freezegun fetchFromGitHub,
, orjson freezegun,
, poetry-core orjson,
, pydantic poetry-core,
, pytest-asyncio pydantic,
, pytestCheckHook pytest-asyncio,
, pythonOlder pytestCheckHook,
, pythonRelaxDepsHook pythonOlder,
, requests pythonRelaxDepsHook,
requests,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "langsmith"; pname = "langsmith";
version = "0.1.38"; version = "0.1.40";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.8";
@ -24,14 +25,12 @@ buildPythonPackage rec {
owner = "langchain-ai"; owner = "langchain-ai";
repo = "langsmith-sdk"; repo = "langsmith-sdk";
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-hK9zPEmO0LaRnbLTbc9ABE9a7UAZU9yZZUswu955CJU="; hash = "sha256-LdHCTPFrIhIOWV4m9pJx7GWQX1Xs6ajaXMgPFO+JQ8w=";
}; };
sourceRoot = "${src.name}/python"; sourceRoot = "${src.name}/python";
pythonRelaxDeps = [ pythonRelaxDeps = [ "orjson" ];
"orjson"
];
build-system = [ build-system = [
poetry-core poetry-core
@ -59,7 +58,7 @@ buildPythonPackage rec {
"test_as_runnable_batch" "test_as_runnable_batch"
"test_as_runnable_async" "test_as_runnable_async"
"test_as_runnable_async_batch" "test_as_runnable_async_batch"
# requires git repo # Test requires git repo
"test_git_info" "test_git_info"
# Tests require OpenAI API key # Tests require OpenAI API key
"test_chat_async_api" "test_chat_async_api"
@ -74,18 +73,16 @@ buildPythonPackage rec {
"tests/unit_tests/test_client.py" "tests/unit_tests/test_client.py"
]; ];
pythonImportsCheck = [ pythonImportsCheck = [ "langsmith" ];
"langsmith"
];
__darwinAllowLocalNetworking = true; __darwinAllowLocalNetworking = true;
meta = with lib; { meta = with lib; {
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform"; description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform";
mainProgram = "langsmith";
homepage = "https://github.com/langchain-ai/langsmith-sdk"; homepage = "https://github.com/langchain-ai/langsmith-sdk";
changelog = "https://github.com/langchain-ai/langsmith-sdk/releases/tag/v${version}"; changelog = "https://github.com/langchain-ai/langsmith-sdk/releases/tag/v${version}";
license = licenses.mit; license = licenses.mit;
maintainers = with maintainers; [ natsukium ]; maintainers = with maintainers; [ natsukium ];
mainProgram = "langsmith";
}; };
} }

View File

@ -1,16 +1,17 @@
{ lib {
, buildPythonPackage lib,
, fetchPypi buildPythonPackage,
, llama-index-core fetchPypi,
, llama-index-llms-openai llama-index-core,
, poetry-core llama-index-llms-openai,
, pythonOlder poetry-core,
, pythonRelaxDepsHook pythonOlder,
pythonRelaxDepsHook,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "llama-index-agent-openai"; pname = "llama-index-agent-openai";
version = "0.2.1"; version = "0.2.2";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.8";
@ -18,12 +19,10 @@ buildPythonPackage rec {
src = fetchPypi { src = fetchPypi {
pname = "llama_index_agent_openai"; pname = "llama_index_agent_openai";
inherit version; inherit version;
hash = "sha256-ydCixD0vdSuA99PdflbhEsSd3b0Gl0lzFTz9uTdLYrQ="; hash = "sha256-EgY92TLHQBV5b5c5hsxS14P1H9o45OrXKlbQ/RlZJe4=";
}; };
pythonRelaxDeps = [ pythonRelaxDeps = [ "llama-index-llms-openai" ];
"llama-index-llms-openai"
];
build-system = [ build-system = [
poetry-core poetry-core
@ -35,9 +34,7 @@ buildPythonPackage rec {
llama-index-llms-openai llama-index-llms-openai
]; ];
pythonImportsCheck = [ pythonImportsCheck = [ "llama_index.agent.openai" ];
"llama_index.agent.openai"
];
meta = with lib; { meta = with lib; {
description = "LlamaIndex Agent Integration for OpenAI"; description = "LlamaIndex Agent Integration for OpenAI";

View File

@ -1,18 +1,19 @@
{ lib {
, asyncpg lib,
, buildPythonPackage asyncpg,
, fetchPypi buildPythonPackage,
, llama-index-core fetchPypi,
, pgvector llama-index-core,
, poetry-core pgvector,
, psycopg2 poetry-core,
, pythonRelaxDepsHook psycopg2,
, pythonOlder pythonRelaxDepsHook,
pythonOlder,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "llama-index-vector-stores-postgres"; pname = "llama-index-vector-stores-postgres";
version = "0.1.4.post1"; version = "0.1.5";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.8";
@ -20,12 +21,10 @@ buildPythonPackage rec {
src = fetchPypi { src = fetchPypi {
pname = "llama_index_vector_stores_postgres"; pname = "llama_index_vector_stores_postgres";
inherit version; inherit version;
hash = "sha256-E75oSh9MH8aX00y//jhNbehqYdIm5HfEjb5Swn7J/cQ="; hash = "sha256-9jE+1Gbx2y/CSqkpSfuYqgyX49yZwhwmJbiG/EHwTLw=";
}; };
pythonRemoveDeps = [ pythonRemoveDeps = [ "psycopg2-binary" ];
"psycopg2-binary"
];
build-system = [ build-system = [
poetry-core poetry-core
@ -39,9 +38,7 @@ buildPythonPackage rec {
psycopg2 psycopg2
]; ];
pythonImportsCheck = [ pythonImportsCheck = [ "llama_index.vector_stores.postgres" ];
"llama_index.vector_stores.postgres"
];
meta = with lib; { meta = with lib; {
description = "LlamaIndex Vector Store Integration for Postgres"; description = "LlamaIndex Vector Store Integration for Postgres";

View File

@ -1,16 +1,17 @@
{ lib {
, buildPythonPackage lib,
, fetchPypi buildPythonPackage,
, llama-index-core fetchPypi,
, qdrant-client llama-index-core,
, poetry-core qdrant-client,
, grpcio poetry-core,
, pythonOlder grpcio,
pythonOlder,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "llama-index-vector-stores-qdrant"; pname = "llama-index-vector-stores-qdrant";
version = "0.1.5"; version = "0.1.6";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.8";
@ -18,12 +19,10 @@ buildPythonPackage rec {
src = fetchPypi { src = fetchPypi {
pname = "llama_index_vector_stores_qdrant"; pname = "llama_index_vector_stores_qdrant";
inherit version; inherit version;
hash = "sha256-Q4+ehywPz+jrA36AtU9yiicRr2nU6BCO6Y42j0SKPdI="; hash = "sha256-MKmtxcHUqF3CzakGNXxvXy3jemoJNbdkCaqrgZ5Rtyo=";
}; };
build-system = [ build-system = [ poetry-core ];
poetry-core
];
dependencies = [ dependencies = [
grpcio grpcio
@ -31,9 +30,7 @@ buildPythonPackage rec {
qdrant-client qdrant-client
]; ];
pythonImportsCheck = [ pythonImportsCheck = [ "llama_index.vector_stores.qdrant" ];
"llama_index.vector_stores.qdrant"
];
meta = with lib; { meta = with lib; {
description = "LlamaIndex Vector Store Integration for Qdrant"; description = "LlamaIndex Vector Store Integration for Qdrant";

View File

@ -1,15 +1,16 @@
{ lib {
, buildPythonPackage lib,
, fetchPypi buildPythonPackage,
, httpx fetchPypi,
, poetry-core httpx,
, pydantic poetry-core,
, pythonOlder pydantic,
pythonOlder,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "llamaindex-py-client"; pname = "llamaindex-py-client";
version = "0.1.15"; version = "0.1.16";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.8";
@ -17,14 +18,12 @@ buildPythonPackage rec {
src = fetchPypi { src = fetchPypi {
pname = "llamaindex_py_client"; pname = "llamaindex_py_client";
inherit version; inherit version;
hash = "sha256-x84mhVupdhU7tAFXw8GUIjxrdReZNbmI3UvWo/6Dqss="; hash = "sha256-6Zu8CFXmyqp166IZzbPPbJQ66U+hXMu2ijoI1FL9Y4A=";
}; };
nativeBuildInputs = [ build-system = [ poetry-core ];
poetry-core
];
propagatedBuildInputs = [ dependencies = [
httpx httpx
pydantic pydantic
]; ];
@ -32,9 +31,7 @@ buildPythonPackage rec {
# Module has no tests # Module has no tests
doCheck = false; doCheck = false;
pythonImportsCheck = [ pythonImportsCheck = [ "llama_index_client" ];
"llama_index_client"
];
meta = with lib; { meta = with lib; {
description = "Client for LlamaIndex"; description = "Client for LlamaIndex";

View File

@ -1,32 +1,31 @@
{ lib {
, buildPythonPackage lib,
, fetchFromGitHub anyio,
, pythonOlder buildPythonPackage,
, hatchling cached-property,
# propagated dirty-equals,
, httpx distro,
, pydantic fetchFromGitHub,
, typing-extensions hatch-fancy-pypi-readme,
, anyio hatchling,
, distro httpx,
, sniffio numpy,
, cached-property pandas,
, tqdm pandas-stubs,
# optional pydantic,
, numpy pytest-asyncio,
, pandas pytest-mock,
, pandas-stubs pytestCheckHook,
# tests pythonOlder,
, pytestCheckHook respx,
, pytest-asyncio sniffio,
, pytest-mock tqdm,
, respx typing-extensions,
, dirty-equals
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "openai"; pname = "openai";
version = "1.13.3"; version = "1.16.2";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.7.1"; disabled = pythonOlder "3.7.1";
@ -35,14 +34,15 @@ buildPythonPackage rec {
owner = "openai"; owner = "openai";
repo = "openai-python"; repo = "openai-python";
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-8SHXUrPLZ7lgvB0jqZlcvKq5Zv2d2UqXjJpgiBpR8P8="; hash = "sha256-7JWwEoVEdAy+tPcaYUPN7xA62Egzlv/2NNcDqvmDYh4=";
}; };
nativeBuildInputs = [ build-system = [
hatchling hatchling
hatch-fancy-pypi-readme
]; ];
propagatedBuildInputs = [ dependencies = [
httpx httpx
pydantic pydantic
typing-extensions typing-extensions
@ -50,9 +50,7 @@ buildPythonPackage rec {
distro distro
sniffio sniffio
tqdm tqdm
] ++ lib.optionals (pythonOlder "3.8") [ ] ++ lib.optionals (pythonOlder "3.8") [ cached-property ];
cached-property
];
passthru.optional-dependencies = { passthru.optional-dependencies = {
datalib = [ datalib = [
@ -62,9 +60,7 @@ buildPythonPackage rec {
]; ];
}; };
pythonImportsCheck = [ pythonImportsCheck = [ "openai" ];
"openai"
];
nativeCheckInputs = [ nativeCheckInputs = [
pytestCheckHook pytestCheckHook
@ -75,7 +71,8 @@ buildPythonPackage rec {
]; ];
pytestFlagsArray = [ pytestFlagsArray = [
"-W" "ignore::DeprecationWarning" "-W"
"ignore::DeprecationWarning"
]; ];
disabledTests = [ disabledTests = [
@ -94,10 +91,10 @@ buildPythonPackage rec {
meta = with lib; { meta = with lib; {
description = "Python client library for the OpenAI API"; description = "Python client library for the OpenAI API";
mainProgram = "openai";
homepage = "https://github.com/openai/openai-python"; homepage = "https://github.com/openai/openai-python";
changelog = "https://github.com/openai/openai-python/releases/tag/v${version}"; changelog = "https://github.com/openai/openai-python/releases/tag/v${version}";
license = licenses.mit; license = licenses.mit;
maintainers = with maintainers; [ malo ]; maintainers = with maintainers; [ malo ];
mainProgram = "openai";
}; };
} }

View File

@ -1,37 +1,34 @@
{ lib {
, buildPythonPackage lib,
, fetchPypi buildPythonPackage,
, pythonOlder fetchPypi,
, setuptools pythonOlder,
setuptools,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "peaqevcore"; pname = "peaqevcore";
version = "19.7.12"; version = "19.7.14";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.7"; disabled = pythonOlder "3.7";
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit pname version;
hash = "sha256-/oo24hOH2aIXZH0CwmgTNIvA2MJWvOR084rZEOdldGM="; hash = "sha256-yUGvY5sjt2eXWpu/wSWjxpDpwBEJoZg3nI28QbdfiII=";
}; };
postPatch = '' postPatch = ''
sed -i "/extras_require/d" setup.py sed -i "/extras_require/d" setup.py
''; '';
build-system = [ build-system = [ setuptools ];
setuptools
];
# Tests are not shipped and source is not tagged # Tests are not shipped and source is not tagged
# https://github.com/elden1337/peaqev-core/issues/4 # https://github.com/elden1337/peaqev-core/issues/4
doCheck = false; doCheck = false;
pythonImportsCheck = [ pythonImportsCheck = [ "peaqevcore" ];
"peaqevcore"
];
meta = with lib; { meta = with lib; {
description = "Library for interacting with Peaqev car charging"; description = "Library for interacting with Peaqev car charging";

View File

@ -1,26 +1,29 @@
{ lib {
, stdenv lib,
, buildPythonPackage stdenv,
, fetchFromGitHub buildPythonPackage,
, fetchpatch fetchFromGitHub,
, pydicom fetchpatch,
, pyfakefs pydicom,
, pytest7CheckHook pyfakefs,
, sqlalchemy pytestCheckHook,
, pythonOlder pythonAtLeast,
pythonOlder,
setuptools,
sqlalchemy,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "pynetdicom"; pname = "pynetdicom";
version = "2.0.2"; version = "2.0.2";
format = "setuptools"; pyproject = true;
disabled = pythonOlder "3.7"; disabled = pythonOlder "3.7";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "pydicom"; owner = "pydicom";
repo = pname; repo = "pynetdicom";
rev = "v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-/JWQUtFBW4uqCbs/nUxj1pRBfTCXV4wcqTkqvzpdFrM="; hash = "sha256-/JWQUtFBW4uqCbs/nUxj1pRBfTCXV4wcqTkqvzpdFrM=";
}; };
@ -29,16 +32,16 @@ buildPythonPackage rec {
name = "fix-python-3.11-test-attribute-errors"; name = "fix-python-3.11-test-attribute-errors";
url = "https://github.com/pydicom/pynetdicom/pull/754/commits/2126bd932d6dfb3f07045eb9400acb7eaa1b3069.patch"; url = "https://github.com/pydicom/pynetdicom/pull/754/commits/2126bd932d6dfb3f07045eb9400acb7eaa1b3069.patch";
hash = "sha256-t6Lg0sTZSWIE5q5pkBvEoHDQ+cklDn8SgNBcFk1myp4="; hash = "sha256-t6Lg0sTZSWIE5q5pkBvEoHDQ+cklDn8SgNBcFk1myp4=";
}) })
]; ];
propagatedBuildInputs = [ build-system = [ setuptools ];
pydicom
]; dependencies = [ pydicom ];
nativeCheckInputs = [ nativeCheckInputs = [
pyfakefs pyfakefs
pytest7CheckHook pytestCheckHook
sqlalchemy sqlalchemy
]; ];
@ -72,15 +75,24 @@ buildPythonPackage rec {
disabledTestPaths = [ disabledTestPaths = [
# Ignore apps tests # Ignore apps tests
"pynetdicom/apps/tests/" "pynetdicom/apps/tests/"
] ++ lib.optionals (pythonAtLeast "3.12") [
# https://github.com/pydicom/pynetdicom/issues/924
"pynetdicom/tests/test_assoc.py"
"pynetdicom/tests/test_transport.py"
]; ];
pythonImportsCheck = [ pythonImportsCheck = [ "pynetdicom" ];
"pynetdicom"
pytestFlagsArray = [
# https://github.com/pydicom/pynetdicom/issues/923
"-W"
"ignore::pytest.PytestRemovedIn8Warning"
]; ];
meta = with lib; { meta = with lib; {
description = "Python implementation of the DICOM networking protocol"; description = "Python implementation of the DICOM networking protocol";
homepage = "https://github.com/pydicom/pynetdicom"; homepage = "https://github.com/pydicom/pynetdicom";
changelog = "https://github.com/pydicom/pynetdicom/releases/tag/v${version}";
license = with licenses; [ mit ]; license = with licenses; [ mit ];
maintainers = with maintainers; [ fab ]; maintainers = with maintainers; [ fab ];
# Tests are not passing on Darwin/Aarch64, thus it's assumed that it doesn't work # Tests are not passing on Darwin/Aarch64, thus it's assumed that it doesn't work
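
Note: the pynetdicom change above, like the other Python packaging diffs in this commit, moves from format = "setuptools" to the pyproject = true layout: the build backend goes into build-system and runtime requirements into dependencies. A minimal sketch of the resulting shape, with a hypothetical package name, placeholder hash, and illustrative dependency list, assuming the standard buildPythonPackage arguments:

  { lib, buildPythonPackage, fetchPypi, setuptools, pydicom, pytestCheckHook }:

  buildPythonPackage rec {
    pname = "example-pkg";  # hypothetical package
    version = "1.0.0";
    pyproject = true;       # PEP 517 build, replaces format = "setuptools"

    src = fetchPypi {
      inherit pname version;
      hash = "";            # placeholder, filled in with the real sdist hash
    };

    build-system = [ setuptools ];  # build-time backend (formerly in nativeBuildInputs)
    dependencies = [ pydicom ];     # runtime deps (formerly propagatedBuildInputs)

    nativeCheckInputs = [ pytestCheckHook ];
    pythonImportsCheck = [ "example_pkg" ];
  }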

View File

@ -1,29 +1,30 @@
{ lib {
, buildPythonPackage lib,
, fetchPypi ansible-core,
, pythonOlder buildPythonPackage,
, setuptools-scm fetchPypi,
, ansible-core paramiko,
, paramiko pytest-xdist,
, pytestCheckHook pytestCheckHook,
, pytest-xdist pythonOlder,
, pywinrm pywinrm,
, salt salt,
setuptools-scm,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "pytest-testinfra"; pname = "pytest-testinfra";
version = "10.1.0"; version = "10.1.0";
format = "setuptools"; pyproject = true;
disabled = pythonOlder "3.9";
src = fetchPypi { src = fetchPypi {
inherit pname version; inherit pname version;
hash = "sha256-93Qzm5R3Ws3zqMSjeTqOrS6N/HD/NLd4vhWquhQPoZ4="; hash = "sha256-93Qzm5R3Ws3zqMSjeTqOrS6N/HD/NLd4vhWquhQPoZ4=";
}; };
nativeBuildInputs = [ build-system = [ setuptools-scm ];
setuptools-scm
];
nativeCheckInputs = [ nativeCheckInputs = [
ansible-core ansible-core
@ -34,7 +35,7 @@ buildPythonPackage rec {
salt salt
]; ];
# markers don't get added when docker is not available (leads to warnings): # Markers don't get added when docker is not available (leads to warnings):
# https://github.com/pytest-dev/pytest-testinfra/blob/9.0.0/test/conftest.py#L223 # https://github.com/pytest-dev/pytest-testinfra/blob/9.0.0/test/conftest.py#L223
preCheck = '' preCheck = ''
export HOME=$(mktemp -d) export HOME=$(mktemp -d)
@ -44,8 +45,8 @@ buildPythonPackage rec {
\tskip_wsl: skip test on WSL, no systemd support' setup.cfg \tskip_wsl: skip test on WSL, no systemd support' setup.cfg
''; '';
# docker is required for all disabled tests
disabledTests = [ disabledTests = [
# docker is required for all disabled tests
# test/test_backends.py # test/test_backends.py
"test_command" "test_command"
"test_encoding" "test_encoding"
@ -53,18 +54,16 @@ buildPythonPackage rec {
"test_user_connection" "test_user_connection"
"test_sudo" "test_sudo"
"test_docker_encoding" "test_docker_encoding"
] ++ lib.optionals (pythonOlder "3.11") [ # Broken because salt package only built for Python
# broken because salt package only built for python 3.11
"test_backend_importables" "test_backend_importables"
]; ];
disabledTestPaths = [ disabledTestPaths = [ "test/test_modules.py" ];
"test/test_modules.py"
];
meta = with lib; { meta = with lib; {
description = "Pytest plugin for testing your infrastructure"; description = "Pytest plugin for testing your infrastructure";
homepage = "https://github.com/pytest-dev/pytest-testinfra"; homepage = "https://github.com/pytest-dev/pytest-testinfra";
changelog = "https://github.com/pytest-dev/pytest-testinfra/releases/tag/${version}";
license = licenses.asl20; license = licenses.asl20;
maintainers = with maintainers; [ hulr ]; maintainers = with maintainers; [ hulr ];
}; };

View File

@ -1,60 +1,55 @@
{ lib {
, buildPythonPackage lib,
, fetchFromGitHub buildPythonPackage,
, grpcio fastembed,
, grpcio-tools fetchFromGitHub,
, httpx grpcio,
, numpy grpcio-tools,
, pytestCheckHook httpx,
, poetry-core numpy,
, pydantic poetry-core,
, pythonOlder portalocker,
, urllib3 pydantic,
, portalocker pytest-asyncio,
, fastembed pytestCheckHook,
# check inputs pythonOlder,
, pytest-asyncio urllib3,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "qdrant-client"; pname = "qdrant-client";
version = "1.8.0"; version = "1.8.2";
format = "pyproject"; pyproject = true;
disabled = pythonOlder "3.7"; disabled = pythonOlder "3.7";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "qdrant"; owner = "qdrant";
repo = pname; repo = "qdrant-client";
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-Q+Iyjru4viAxJLDQdbNtsYctnXj8N4glItt44D9HPd8="; hash = "sha256-skPBKSqtwMfm5Tvvhg0pSOsnrf0tfvsUgwxjnUbj3NA=";
}; };
nativeBuildInputs = [ build-system = [ poetry-core ];
poetry-core
];
propagatedBuildInputs = [ dependencies = [
numpy
httpx
grpcio grpcio
# typing-extensions
grpcio-tools grpcio-tools
httpx
numpy
portalocker
pydantic pydantic
urllib3 urllib3
portalocker
] ++ httpx.optional-dependencies.http2; ] ++ httpx.optional-dependencies.http2;
pythonImportsCheck = [ pythonImportsCheck = [ "qdrant_client" ];
"qdrant_client"
];
nativeCheckInputs = [ nativeCheckInputs = [
pytestCheckHook pytestCheckHook
pytest-asyncio pytest-asyncio
]; ];
# tests require network access # Tests require network access
doCheck = false; doCheck = false;
passthru.optional-dependencies = { passthru.optional-dependencies = {

View File

@ -1,24 +1,25 @@
{ lib {
, asyncclick lib,
, buildPythonPackage asyncclick,
, fetchPypi buildPythonPackage,
, firebase-messaging fetchPypi,
, oauthlib firebase-messaging,
, poetry-core oauthlib,
, pytest-asyncio poetry-core,
, pytest-mock pytest-asyncio,
, pytest-socket pytest-mock,
, pytestCheckHook pytest-socket,
, pythonOlder pytestCheckHook,
, pytz pythonOlder,
, requests pytz,
, requests-mock requests,
, requests-oauthlib requests-mock,
requests-oauthlib,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "ring-doorbell"; pname = "ring-doorbell";
version = "0.8.9"; version = "0.8.10";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.8"; disabled = pythonOlder "3.8";
@ -26,12 +27,10 @@ buildPythonPackage rec {
src = fetchPypi { src = fetchPypi {
pname = "ring_doorbell"; pname = "ring_doorbell";
inherit version; inherit version;
hash = "sha256-FUPXia4lCDJDbzEzuewa5ShiIm0EvOrDE8GGZxYWvhk="; hash = "sha256-MKN38SqSn97SF9Y7IzNO6PHsbExjRNXph0Pp1mTNjVI=";
}; };
build-system = [ build-system = [ poetry-core ];
poetry-core
];
dependencies = [ dependencies = [
asyncclick asyncclick
@ -42,9 +41,7 @@ buildPythonPackage rec {
]; ];
passthru.optional-dependencies = { passthru.optional-dependencies = {
listen = [ listen = [ firebase-messaging ];
firebase-messaging
];
}; };
nativeCheckInputs = [ nativeCheckInputs = [
@ -55,16 +52,14 @@ buildPythonPackage rec {
requests-mock requests-mock
]; ];
pythonImportsCheck = [ pythonImportsCheck = [ "ring_doorbell" ];
"ring_doorbell"
];
meta = with lib; { meta = with lib; {
description = "Python library to communicate with Ring Door Bell"; description = "Library to communicate with Ring Door Bell";
mainProgram = "ring-doorbell";
homepage = "https://github.com/tchellomello/python-ring-doorbell"; homepage = "https://github.com/tchellomello/python-ring-doorbell";
changelog = "https://github.com/tchellomello/python-ring-doorbell/releases/tag/${version}"; changelog = "https://github.com/tchellomello/python-ring-doorbell/releases/tag/${version}";
license = licenses.lgpl3Plus; license = licenses.lgpl3Plus;
maintainers = with maintainers; [ graham33 ]; maintainers = with maintainers; [ graham33 ];
mainProgram = "ring-doorbell";
}; };
} }
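
Note: the ring-doorbell and qdrant-client diffs both rely on passthru.optional-dependencies. As a rough illustration (hypothetical downstream consumer; the group and package names are taken from the diffs above), a group declared in one package can be pulled in elsewhere through the passthru attribute:

  # inside the package definition (as in ring-doorbell above)
  passthru.optional-dependencies = {
    listen = [ firebase-messaging ];
  };

  # inside a hypothetical downstream package that wants the extra
  dependencies = [
    ring-doorbell
  ] ++ ring-doorbell.optional-dependencies.listen;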

View File

@ -1,23 +1,24 @@
{ lib {
, aiohttp lib,
, authcaptureproxy aiohttp,
, backoff authcaptureproxy,
, beautifulsoup4 backoff,
, buildPythonPackage beautifulsoup4,
, fetchFromGitHub buildPythonPackage,
, httpx fetchFromGitHub,
, orjson httpx,
, poetry-core orjson,
, pytest-asyncio poetry-core,
, pytestCheckHook pytest-asyncio,
, pythonOlder pytestCheckHook,
, tenacity pythonOlder,
, wrapt tenacity,
wrapt,
}: }:
buildPythonPackage rec { buildPythonPackage rec {
pname = "teslajsonpy"; pname = "teslajsonpy";
version = "3.10.2"; version = "3.10.3";
pyproject = true; pyproject = true;
disabled = pythonOlder "3.7"; disabled = pythonOlder "3.7";
@ -26,12 +27,10 @@ buildPythonPackage rec {
owner = "zabuldon"; owner = "zabuldon";
repo = "teslajsonpy"; repo = "teslajsonpy";
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-M3/1J73C3gUqyZYsu10O6FnACKYVbGuMH+8Ns0s2Rwc="; hash = "sha256-g5csh014gXdYJ28cBn0Frk5g3zFuZ9ufrypcLcNPwg0=";
}; };
build-system = [ build-system = [ poetry-core ];
poetry-core
];
dependencies = [ dependencies = [
authcaptureproxy authcaptureproxy
@ -49,9 +48,7 @@ buildPythonPackage rec {
pytestCheckHook pytestCheckHook
]; ];
pythonImportsCheck = [ pythonImportsCheck = [ "teslajsonpy" ];
"teslajsonpy"
];
meta = with lib; { meta = with lib; {
description = "Python library to work with Tesla API"; description = "Python library to work with Tesla API";

View File

@ -2,16 +2,16 @@
buildGoModule rec { buildGoModule rec {
pname = "editorconfig-checker"; pname = "editorconfig-checker";
version = "3.0.0"; version = "3.0.1";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "editorconfig-checker"; owner = "editorconfig-checker";
repo = "editorconfig-checker"; repo = "editorconfig-checker";
rev = "v${version}"; rev = "v${version}";
hash = "sha256-T2+IqHDRGpmMFOL2V6y5BbF+rfaMsKaXvQ48CFpc52I="; hash = "sha256-jqaYJmezekSKdwg8gNdU/DH6S83dPc5WmTU3nfvKjwo=";
}; };
vendorHash = "sha256-vHIv3a//EfkYE/pHUXgFBgV3qvdkMx9Ka5xCk1J5Urw="; vendorHash = "sha256-mPYxBqM4VoSmhtobKAn6p3BXIFGrUzs8gA9x97SmbTw=";
doCheck = false; doCheck = false;
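
Note: the Go bumps in this commit (editorconfig-checker here, plus mimir, cnspec, grype, and the smallstep cli further down) each change two hashes: the fetchFromGitHub hash for the new tag and the vendorHash for the re-vendored module set. A minimal sketch of the attributes that move on every such bump, with hypothetical names and placeholder values:

  { lib, buildGoModule, fetchFromGitHub }:

  buildGoModule rec {
    pname = "example-tool";  # hypothetical package
    version = "1.2.3";

    src = fetchFromGitHub {
      owner = "example";
      repo = "example-tool";
      rev = "v${version}";
      hash = "";             # source hash, changes with every release
    };

    # hash of the vendored Go dependencies; changes whenever go.mod/go.sum change
    vendorHash = "";
  }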

View File

@ -7,13 +7,13 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "framework-laptop-kmod"; pname = "framework-laptop-kmod";
version = "unstable-2023-12-03"; version = "0-unstable-2024-01-02";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "DHowett"; owner = "DHowett";
repo = "framework-laptop-kmod"; repo = "framework-laptop-kmod";
rev = "d5367eb9e5b5542407494d04ac1a0e77f10cc89d"; rev = "a9e8db9ba2959b75c1fb820ffac8fa189f0f63c3";
hash = "sha256-t8F4XHPkuCjWBrsEjW97ielYtf3V6hlLsrasvyab198="; hash = "sha256-Ai/OxvkaKPltri8R0oyfmxQLUVfaj6Q8vebrhmWYhUU=";
}; };
nativeBuildInputs = kernel.moduleBuildDependencies; nativeBuildInputs = kernel.moduleBuildDependencies;

View File

@ -24,15 +24,11 @@
"hash": "sha256:10dww3cyazcf3wjzh8igpa0frb8gvl6amnksh42zfkji4mskh2r6" "hash": "sha256:10dww3cyazcf3wjzh8igpa0frb8gvl6amnksh42zfkji4mskh2r6"
}, },
"6.6": { "6.6": {
"version": "6.6.24", "version": "6.6.25",
"hash": "sha256:1xgni9daid8c01f29xnxvrzja4sw0b1d5hhdxcw96cg8v9wzi7iy" "hash": "sha256:0i0zvqlj02rm6wpbidji0rn9559vrpfc1b8gbfjk70lhhyz11llr"
},
"6.7": {
"version": "6.7.12",
"hash": "sha256:113rf3jqfpf3wcv637zbq5ww2hpaaf23y6dsmkds01l8jkipjabc"
}, },
"6.8": { "6.8": {
"version": "6.8.3", "version": "6.8.4",
"hash": "sha256:11n9jsjg1wgffchpl6frk26pk4jx2m9m0w8cmizrmvhla1nvaznv" "hash": "sha256:0qwywy89an1w0yvs5957kqyv74mwgxady521w2lmyq00zjaw9pnm"
} }
} }

View File

@ -1,8 +1,8 @@
{ stdenv, lib, fetchsvn, linux { stdenv, lib, fetchsvn, linux
, scripts ? fetchsvn { , scripts ? fetchsvn {
url = "https://www.fsfla.org/svn/fsfla/software/linux-libre/releases/branches/"; url = "https://www.fsfla.org/svn/fsfla/software/linux-libre/releases/branches/";
rev = "19523"; rev = "19527";
sha256 = "0j3fhmb931niskv67v6ngwc11v2z78rr3bcy4369j44aqnbfaq1y"; sha256 = "0sb1qxc25g7f3v6qym9iwi3xjwmxzrf7w33yfmvv3k09rlfndijy";
} }
, ... , ...
}: }:

View File

@ -66,15 +66,6 @@
patch = ./export-rt-sched-migrate.patch; patch = ./export-rt-sched-migrate.patch;
}; };
rust_1_74 = {
name = "rust-1.74.patch";
patch = fetchpatch {
name = "rust-1.74.patch";
url = "https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/patch/?id=80fe9e51510b23472ad0f97175556490549ed714";
hash = "sha256-yGt7PwqN/G+ZtZSt6eARvVFdkC8tnUiu0Fz4cFCyguM=";
};
};
rust_1_75 = { rust_1_75 = {
name = "rust-1.75.patch"; name = "rust-1.75.patch";
patch = ./rust-1.75.patch; patch = ./rust-1.75.patch;

View File

@ -17,7 +17,7 @@ callPackage ./generic.nix args {
# check the release notes for compatible kernels # check the release notes for compatible kernels
kernelCompatible = kernel.kernelOlder "6.8"; kernelCompatible = kernel.kernelOlder "6.8";
latestCompatibleLinuxPackages = linuxKernel.packages.linux_6_7; latestCompatibleLinuxPackages = linuxKernel.packages.linux_6_6;
# This is a fixed version to the 2.1.x series, move only # This is a fixed version to the 2.1.x series, move only
# if the 2.1.x series moves. # if the 2.1.x series moves.

View File

@ -16,7 +16,7 @@ callPackage ./generic.nix args {
# check the release notes for compatible kernels # check the release notes for compatible kernels
kernelCompatible = kernel.kernelOlder "6.8"; kernelCompatible = kernel.kernelOlder "6.8";
latestCompatibleLinuxPackages = linuxKernel.packages.linux_6_7; latestCompatibleLinuxPackages = linuxKernel.packages.linux_6_6;
# this package should point to the latest release. # this package should point to the latest release.
version = "2.2.3"; version = "2.2.3";

View File

@ -16,7 +16,7 @@ callPackage ./generic.nix args {
# check the release notes for compatible kernels # check the release notes for compatible kernels
kernelCompatible = kernel.kernelOlder "6.9"; kernelCompatible = kernel.kernelOlder "6.9";
latestCompatibleLinuxPackages = linuxKernel.packages.linux_6_7; latestCompatibleLinuxPackages = linuxKernel.packages.linux_6_6;
# this package should point to a version / git revision compatible with the latest kernel release # this package should point to a version / git revision compatible with the latest kernel release
# IMPORTANT: Always use a tagged release candidate or commits from the # IMPORTANT: Always use a tagged release candidate or commits from the

View File

@ -1,13 +1,13 @@
{ lib, buildGoModule, fetchFromGitHub, nixosTests, nix-update-script }: { lib, buildGoModule, fetchFromGitHub, nixosTests, nix-update-script }:
buildGoModule rec { buildGoModule rec {
pname = "mimir"; pname = "mimir";
version = "2.11.0"; version = "2.12.0";
src = fetchFromGitHub { src = fetchFromGitHub {
rev = "${pname}-${version}"; rev = "${pname}-${version}";
owner = "grafana"; owner = "grafana";
repo = pname; repo = pname;
hash = "sha256-avmVNuUBvKBF7Wm05/AsK5Ld3ykmXCkOw0QQhGy8CKc="; hash = "sha256-V+O89hS2UopGLxGkg6W4gW8kj5QRzpwCQtq0QFrOWf0=";
}; };
vendorHash = null; vendorHash = null;

View File

@ -8,12 +8,12 @@ with builtins;
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "ttyd"; pname = "ttyd";
version = "1.7.5"; version = "1.7.7";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "tsl0922"; owner = "tsl0922";
repo = pname; repo = pname;
rev = "refs/tags/${version}"; rev = "refs/tags/${version}";
sha256 = "sha256-ci6PLrFQa/aX48kjAqQfCtOOhS06ikfEtYoCgmGhGdU="; sha256 = "sha256-7e08oBKU7BMZ8328qCfNynCSe7LVZ88+iQZRRKl2YkY=";
}; };
nativeBuildInputs = [ pkg-config cmake xxd ]; nativeBuildInputs = [ pkg-config cmake xxd ];

View File

@ -1,268 +0,0 @@
From 196c2e1036ed990bca57c199f271c0359509e9f9 Mon Sep 17 00:00:00 2001
From: Colin Walters <walters@verbum.org>
Date: Tue, 19 Jun 2018 09:34:18 -0400
Subject: [PATCH] Drop "ostree trivial-httpd" CLI, move to tests directory
See https://github.com/ostreedev/ostree/issues/1593
Basically this makes it easier for people packaging, as the trivial-httpd
is only for tests, and this way the binary will live with the tests.
Also at this point nothing should depend on `ostree trivial-httpd`.
---
Makefile-man.am | 6 --
Makefile-ostree.am | 7 ---
Makefile-tests.am | 7 +++
configure.ac | 9 ---
man/ostree-trivial-httpd.xml | 116 -----------------------------------
src/ostree/main.c | 5 --
tests/libtest.sh | 13 ++--
7 files changed, 12 insertions(+), 151 deletions(-)
delete mode 100644 man/ostree-trivial-httpd.xml
diff --git a/Makefile-man.am b/Makefile-man.am
index 78025fff..4aa668f6 100644
--- a/Makefile-man.am
+++ b/Makefile-man.am
@@ -32,12 +32,6 @@ ostree-init.1 ostree-log.1 ostree-ls.1 ostree-prune.1 ostree-pull-local.1 \
ostree-pull.1 ostree-refs.1 ostree-remote.1 ostree-reset.1 \
ostree-rev-parse.1 ostree-show.1 ostree-sign.1 ostree-summary.1 \
ostree-static-delta.1
-if USE_LIBSOUP
-man1_files += ostree-trivial-httpd.1
-else
-# We still want to distribute the source, even if we are not building it
-EXTRA_DIST += man/ostree-trivial-httpd.xml
-endif
if BUILDOPT_FUSE
man1_files += rofiles-fuse.1
diff --git a/Makefile-ostree.am b/Makefile-ostree.am
index 82af1681..dabc7004 100644
--- a/Makefile-ostree.am
+++ b/Makefile-ostree.am
@@ -138,13 +138,6 @@ ostree_SOURCES += src/ostree/ot-builtin-pull.c
endif
if USE_LIBSOUP
-# Eventually once we stop things from using this, we should support disabling this
-ostree_SOURCES += src/ostree/ot-builtin-trivial-httpd.c
-pkglibexec_PROGRAMS += ostree-trivial-httpd
-ostree_trivial_httpd_SOURCES = src/ostree/ostree-trivial-httpd.c
-ostree_trivial_httpd_CFLAGS = $(ostree_bin_shared_cflags) $(OT_INTERNAL_SOUP_CFLAGS)
-ostree_trivial_httpd_LDADD = $(ostree_bin_shared_ldadd) $(OT_INTERNAL_SOUP_LIBS)
-
if !USE_CURL
# This is necessary for the cookie jar bits
ostree_CFLAGS += $(OT_INTERNAL_SOUP_CFLAGS)
diff --git a/Makefile-tests.am b/Makefile-tests.am
index 6bae65cf..47b3ab20 100644
--- a/Makefile-tests.am
+++ b/Makefile-tests.am
@@ -275,6 +275,13 @@ _installed_or_uninstalled_test_programs += \
$(NULL)
endif
+if USE_LIBSOUP
+test_extra_programs += ostree-trivial-httpd
+ostree_trivial_httpd_SOURCES = src/ostree/ostree-trivial-httpd.c
+ostree_trivial_httpd_CFLAGS = $(common_tests_cflags) $(OT_INTERNAL_SOUP_CFLAGS)
+ostree_trivial_httpd_LDADD = $(common_tests_ldadd) $(OT_INTERNAL_SOUP_LIBS)
+endif
+
if USE_AVAHI
test_programs += tests/test-repo-finder-avahi
endif
diff --git a/configure.ac b/configure.ac
index 93b98cb9..a588eea6 100644
--- a/configure.ac
+++ b/configure.ac
@@ -186,14 +186,6 @@ if test x$with_soup != xno; then OSTREE_FEATURES="$OSTREE_FEATURES libsoup"; fi
AM_CONDITIONAL(USE_LIBSOUP, test x$with_soup != xno)
AM_CONDITIONAL(HAVE_LIBSOUP_CLIENT_CERTS, test x$have_libsoup_client_certs = xyes)
-AC_ARG_ENABLE(trivial-httpd-cmdline,
- [AS_HELP_STRING([--enable-trivial-httpd-cmdline],
- [Continue to support "ostree trivial-httpd" [default=no]])],,
- enable_trivial_httpd_cmdline=no)
-AS_IF([test x$enable_trivial_httpd_cmdline = xyes],
- [AC_DEFINE([BUILDOPT_ENABLE_TRIVIAL_HTTPD_CMDLINE], 1, [Define if we are enabling ostree trivial-httpd entrypoint])]
-)
-
AS_IF([test x$with_curl = xyes && test x$with_soup = xno], [
AC_MSG_WARN([Curl enabled, but libsoup is not; libsoup is needed for tests (make check, etc.)])
])
@@ -606,7 +598,6 @@ echo "
introspection: $found_introspection
rofiles-fuse: $enable_rofiles_fuse
HTTP backend: $fetcher_backend
- \"ostree trivial-httpd\": $enable_trivial_httpd_cmdline
SELinux: $with_selinux
fs-verity: $ac_cv_header_linux_fsverity_h
cryptographic checksums: $with_crypto
diff --git a/man/ostree-trivial-httpd.xml b/man/ostree-trivial-httpd.xml
deleted file mode 100644
index 7ba1dae8..00000000
--- a/man/ostree-trivial-httpd.xml
+++ /dev/null
@@ -1,116 +0,0 @@
-<?xml version='1.0'?> <!--*-nxml-*-->
-<!DOCTYPE refentry PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
- "http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
-
-<!--
-Copyright 2011,2013 Colin Walters <walters@verbum.org>
-
-SPDX-License-Identifier: LGPL-2.0+
-
-This library is free software; you can redistribute it and/or
-modify it under the terms of the GNU Lesser General Public
-License as published by the Free Software Foundation; either
-version 2 of the License, or (at your option) any later version.
-
-This library is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-Lesser General Public License for more details.
-
-You should have received a copy of the GNU Lesser General Public
-License along with this library. If not, see <https://www.gnu.org/licenses/>.
--->
-
-<refentry id="ostree">
-
- <refentryinfo>
- <title>ostree trivial-httpd</title>
- <productname>OSTree</productname>
-
- <authorgroup>
- <author>
- <contrib>Developer</contrib>
- <firstname>Colin</firstname>
- <surname>Walters</surname>
- <email>walters@verbum.org</email>
- </author>
- </authorgroup>
- </refentryinfo>
-
- <refmeta>
- <refentrytitle>ostree trivial-httpd</refentrytitle>
- <manvolnum>1</manvolnum>
- </refmeta>
-
- <refnamediv>
- <refname>ostree-trivial-httpd</refname>
- <refpurpose>Simple webserver</refpurpose>
- </refnamediv>
-
- <refsynopsisdiv>
- <cmdsynopsis>
- <command>ostree trivial-httpd</command> <arg choice="opt" rep="repeat">OPTIONS</arg> <arg choice="opt">DIR</arg>
- </cmdsynopsis>
- </refsynopsisdiv>
-
-<!-- Don't entirely understand this command. Needs details, better content-->
- <refsect1>
- <title>Description</title>
-
- <para>
- This runs a simple webserver and keeps it running until killed. If DIR is not specified, it defaults to the current directory.
- </para>
- </refsect1>
-
- <refsect1>
- <title>Options</title>
-
- <variablelist>
- <varlistentry>
- <term><option>--daemonize</option>,<option>-d</option></term>
-
- <listitem><para>
- Fork into background when ready.
- </para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>--autoexit</option></term>
-
- <listitem><para>
- Automatically exit when directory is deleted.
- </para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>--port-file</option>,<option>-p</option>="PATH"</term>
-
- <listitem><para>
- Write port number to PATH (- for standard output).
- </para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>--port</option>,<option>-P</option></term>
-
- <listitem><para>
- Use the specified TCP port to listen on.
- </para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>--force-range-requests</option></term>
-
- <listitem><para>
- Force range requests by only serving half of files.
- </para></listitem>
- </varlistentry>
- </variablelist>
- </refsect1>
-
-<!-- NEED EXAMPLE OUTPUT HERE!-->
- <refsect1>
- <title>Example</title>
- <para><command>$ ostree trivial-httpd</command></para>
- </refsect1>
-</refentry>
diff --git a/src/ostree/main.c b/src/ostree/main.c
index 7d17080c..19d9b8b0 100644
--- a/src/ostree/main.c
+++ b/src/ostree/main.c
@@ -118,11 +118,6 @@ static OstreeCommand commands[] = {
{ "summary", OSTREE_BUILTIN_FLAG_NONE,
ostree_builtin_summary,
"Manage summary metadata" },
-#if defined(HAVE_LIBSOUP) && defined(BUILDOPT_ENABLE_TRIVIAL_HTTPD_CMDLINE)
- { "trivial-httpd", OSTREE_BUILTIN_FLAG_NONE,
- ostree_builtin_trivial_httpd,
- NULL },
-#endif
{ NULL }
};
diff --git a/tests/libtest.sh b/tests/libtest.sh
index 686f08dc..79f8bd1f 100755
--- a/tests/libtest.sh
+++ b/tests/libtest.sh
@@ -174,15 +174,12 @@ fi
if test -n "${OSTREE_UNINSTALLED:-}"; then
OSTREE_HTTPD=${OSTREE_UNINSTALLED}/ostree-trivial-httpd
else
- # trivial-httpd is now in $libexecdir by default, which we don't
- # know at this point. Fortunately, libtest.sh is also in
- # $libexecdir, so make an educated guess. If it's not found, assume
- # it's still runnable as "ostree trivial-httpd".
- if [ -x "${test_srcdir}/../../libostree/ostree-trivial-httpd" ]; then
- OSTREE_HTTPD="${CMD_PREFIX} ${test_srcdir}/../../libostree/ostree-trivial-httpd"
- else
- OSTREE_HTTPD="${CMD_PREFIX} ostree trivial-httpd"
+ # trivial-httpd is now the test directory.
+ OSTREE_HTTPD="${G_TEST_BUILDDIR}/ostree-trivial-httpd"
+ if ! [ -x "${OSTREE_HTTPD}" ]; then
+ fatal "Failed to find ${OSTREE_HTTPD}"
fi
+ OSTREE_HTTPD="${CMD_PREFIX} ${OSTREE_HTTPD}"
fi
files_are_hardlinked() {
--
2.35.1

View File

@ -42,31 +42,15 @@ let
]); ]);
in stdenv.mkDerivation rec { in stdenv.mkDerivation rec {
pname = "ostree"; pname = "ostree";
version = "2023.2"; version = "2024.4";
outputs = [ "out" "dev" "man" "installedTests" ]; outputs = [ "out" "dev" "man" "installedTests" ];
src = fetchurl { src = fetchurl {
url = "https://github.com/ostreedev/ostree/releases/download/v${version}/libostree-${version}.tar.xz"; url = "https://github.com/ostreedev/ostree/releases/download/v${version}/libostree-${version}.tar.xz";
sha256 = "sha256-zrB4h1Wgv/VzjURUNVL7+IPPcd9IG6o8pyiNp6QCu4U="; sha256 = "sha256-Y8kZCCEzOsc3Pg2SPkwnZrJevc/fTvtEy1koxlidn8s=";
}; };
patches = [
# Tests access the helper using relative path
# https://github.com/ostreedev/ostree/issues/1593
# Patch from https://github.com/ostreedev/ostree/pull/1633
./01-Drop-ostree-trivial-httpd-CLI-move-to-tests-director.patch
# Workarounds for https://github.com/ostreedev/ostree/issues/1592
./fix-1592.patch
# Hard-code paths in tests
(substituteAll {
src = ./fix-test-paths.patch;
python3 = testPython.interpreter;
openssl = "${openssl}/bin/openssl";
})
];
nativeBuildInputs = [ nativeBuildInputs = [
autoconf autoconf

View File

@ -1,149 +0,0 @@
--- a/tests/basic-test.sh
+++ b/tests/basic-test.sh
@@ -226,7 +226,7 @@ cd ${test_tmpdir}
if $OSTREE commit ${COMMIT_ARGS} -b test-bootable --bootable $test_tmpdir/checkout-test2-4 2>err.txt; then
fatal "committed non-bootable tree"
fi
-assert_file_has_content err.txt "error: .*No such file or directory"
+assert_file_has_content err.txt "error:.*No such file or directory"
echo "ok commit fails bootable if no kernel"
cd ${test_tmpdir}
diff --git a/tests/pull-test.sh b/tests/pull-test.sh
index f4084290..4af5ec6f 100644
--- a/tests/pull-test.sh
+++ b/tests/pull-test.sh
@@ -297,7 +297,7 @@ ostree_repo_init mirrorrepo-local --mode=archive
if ${CMD_PREFIX} ostree --repo=mirrorrepo-local pull-local otherrepo 2>err.txt; then
fatal "pull with mixed refs succeeded?"
fi
-assert_file_has_content err.txt "error: Invalid ref name origin:main"
+assert_file_has_content err.txt "Invalid ref name origin:main"
${CMD_PREFIX} ostree --repo=mirrorrepo-local pull-local otherrepo localbranch
${CMD_PREFIX} ostree --repo=mirrorrepo-local rev-parse localbranch
${CMD_PREFIX} ostree --repo=mirrorrepo-local fsck
@@ -308,7 +308,7 @@ if ${CMD_PREFIX} ostree --repo=mirrorrepo-local pull-local otherrepo nosuchbranc
fatal "pulled nonexistent branch"
fi
# So true
-assert_file_has_content_literal err.txt "error: Refspec 'nosuchbranch' not found"
+assert_file_has_content_literal err.txt "Refspec 'nosuchbranch' not found"
echo "ok pull-local nonexistent branch"
cd ${test_tmpdir}
@@ -687,5 +687,5 @@ rm ostree-srv/gnomerepo/summary
if ${CMD_PREFIX} ostree --repo=repo pull origin main 2>err.txt; then
fatal "pull of invalid ref succeeded"
fi
-assert_file_has_content_literal err.txt 'error: Fetching checksum for ref ((empty), main): Invalid rev lots of html here lots of html here lots of html here lots of'
+assert_file_has_content_literal err.txt 'Fetching checksum for ref ((empty), main): Invalid rev lots of html here lots of html here lots of html here lots of'
echo "ok pull got HTML for a ref"
diff --git a/tests/test-config.sh b/tests/test-config.sh
index 2d9aaf53..f1e28614 100755
--- a/tests/test-config.sh
+++ b/tests/test-config.sh
@@ -44,7 +44,7 @@ assert_file_has_content list.txt "http://example\.com/ostree/repo/"
if ${CMD_PREFIX} ostree config --repo=repo get --group=core lock-timeout-secs extra 2>err.txt; then
assert_not_reached "ostree config get should error out if too many arguments are given"
fi
-assert_file_has_content err.txt "error: Too many arguments given"
+assert_file_has_content err.txt "Too many arguments given"
echo "ok config get"
${CMD_PREFIX} ostree config --repo=repo set core.mode bare-user-only
@@ -61,7 +61,7 @@ assert_file_has_content repo/config "http://example\.com/ostree/"
if ${CMD_PREFIX} ostree config --repo=repo set --group=core lock-timeout-secs 120 extra 2>err.txt; then
assert_not_reached "ostree config set should error out if too many arguments are given"
fi
-assert_file_has_content err.txt "error: Too many arguments given"
+assert_file_has_content err.txt "Too many arguments given"
echo "ok config set"
# Check that using `--` works and that "ostree config unset" works
@@ -78,7 +78,7 @@ if ${CMD_PREFIX} ostree config --repo=repo get core.lock-timeout-secs 2>err.txt;
fi
# Check for any character where quotation marks would be as they appear differently in the Fedora and Debian
# test suites (“” and '' respectively). See: https://github.com/ostreedev/ostree/pull/1839
-assert_file_has_content err.txt "error: Key file does not have key .lock-timeout-secs. in group .core."
+assert_file_has_content err.txt "Key file does not have key .lock-timeout-secs. in group .core."
# Check that it's idempotent
${CMD_PREFIX} ostree config --repo=repo unset core.lock-timeout-secs
@@ -95,5 +95,5 @@ ${CMD_PREFIX} ostree config --repo=repo unset --group='remote "aoeuhtns"' 'xa.ti
if ${CMD_PREFIX} ostree config --repo=repo unset core.lock-timeout-secs extra 2>err.txt; then
assert_not_reached "ostree config unset should error out if too many arguments are given"
fi
-assert_file_has_content err.txt "error: Too many arguments given"
+assert_file_has_content err.txt "Too many arguments given"
echo "ok config unset"
diff --git a/tests/test-fsck-collections.sh b/tests/test-fsck-collections.sh
index 3dbcdd23..d6359979 100755
--- a/tests/test-fsck-collections.sh
+++ b/tests/test-fsck-collections.sh
@@ -98,7 +98,7 @@ ${CMD_PREFIX} ostree fsck --repo=repo
if ${CMD_PREFIX} ostree fsck --repo=repo --verify-bindings > fsck 2> fsck-error; then
assert_not_reached "fsck unexpectedly succeeded after adding unbound ref!"
fi
-assert_file_has_content fsck-error "Commit has no requested ref new-ref in ref binding metadata (ref1)"
+assert_file_has_content fsck-error "Commit has no requested ref .new-ref. in ref binding metadata (.ref1.)"
assert_file_has_content fsck "^Validating refs\.\.\.$"
echo "ok 3 fsck detects missing ref bindings"
@@ -111,7 +111,7 @@ ${CMD_PREFIX} ostree --repo=repo refs --collections --create=org.example.Collect
if ${CMD_PREFIX} ostree fsck --repo=repo --verify-bindings > fsck 2> fsck-error; then
assert_not_reached "fsck unexpectedly succeeded after adding unbound ref!"
fi
-assert_file_has_content fsck-error "Commit has no requested ref new-ref in ref binding metadata (ref1)"
+assert_file_has_content fsck-error "Commit has no requested ref .new-ref. in ref binding metadata (.ref1.)"
assert_file_has_content fsck "^Validating refs\.\.\.$"
assert_file_has_content fsck "^Validating refs in collections\.\.\.$"
@@ -125,7 +125,7 @@ ${CMD_PREFIX} ostree --repo=repo refs --collections --create=org.example.Collect
if ${CMD_PREFIX} ostree fsck --repo=repo --verify-bindings > fsck 2> fsck-error; then
assert_not_reached "fsck unexpectedly succeeded after adding unbound ref!"
fi
-assert_file_has_content fsck-error "Commit has collection ID org\.example\.Collection in collection binding metadata, while the remote it came from has collection ID org\.example\.Collection2"
+assert_file_has_content fsck-error "Commit has collection ID .org\.example\.Collection. in collection binding metadata, while the remote it came from has collection ID .org\.example\.Collection2."
assert_file_has_content fsck "^Validating refs\.\.\.$"
assert_file_has_content fsck "^Validating refs in collections\.\.\.$"
@@ -145,7 +145,7 @@ echo "ok 6 fsck ignores unreferenced ref bindings"
if ${CMD_PREFIX} ostree fsck --repo=repo --verify-back-refs > fsck 2> fsck-error; then
assert_not_reached "fsck unexpectedly succeeded after adding unbound ref!"
fi
-assert_file_has_content fsck-error "Collectionref (org\.example\.Collection, ref1) in bindings for commit .* does not exist"
+assert_file_has_content fsck-error 'Collection.ref (org\.example\.Collection, ref1) in bindings for commit .* does not exist'
assert_file_has_content fsck "^Validating refs\.\.\.$"
assert_file_has_content fsck "^Validating refs in collections\.\.\.$"
@@ -184,7 +184,7 @@ ${CMD_PREFIX} ostree --repo=repo refs --create=new-ref $(cat ref3-checksum)
if ${CMD_PREFIX} ostree fsck --repo=repo --verify-bindings > fsck 2> fsck-error; then
assert_not_reached "fsck unexpectedly succeeded after adding unbound ref!"
fi
-assert_file_has_content fsck-error "Commit has no requested ref new-ref in ref binding metadata (ref3, ref4)"
+assert_file_has_content fsck-error "Commit has no requested ref .new-ref. in ref binding metadata (.ref3., .ref4.)"
assert_file_has_content fsck "^Validating refs\.\.\.$"
echo "ok 9 fsck detects missing ref bindings"
@@ -203,7 +203,7 @@ echo "ok 10 fsck ignores unreferenced ref bindings"
if ${CMD_PREFIX} ostree fsck --repo=repo --verify-back-refs > fsck 2> fsck-error; then
assert_not_reached "fsck unexpectedly succeeded after adding unbound ref!"
fi
-assert_file_has_content fsck-error "Ref ref3 in bindings for commit .* does not exist"
+assert_file_has_content fsck-error 'Ref .ref3. in bindings for commit .* does not exist'
assert_file_has_content fsck "^Validating refs\.\.\.$"
echo "ok 11 fsck ignores unreferenced ref bindings"
diff --git a/tests/test-remote-add.sh b/tests/test-remote-add.sh
index 2f5ea634..0f63853c 100755
--- a/tests/test-remote-add.sh
+++ b/tests/test-remote-add.sh
@@ -81,7 +81,7 @@ echo "ok remote delete"
if $OSTREE remote delete nosuchremote 2>err.txt; then
assert_not_reached "Deleting remote unexpectedly succeeded"
fi
-assert_file_has_content err.txt "error: "
+assert_file_has_content err.txt "not found"
$OSTREE remote delete --if-exists nosuchremote
echo "ok"

View File

@ -1,55 +0,0 @@
diff --git a/tests/libtest.sh b/tests/libtest.sh
index ca457fa2..c0a529ff 100755
--- a/tests/libtest.sh
+++ b/tests/libtest.sh
@@ -709,12 +709,12 @@ gen_ed25519_keys ()
{
# Generate private key in PEM format
pemfile="$(mktemp -p ${test_tmpdir} ed25519_XXXXXX.pem)"
- openssl genpkey -algorithm ed25519 -outform PEM -out "${pemfile}"
+ @openssl@ genpkey -algorithm ed25519 -outform PEM -out "${pemfile}"
# Based on: http://openssl.6102.n7.nabble.com/ed25519-key-generation-td73907.html
# Extract the private and public parts from generated key.
- ED25519PUBLIC="$(openssl pkey -outform DER -pubout -in ${pemfile} | tail -c 32 | base64)"
- ED25519SEED="$(openssl pkey -outform DER -in ${pemfile} | tail -c 32 | base64)"
+ ED25519PUBLIC="$(@openssl@ pkey -outform DER -pubout -in ${pemfile} | tail -c 32 | base64)"
+ ED25519SEED="$(@openssl@ pkey -outform DER -in ${pemfile} | tail -c 32 | base64)"
# Secret key is concantination of SEED and PUBLIC
ED25519SECRET="$(echo ${ED25519SEED}${ED25519PUBLIC} | base64 -d | base64 -w 0)"
@@ -725,7 +725,7 @@ gen_ed25519_keys ()
gen_ed25519_random_public()
{
- openssl genpkey -algorithm ED25519 | openssl pkey -outform DER | tail -c 32 | base64
+ @openssl@ genpkey -algorithm ED25519 | @openssl@ pkey -outform DER | tail -c 32 | base64
}
is_bare_user_only_repo () {
diff --git a/tests/test-basic-user-only.sh b/tests/test-basic-user-only.sh
index f65094fd..105be893 100755
--- a/tests/test-basic-user-only.sh
+++ b/tests/test-basic-user-only.sh
@@ -29,7 +29,7 @@ extra_basic_tests=5
. $(dirname $0)/basic-test.sh
$CMD_PREFIX ostree --version > version.yaml
-python3 -c 'import yaml; yaml.safe_load(open("version.yaml"))'
+@python3@ -c 'import yaml; yaml.safe_load(open("version.yaml"))'
echo "ok yaml version"
# Reset things so we don't inherit a lot of state from earlier tests
diff --git a/tests/test-remote-headers.sh b/tests/test-remote-headers.sh
index a41d087a..77b34c90 100755
--- a/tests/test-remote-headers.sh
+++ b/tests/test-remote-headers.sh
@@ -26,7 +26,7 @@ echo '1..2'
. $(dirname $0)/libtest.sh
V=$($CMD_PREFIX ostree --version | \
- python3 -c 'import sys, yaml; print(yaml.safe_load(sys.stdin)["libostree"]["Version"])')
+ @python3@ -c 'import sys, yaml; print(yaml.safe_load(sys.stdin)["libostree"]["Version"])')
setup_fake_remote_repo1 "archive" "" \
--expected-header foo=bar \

View File

@ -1,26 +1,25 @@
{ lib {
, buildGoModule lib,
, fetchFromGitHub buildGoModule,
fetchFromGitHub,
}: }:
buildGoModule rec { buildGoModule rec {
pname = "cnspec"; pname = "cnspec";
version = "10.10.0"; version = "10.11.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "mondoohq"; owner = "mondoohq";
repo = "cnspec"; repo = "cnspec";
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-6nWyLWBrnvdmyUiuWon+Lqtn/FzQ1mJ4rvoHH7sCsQY="; hash = "sha256-z8pWAazoafyrsz3EmfhtHDBhmHHjkGFEIL5BftW79fg=";
}; };
proxyVendor = true; proxyVendor = true;
vendorHash = "sha256-LaYpyKJPvB++4tbNV4OEtwtU4t+0DQUIM4lMlKGjgDk="; vendorHash = "sha256-6MVl8QuzxzcyFVP04ikO7B4Gk17e0TA4hxmL17OehCo=";
subPackages = [ subPackages = [ "apps/cnspec" ];
"apps/cnspec"
];
ldflags = [ ldflags = [
"-s" "-s"
@ -33,6 +32,9 @@ buildGoModule rec {
homepage = "https://github.com/mondoohq/cnspec"; homepage = "https://github.com/mondoohq/cnspec";
changelog = "https://github.com/mondoohq/cnspec/releases/tag/v${version}"; changelog = "https://github.com/mondoohq/cnspec/releases/tag/v${version}";
license = licenses.bsl11; license = licenses.bsl11;
maintainers = with maintainers; [ fab mariuskimmina ]; maintainers = with maintainers; [
fab
mariuskimmina
];
}; };
} }

View File

@ -6,13 +6,13 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "exploitdb"; pname = "exploitdb";
version = "2024-04-03"; version = "2024-04-04";
src = fetchFromGitLab { src = fetchFromGitLab {
owner = "exploit-database"; owner = "exploit-database";
repo = pname; repo = pname;
rev = "refs/tags/${version}"; rev = "refs/tags/${version}";
hash = "sha256-N6SF2BJltPfFqNA7YHDjuWLJw+PUk94pdl8mE9a1BiA="; hash = "sha256-qWmHY2CNZBY3kaWaru7jXJuPZOH96+Ea/8pUT0oKHF0=";
}; };
nativeBuildInputs = [ nativeBuildInputs = [

View File

@ -1,20 +1,21 @@
{ lib {
, buildGoModule lib,
, fetchFromGitHub buildGoModule,
, git fetchFromGitHub,
, installShellFiles git,
, openssl installShellFiles,
openssl,
}: }:
buildGoModule rec { buildGoModule rec {
pname = "grype"; pname = "grype";
version = "0.74.7"; version = "0.75.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "anchore"; owner = "anchore";
repo = "grype"; repo = "grype";
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-mP9Yjg5AVMIMvlOI+5AaCYzlw7h2K9WCFLY9ZwXmZk0="; hash = "sha256-FOKSJ9u1+johBRL37I/sYo+BH9Na3vzxRTr6PqiLWrs=";
# populate values that require us to use git. By doing this in postFetch we # populate values that require us to use git. By doing this in postFetch we
# can delete .git afterwards and maintain better reproducibility of the src. # can delete .git afterwards and maintain better reproducibility of the src.
leaveDotGit = true; leaveDotGit = true;
@ -29,20 +30,16 @@ buildGoModule rec {
proxyVendor = true; proxyVendor = true;
vendorHash = "sha256-X+E2g/FoDgjKq8XcPeEA/XbRJV8JkhY5AHPnw26hRnM="; vendorHash = "sha256-C1xM0OcEsplWOe0SGL6SCAvFq7M5LcekYyQTjP9EZB4=";
nativeBuildInputs = [ nativeBuildInputs = [ installShellFiles ];
installShellFiles
];
nativeCheckInputs = [ nativeCheckInputs = [
git git
openssl openssl
]; ];
subPackages = [ subPackages = [ "cmd/grype" ];
"cmd/grype"
];
excludedPackages = "test/integration"; excludedPackages = "test/integration";
@ -115,6 +112,10 @@ buildGoModule rec {
container image or filesystem to find known vulnerabilities. container image or filesystem to find known vulnerabilities.
''; '';
license = with licenses; [ asl20 ]; license = with licenses; [ asl20 ];
maintainers = with maintainers; [ fab jk kashw2 ]; maintainers = with maintainers; [
fab
jk
kashw2
];
}; };
} }

View File

@ -11,7 +11,7 @@ buildGoModule rec {
owner = "smallstep"; owner = "smallstep";
repo = "cli"; repo = "cli";
rev = "refs/tags/v${version}"; rev = "refs/tags/v${version}";
hash = "sha256-B0LGedExlk9XllWilZ0QAwQHNyISAI2WJ48P2STbxSY="; hash = "sha256-fxBWYz95yxNa7xotmId2SHLAIymJIBOJumYzAPB3Ias=";
}; };
ldflags = [ ldflags = [

View File

@ -27619,8 +27619,6 @@ with pkgs;
linux_6_1_hardened = linuxKernel.kernels.linux_6_1_hardened; linux_6_1_hardened = linuxKernel.kernels.linux_6_1_hardened;
linuxPackages_6_6_hardened = linuxKernel.packages.linux_6_6_hardened; linuxPackages_6_6_hardened = linuxKernel.packages.linux_6_6_hardened;
linux_6_6_hardened = linuxKernel.kernels.linux_6_6_hardened; linux_6_6_hardened = linuxKernel.kernels.linux_6_6_hardened;
linuxPackages_6_7_hardened = linuxKernel.packages.linux_6_7_hardened;
linux_6_7_hardened = linuxKernel.kernels.linux_6_7_hardened;
# GNU Linux-libre kernels # GNU Linux-libre kernels
linuxPackages-libre = linuxKernel.packages.linux_libre; linuxPackages-libre = linuxKernel.packages.linux_libre;

View File

@ -185,17 +185,6 @@ in {
]; ];
}; };
linux_6_7 = callPackage ../os-specific/linux/kernel/mainline.nix {
branch = "6.7";
kernelPatches = [
kernelPatches.bridge_stp_helper
kernelPatches.request_key_helper
kernelPatches.rust_1_74
kernelPatches.rust_1_75
kernelPatches.rust_1_76
];
};
linux_6_8 = callPackage ../os-specific/linux/kernel/mainline.nix { linux_6_8 = callPackage ../os-specific/linux/kernel/mainline.nix {
branch = "6.8"; branch = "6.8";
kernelPatches = [ kernelPatches = [
@ -269,7 +258,6 @@ in {
linux_5_15_hardened = hardenedKernelFor kernels.linux_5_15 { }; linux_5_15_hardened = hardenedKernelFor kernels.linux_5_15 { };
linux_6_1_hardened = hardenedKernelFor kernels.linux_6_1 { }; linux_6_1_hardened = hardenedKernelFor kernels.linux_6_1 { };
linux_6_6_hardened = hardenedKernelFor kernels.linux_6_6 { }; linux_6_6_hardened = hardenedKernelFor kernels.linux_6_6 { };
linux_6_7_hardened = hardenedKernelFor kernels.linux_6_7 { };
} // lib.optionalAttrs config.allowAliases { } // lib.optionalAttrs config.allowAliases {
linux_4_9 = throw "linux 4.9 was removed because it will reach its end of life within 22.11"; linux_4_9 = throw "linux 4.9 was removed because it will reach its end of life within 22.11";
@ -281,12 +269,14 @@ in {
linux_6_3 = throw "linux 6.3 was removed because it has reached its end of life upstream"; linux_6_3 = throw "linux 6.3 was removed because it has reached its end of life upstream";
linux_6_4 = throw "linux 6.4 was removed because it has reached its end of life upstream"; linux_6_4 = throw "linux 6.4 was removed because it has reached its end of life upstream";
linux_6_5 = throw "linux 6.5 was removed because it has reached its end of life upstream"; linux_6_5 = throw "linux 6.5 was removed because it has reached its end of life upstream";
linux_6_7 = throw "linux 6.7 was removed because it has reached its end of life upstream";
linux_xanmod_tt = throw "linux_xanmod_tt was removed because upstream no longer offers this option"; linux_xanmod_tt = throw "linux_xanmod_tt was removed because upstream no longer offers this option";
linux_5_18_hardened = throw "linux 5.18 was removed because it has reached its end of life upstream"; linux_5_18_hardened = throw "linux 5.18 was removed because it has reached its end of life upstream";
linux_5_19_hardened = throw "linux 5.19 was removed because it has reached its end of life upstream"; linux_5_19_hardened = throw "linux 5.19 was removed because it has reached its end of life upstream";
linux_6_0_hardened = throw "linux 6.0 was removed because it has reached its end of life upstream"; linux_6_0_hardened = throw "linux 6.0 was removed because it has reached its end of life upstream";
linux_6_7_hardened = throw "linux 6.7 was removed because it has reached its end of life upstream";
})); }));
/* Linux kernel modules are inherently tied to a specific kernel. So /* Linux kernel modules are inherently tied to a specific kernel. So
rather than provide specific instances of those packages for a rather than provide specific instances of those packages for a
@ -600,7 +590,6 @@ in {
linux_5_15 = recurseIntoAttrs (packagesFor kernels.linux_5_15); linux_5_15 = recurseIntoAttrs (packagesFor kernels.linux_5_15);
linux_6_1 = recurseIntoAttrs (packagesFor kernels.linux_6_1); linux_6_1 = recurseIntoAttrs (packagesFor kernels.linux_6_1);
linux_6_6 = recurseIntoAttrs (packagesFor kernels.linux_6_6); linux_6_6 = recurseIntoAttrs (packagesFor kernels.linux_6_6);
linux_6_7 = recurseIntoAttrs (packagesFor kernels.linux_6_7);
linux_6_8 = recurseIntoAttrs (packagesFor kernels.linux_6_8); linux_6_8 = recurseIntoAttrs (packagesFor kernels.linux_6_8);
__attrsFailEvaluation = true; __attrsFailEvaluation = true;
} // lib.optionalAttrs config.allowAliases { } // lib.optionalAttrs config.allowAliases {
@ -613,6 +602,7 @@ in {
linux_6_3 = throw "linux 6.3 was removed because it reached its end of life upstream"; # Added 2023-07-22 linux_6_3 = throw "linux 6.3 was removed because it reached its end of life upstream"; # Added 2023-07-22
linux_6_4 = throw "linux 6.4 was removed because it reached its end of life upstream"; # Added 2023-10-02 linux_6_4 = throw "linux 6.4 was removed because it reached its end of life upstream"; # Added 2023-10-02
linux_6_5 = throw "linux 6.5 was removed because it reached its end of life upstream"; # Added 2024-02-28 linux_6_5 = throw "linux 6.5 was removed because it reached its end of life upstream"; # Added 2024-02-28
linux_6_7 = throw "linux 6.7 was removed because it reached its end of life upstream"; # Added 2024-04-04
}; };
rtPackages = { rtPackages = {
@ -646,7 +636,6 @@ in {
linux_5_15_hardened = recurseIntoAttrs (packagesFor kernels.linux_5_15_hardened); linux_5_15_hardened = recurseIntoAttrs (packagesFor kernels.linux_5_15_hardened);
linux_6_1_hardened = recurseIntoAttrs (packagesFor kernels.linux_6_1_hardened); linux_6_1_hardened = recurseIntoAttrs (packagesFor kernels.linux_6_1_hardened);
linux_6_6_hardened = recurseIntoAttrs (packagesFor kernels.linux_6_6_hardened); linux_6_6_hardened = recurseIntoAttrs (packagesFor kernels.linux_6_6_hardened);
linux_6_7_hardened = recurseIntoAttrs (packagesFor kernels.linux_6_7_hardened);
linux_zen = recurseIntoAttrs (packagesFor kernels.linux_zen); linux_zen = recurseIntoAttrs (packagesFor kernels.linux_zen);
linux_lqx = recurseIntoAttrs (packagesFor kernels.linux_lqx); linux_lqx = recurseIntoAttrs (packagesFor kernels.linux_lqx);
@ -662,6 +651,7 @@ in {
linux_5_18_hardened = throw "linux 5.18 was removed because it has reached its end of life upstream"; linux_5_18_hardened = throw "linux 5.18 was removed because it has reached its end of life upstream";
linux_5_19_hardened = throw "linux 5.19 was removed because it has reached its end of life upstream"; linux_5_19_hardened = throw "linux 5.19 was removed because it has reached its end of life upstream";
linux_6_0_hardened = throw "linux 6.0 was removed because it has reached its end of life upstream"; linux_6_0_hardened = throw "linux 6.0 was removed because it has reached its end of life upstream";
linux_6_7_hardened = throw "linux 6.7 was removed because it has reached its end of life upstream";
linux_xanmod_tt = throw "linux_xanmod_tt was removed because upstream no longer offers this option"; linux_xanmod_tt = throw "linux_xanmod_tt was removed because upstream no longer offers this option";
}); });
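
Note: the linux_6_7 retirement above follows the usual pattern for dropping a kernel attribute: the real definition and its hardened/packages variants are deleted, and under config.allowAliases the old names are kept only as throws that explain the removal. Roughly, for a single attribute:

  {
    # remaining real kernel package sets
    linux_6_8 = recurseIntoAttrs (packagesFor kernels.linux_6_8);
  } // lib.optionalAttrs config.allowAliases {
    # retired name kept as an informative evaluation error
    linux_6_7 = throw "linux 6.7 was removed because it reached its end of life upstream"; # Added 2024-04-04
  }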