Merge staging-next into staging

commit cd58dff98b
Author: github-actions[bot]
Date: 2023-11-18 00:02:40 +00:00 (committed by GitHub)
GPG signature: no known key found for this signature in database (GPG Key ID: 4AEE18F83AFDEB23)
87 changed files with 954 additions and 3719 deletions

View File

@ -552,6 +552,10 @@ The module update takes care of the new config syntax and the data itself (user
## Nixpkgs internals {#sec-release-23.11-nixpkgs-internals}
- Node.js v14 and v16 have been removed as they were end-of-life. Any dependent packages that contributors were not able to reasonably upgrade were **removed** after a month of notice to their maintainers.
  - This includes VSCode Server.
  - This includes Kibana 7, as the ELK stack is unmaintained in nixpkgs and is marked for slow removal.
- The use of `sourceRoot = "source";`, `sourceRoot = "source/subdir";`, and similar lines in package derivations using the default `unpackPhase` is deprecated, as it requires `unpackPhase` to always produce a directory named "source". Use `sourceRoot = src.name;`, `sourceRoot = "${src.name}/subdir";`, or `setSourceRoot = "sourceRoot=$(echo */subdir)";` (or similar) instead; a minimal sketch follows this list.
- The `django` alias in the python package set was upgraded to Django 4.x.
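
As an illustration only (not part of this changeset): a minimal sketch of the `sourceRoot` migration described above, assuming a hypothetical package whose sources unpack into a `subdir` subdirectory. The package name, owner, repo, and hash are placeholders.

  { lib, stdenv, fetchFromGitHub }:

  stdenv.mkDerivation rec {
    pname = "example";            # hypothetical package name
    version = "1.0";

    src = fetchFromGitHub {
      owner = "example";          # hypothetical
      repo = "example";           # hypothetical
      rev = "v${version}";
      hash = lib.fakeHash;        # replace with the real hash
    };

    # Deprecated: hard-codes the directory name produced by the default unpackPhase
    #   sourceRoot = "source/subdir";
    # Preferred: derive the root from the source derivation's name
    sourceRoot = "${src.name}/subdir";
  }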

View File

@ -1147,7 +1147,6 @@
./services/search/elasticsearch-curator.nix
./services/search/elasticsearch.nix
./services/search/hound.nix
./services/search/kibana.nix
./services/search/meilisearch.nix
./services/search/opensearch.nix
./services/search/qdrant.nix
@ -1241,7 +1240,6 @@
./services/web-apps/changedetection-io.nix
./services/web-apps/chatgpt-retrieval-plugin.nix
./services/web-apps/cloudlog.nix
./services/web-apps/code-server.nix
./services/web-apps/convos.nix
./services/web-apps/dex.nix
./services/web-apps/discourse.nix

View File

@ -54,7 +54,7 @@ in {
serviceConfig = {
User = "slimserver";
# Issue 40589: Disable broken image/video support (audio still works!)
ExecStart = "${cfg.package}/slimserver.pl --logdir ${cfg.dataDir}/logs --prefsdir ${cfg.dataDir}/prefs --cachedir ${cfg.dataDir}/cache --noimage --novideo";
ExecStart = "${lib.getExe cfg.package} --logdir ${cfg.dataDir}/logs --prefsdir ${cfg.dataDir}/prefs --cachedir ${cfg.dataDir}/cache --noimage --novideo";
};
};

View File

@ -666,7 +666,9 @@ in {
${cfg.package}/bin/syncthing \
-no-browser \
-gui-address=${if isUnixGui then "unix://" else ""}${cfg.guiAddress} \
-home=${cfg.configDir} ${escapeShellArgs cfg.extraFlags}
-config=${cfg.configDir} \
-data=${cfg.dataDir} \
${escapeShellArgs cfg.extraFlags}
'';
MemoryDenyWriteExecute = true;
NoNewPrivileges = true;

View File

@ -1,213 +0,0 @@
{ config, lib, options, pkgs, ... }:
with lib;
let
cfg = config.services.kibana;
opt = options.services.kibana;
ge7 = builtins.compareVersions cfg.package.version "7" >= 0;
lt6_6 = builtins.compareVersions cfg.package.version "6.6" < 0;
cfgFile = pkgs.writeText "kibana.json" (builtins.toJSON (
(filterAttrsRecursive (n: v: v != null && v != []) ({
server.host = cfg.listenAddress;
server.port = cfg.port;
server.ssl.certificate = cfg.cert;
server.ssl.key = cfg.key;
kibana.index = cfg.index;
kibana.defaultAppId = cfg.defaultAppId;
elasticsearch.url = cfg.elasticsearch.url;
elasticsearch.hosts = cfg.elasticsearch.hosts;
elasticsearch.username = cfg.elasticsearch.username;
elasticsearch.password = cfg.elasticsearch.password;
elasticsearch.ssl.certificate = cfg.elasticsearch.cert;
elasticsearch.ssl.key = cfg.elasticsearch.key;
elasticsearch.ssl.certificateAuthorities = cfg.elasticsearch.certificateAuthorities;
} // cfg.extraConf)
)));
in {
options.services.kibana = {
enable = mkEnableOption (lib.mdDoc "kibana service");
listenAddress = mkOption {
description = lib.mdDoc "Kibana listening host";
default = "127.0.0.1";
type = types.str;
};
port = mkOption {
description = lib.mdDoc "Kibana listening port";
default = 5601;
type = types.port;
};
cert = mkOption {
description = lib.mdDoc "Kibana ssl certificate.";
default = null;
type = types.nullOr types.path;
};
key = mkOption {
description = lib.mdDoc "Kibana ssl key.";
default = null;
type = types.nullOr types.path;
};
index = mkOption {
description = lib.mdDoc "Elasticsearch index to use for saving kibana config.";
default = ".kibana";
type = types.str;
};
defaultAppId = mkOption {
description = lib.mdDoc "Elasticsearch default application id.";
default = "discover";
type = types.str;
};
elasticsearch = {
url = mkOption {
description = lib.mdDoc ''
Elasticsearch url.
Defaults to `"http://localhost:9200"`.
Don't set this when using Kibana >= 7.0.0 because it will result in a
configuration error. Use {option}`services.kibana.elasticsearch.hosts`
instead.
'';
default = null;
type = types.nullOr types.str;
};
hosts = mkOption {
description = lib.mdDoc ''
The URLs of the Elasticsearch instances to use for all your queries.
All nodes listed here must be on the same cluster.
Defaults to `[ "http://localhost:9200" ]`.
This option is only valid when using kibana >= 6.6.
'';
default = null;
type = types.nullOr (types.listOf types.str);
};
username = mkOption {
description = lib.mdDoc "Username for elasticsearch basic auth.";
default = null;
type = types.nullOr types.str;
};
password = mkOption {
description = lib.mdDoc "Password for elasticsearch basic auth.";
default = null;
type = types.nullOr types.str;
};
ca = mkOption {
description = lib.mdDoc ''
CA file to auth against elasticsearch.
It's recommended to use the {option}`certificateAuthorities` option
when using kibana-5.4 or newer.
'';
default = null;
type = types.nullOr types.path;
};
certificateAuthorities = mkOption {
description = lib.mdDoc ''
CA files to auth against elasticsearch.
Please use the {option}`ca` option when using kibana \< 5.4
because those old versions don't support setting multiple CA's.
This defaults to the singleton list [ca] when the {option}`ca` option is defined.
'';
default = lib.optional (cfg.elasticsearch.ca != null) ca;
defaultText = literalExpression ''
lib.optional (config.${opt.elasticsearch.ca} != null) ca
'';
type = types.listOf types.path;
};
cert = mkOption {
description = lib.mdDoc "Certificate file to auth against elasticsearch.";
default = null;
type = types.nullOr types.path;
};
key = mkOption {
description = lib.mdDoc "Key file to auth against elasticsearch.";
default = null;
type = types.nullOr types.path;
};
};
package = mkOption {
description = lib.mdDoc "Kibana package to use";
default = pkgs.kibana;
defaultText = literalExpression "pkgs.kibana";
type = types.package;
};
dataDir = mkOption {
description = lib.mdDoc "Kibana data directory";
default = "/var/lib/kibana";
type = types.path;
};
extraConf = mkOption {
description = lib.mdDoc "Kibana extra configuration";
default = {};
type = types.attrs;
};
};
config = mkIf (cfg.enable) {
assertions = [
{
assertion = ge7 -> cfg.elasticsearch.url == null;
message =
"The option services.kibana.elasticsearch.url has been removed when using kibana >= 7.0.0. " +
"Please use option services.kibana.elasticsearch.hosts instead.";
}
{
assertion = lt6_6 -> cfg.elasticsearch.hosts == null;
message =
"The option services.kibana.elasticsearch.hosts is only valid for kibana >= 6.6.";
}
];
systemd.services.kibana = {
description = "Kibana Service";
wantedBy = [ "multi-user.target" ];
after = [ "network.target" "elasticsearch.service" ];
environment = { BABEL_CACHE_PATH = "${cfg.dataDir}/.babelcache.json"; };
serviceConfig = {
ExecStart =
"${cfg.package}/bin/kibana" +
" --config ${cfgFile}" +
" --path.data ${cfg.dataDir}";
User = "kibana";
WorkingDirectory = cfg.dataDir;
};
};
environment.systemPackages = [ cfg.package ];
users.users.kibana = {
isSystemUser = true;
description = "Kibana service user";
home = cfg.dataDir;
createHome = true;
group = "kibana";
};
users.groups.kibana = {};
};
}

View File

@ -1,259 +0,0 @@
{ config, lib, pkgs, ... }:
let
cfg = config.services.code-server;
defaultUser = "code-server";
defaultGroup = defaultUser;
in {
options = {
services.code-server = {
enable = lib.mkEnableOption (lib.mdDoc "code-server");
package = lib.mkPackageOptionMD pkgs "code-server" {
example = ''
pkgs.vscode-with-extensions.override {
vscode = pkgs.code-server;
vscodeExtensions = with pkgs.vscode-extensions; [
bbenoist.nix
dracula-theme.theme-dracula
];
}
'';
};
extraPackages = lib.mkOption {
default = [ ];
description = lib.mdDoc ''
Additional packages to add to the code-server {env}`PATH`.
'';
example = lib.literalExpression "[ pkgs.go ]";
type = lib.types.listOf lib.types.package;
};
extraEnvironment = lib.mkOption {
type = lib.types.attrsOf lib.types.str;
description = lib.mdDoc ''
Additional environment variables to pass to code-server.
'';
default = { };
example = { PKG_CONFIG_PATH = "/run/current-system/sw/lib/pkgconfig"; };
};
extraArguments = lib.mkOption {
default = [ ];
description = lib.mdDoc ''
Additional arguments to pass to code-server.
'';
example = lib.literalExpression ''[ "--log=info" ]'';
type = lib.types.listOf lib.types.str;
};
host = lib.mkOption {
default = "localhost";
description = lib.mdDoc ''
The host name or IP address the server should listen to.
'';
type = lib.types.str;
};
port = lib.mkOption {
default = 4444;
description = lib.mdDoc ''
The port the server should listen to.
'';
type = lib.types.port;
};
auth = lib.mkOption {
default = "password";
description = lib.mdDoc ''
The type of authentication to use.
'';
type = lib.types.enum [ "none" "password" ];
};
hashedPassword = lib.mkOption {
default = "";
description = lib.mdDoc ''
Create the password with: `echo -n 'thisismypassword' | npx argon2-cli -e`.
'';
type = lib.types.str;
};
user = lib.mkOption {
default = defaultUser;
example = "yourUser";
description = lib.mdDoc ''
The user to run code-server as.
By default, a user named `${defaultUser}` will be created.
'';
type = lib.types.str;
};
group = lib.mkOption {
default = defaultGroup;
example = "yourGroup";
description = lib.mdDoc ''
The group to run code-server under.
By default, a group named `${defaultGroup}` will be created.
'';
type = lib.types.str;
};
extraGroups = lib.mkOption {
default = [ ];
description = lib.mdDoc ''
An array of additional groups for the `${defaultUser}` user.
'';
example = [ "docker" ];
type = lib.types.listOf lib.types.str;
};
socket = lib.mkOption {
default = null;
example = "/run/code-server/socket";
description = lib.mdDoc ''
Path to a socket (bind-addr will be ignored).
'';
type = lib.types.nullOr lib.types.str;
};
socketMode = lib.mkOption {
default = null;
description = lib.mdDoc ''
File mode of the socket.
'';
type = lib.types.nullOr lib.types.str;
};
userDataDir = lib.mkOption {
default = null;
description = lib.mdDoc ''
Path to the user data directory.
'';
type = lib.types.nullOr lib.types.str;
};
extensionsDir = lib.mkOption {
default = null;
description = lib.mdDoc ''
Path to the extensions directory.
'';
type = lib.types.nullOr lib.types.str;
};
proxyDomain = lib.mkOption {
default = null;
example = "code-server.lan";
description = lib.mdDoc ''
Domain used for proxying ports.
'';
type = lib.types.nullOr lib.types.str;
};
disableTelemetry = lib.mkOption {
default = false;
example = true;
description = lib.mdDoc ''
Disable telemetry.
'';
type = lib.types.bool;
};
disableUpdateCheck = lib.mkOption {
default = false;
example = true;
description = lib.mdDoc ''
Disable update check.
Without this flag, code-server checks every 6 hours against the latest github release and
then notifies you once every week that a new release is available.
'';
type = lib.types.bool;
};
disableFileDownloads = lib.mkOption {
default = false;
example = true;
description = lib.mdDoc ''
Disable file downloads from Code.
'';
type = lib.types.bool;
};
disableWorkspaceTrust = lib.mkOption {
default = false;
example = true;
description = lib.mdDoc ''
Disable Workspace Trust feature.
'';
type = lib.types.bool;
};
disableGettingStartedOverride = lib.mkOption {
default = false;
example = true;
description = lib.mdDoc ''
Disable the coder/coder override in the Help: Getting Started page.
'';
type = lib.types.bool;
};
};
};
config = lib.mkIf cfg.enable {
systemd.services.code-server = {
description = "Code server";
wantedBy = [ "multi-user.target" ];
after = [ "network-online.target" ];
path = cfg.extraPackages;
environment = {
HASHED_PASSWORD = cfg.hashedPassword;
} // cfg.extraEnvironment;
serviceConfig = {
ExecStart = ''
${lib.getExe cfg.package} \
--auth=${cfg.auth} \
--bind-addr=${cfg.host}:${toString cfg.port} \
'' + lib.optionalString (cfg.socket != null) ''
--socket=${cfg.socket} \
'' + lib.optionalString (cfg.userDataDir != null) ''
--user-data-dir=${cfg.userDataDir} \
'' + lib.optionalString (cfg.extensionsDir != null) ''
--extensions-dir=${cfg.extensionsDir} \
'' + lib.optionalString (cfg.disableTelemetry == true) ''
--disable-telemetry \
'' + lib.optionalString (cfg.disableUpdateCheck == true) ''
--disable-update-check \
'' + lib.optionalString (cfg.disableFileDownloads == true) ''
--disable-file-downloads \
'' + lib.optionalString (cfg.disableWorkspaceTrust == true) ''
--disable-workspace-trust \
'' + lib.optionalString (cfg.disableGettingStartedOverride == true) ''
--disable-getting-started-override \
'' + lib.escapeShellArgs cfg.extraArguments;
ExecReload = "${pkgs.coreutils}/bin/kill -HUP $MAINPID";
RuntimeDirectory = cfg.user;
User = cfg.user;
Group = cfg.group;
Restart = "on-failure";
};
};
users.users."${cfg.user}" = lib.mkMerge [
(lib.mkIf (cfg.user == defaultUser) {
isNormalUser = true;
description = "code-server user";
inherit (cfg) group;
})
{
packages = cfg.extraPackages;
inherit (cfg) extraGroups;
}
];
users.groups."${defaultGroup}" = lib.mkIf (cfg.group == defaultGroup) { };
};
meta.maintainers = [ lib.maintainers.stackshadow ];
}

View File

@ -169,6 +169,15 @@ in
off if you want to configure it manually.
'';
};
excalidraw.enable = mkEnableOption (lib.mdDoc "Excalidraw collaboration backend for Jitsi");
excalidraw.port = mkOption {
type = types.port;
default = 3002;
description = lib.mdDoc ''The port which the Excalidraw backend for Jitsi should listen to.'';
};
secureDomain.enable = mkEnableOption (lib.mdDoc "Authenticated room creation");
};
config = mkIf cfg.enable {
@ -192,41 +201,118 @@ in
roomLocking = false;
roomDefaultPublicJids = true;
extraConfig = ''
restrict_room_creation = true
storage = "memory"
admins = { "focus@auth.${cfg.hostName}" }
'';
}
{
domain = "internal.${cfg.hostName}";
domain = "breakout.${cfg.hostName}";
name = "Jitsi Meet Breakout MUC";
roomLocking = false;
roomDefaultPublicJids = true;
extraConfig = ''
restrict_room_creation = true
storage = "memory"
admins = { "focus@auth.${cfg.hostName}" }
'';
}
{
domain = "internal.auth.${cfg.hostName}";
name = "Jitsi Meet Videobridge MUC";
roomLocking = false;
roomDefaultPublicJids = true;
extraConfig = ''
storage = "memory"
admins = { "focus@auth.${cfg.hostName}", "jvb@auth.${cfg.hostName}" }
'';
#-- muc_room_cache_size = 1000
}
{
domain = "lobby.${cfg.hostName}";
name = "Jitsi Meet Lobby MUC";
roomLocking = false;
roomDefaultPublicJids = true;
extraConfig = ''
restrict_room_creation = true
storage = "memory"
'';
}
];
extraModules = [
"pubsub"
"smacks"
"speakerstats"
"external_services"
"conference_duration"
"end_conference"
"muc_lobby_rooms"
"muc_breakout_rooms"
"av_moderation"
"muc_hide_all"
"muc_meeting_id"
"muc_domain_mapper"
"muc_rate_limit"
"limits_exception"
"persistent_lobby"
"room_metadata"
];
extraModules = [ "pubsub" "smacks" ];
extraPluginPaths = [ "${pkgs.jitsi-meet-prosody}/share/prosody-plugins" ];
extraConfig = lib.mkMerge [ (mkAfter ''
Component "focus.${cfg.hostName}" "client_proxy"
target_address = "focus@auth.${cfg.hostName}"
extraConfig = lib.mkMerge [
(mkAfter ''
Component "focus.${cfg.hostName}" "client_proxy"
target_address = "focus@auth.${cfg.hostName}"
Component "speakerstats.${cfg.hostName}" "speakerstats_component"
muc_component = "conference.${cfg.hostName}"
Component "conferenceduration.${cfg.hostName}" "conference_duration_component"
muc_component = "conference.${cfg.hostName}"
Component "endconference.${cfg.hostName}" "end_conference"
muc_component = "conference.${cfg.hostName}"
Component "avmoderation.${cfg.hostName}" "av_moderation_component"
muc_component = "conference.${cfg.hostName}"
Component "metadata.${cfg.hostName}" "room_metadata_component"
muc_component = "conference.${cfg.hostName}"
breakout_rooms_component = "breakout.${cfg.hostName}"
'')
(mkBefore ''
muc_mapper_domain_base = "${cfg.hostName}"
cross_domain_websocket = true;
consider_websocket_secure = true;
unlimited_jids = {
"focus@auth.${cfg.hostName}",
"jvb@auth.${cfg.hostName}"
}
'')
];
virtualHosts.${cfg.hostName} = {
enabled = true;
domain = cfg.hostName;
extraConfig = ''
authentication = "anonymous"
authentication = ${if cfg.secureDomain.enable then "\"internal_hashed\"" else "\"jitsi-anonymous\""}
c2s_require_encryption = false
admins = { "focus@auth.${cfg.hostName}" }
smacks_max_unacked_stanzas = 5
smacks_hibernation_time = 60
smacks_max_hibernated_sessions = 1
smacks_max_old_sessions = 1
av_moderation_component = "avmoderation.${cfg.hostName}"
speakerstats_component = "speakerstats.${cfg.hostName}"
conference_duration_component = "conferenceduration.${cfg.hostName}"
end_conference_component = "endconference.${cfg.hostName}"
c2s_require_encryption = false
lobby_muc = "lobby.${cfg.hostName}"
breakout_rooms_muc = "breakout.${cfg.hostName}"
room_metadata_component = "metadata.${cfg.hostName}"
main_muc = "conference.${cfg.hostName}"
'';
ssl = {
cert = "/var/lib/jitsi-meet/jitsi-meet.crt";
@ -237,7 +323,7 @@ in
enabled = true;
domain = "auth.${cfg.hostName}";
extraConfig = ''
authentication = "internal_plain"
authentication = "internal_hashed"
'';
ssl = {
cert = "/var/lib/jitsi-meet/jitsi-meet.crt";
@ -252,6 +338,14 @@ in
c2s_require_encryption = false
'';
};
virtualHosts."guest.${cfg.hostName}" = {
enabled = true;
domain = "guest.${cfg.hostName}";
extraConfig = ''
authentication = "anonymous"
c2s_require_encryption = false
'';
};
};
systemd.services.prosody = mkIf cfg.prosody.enable {
preStart = let
@ -270,7 +364,7 @@ in
reloadIfChanged = true;
};
users.groups.jitsi-meet = {};
users.groups.jitsi-meet = { };
systemd.tmpfiles.rules = [
"d '/var/lib/jitsi-meet' 0750 root jitsi-meet - -"
];
@ -317,6 +411,20 @@ in
'';
};
systemd.services.jitsi-excalidraw = mkIf cfg.excalidraw.enable {
description = "Excalidraw collaboration backend for Jitsi";
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
environment.PORT = toString cfg.excalidraw.port;
serviceConfig = {
Type = "simple";
ExecStart = "${pkgs.jitsi-excalidraw}/bin/jitsi-excalidraw-backend";
Restart = "on-failure";
Group = "jitsi-meet";
};
};
services.nginx = mkIf cfg.nginx.enable {
enable = mkDefault true;
virtualHosts.${cfg.hostName} = {
@ -345,12 +453,23 @@ in
locations."=/external_api.js" = mkDefault {
alias = "${pkgs.jitsi-meet}/libs/external_api.min.js";
};
locations."=/_api/room-info" = {
proxyPass = "http://localhost:5280/room-info";
extraConfig = ''
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;
'';
};
locations."=/config.js" = mkDefault {
alias = overrideJs "${pkgs.jitsi-meet}/config.js" "config" (recursiveUpdate defaultCfg cfg.config) cfg.extraConfig;
};
locations."=/interface_config.js" = mkDefault {
alias = overrideJs "${pkgs.jitsi-meet}/interface_config.js" "interfaceConfig" cfg.interfaceConfig "";
};
locations."/socket.io/" = mkIf cfg.excalidraw.enable {
proxyPass = "http://127.0.0.1:${toString cfg.excalidraw.port}";
proxyWebsockets = true;
};
};
};
@ -359,7 +478,7 @@ in
virtualHosts.${cfg.hostName} = {
extraConfig =
let
templatedJitsiMeet = pkgs.runCommand "templated-jitsi-meet" {} ''
templatedJitsiMeet = pkgs.runCommand "templated-jitsi-meet" { } ''
cp -R ${pkgs.jitsi-meet}/* .
for file in *.html **/*.html ; do
${pkgs.sd}/bin/sd '<!--#include virtual="(.*)" -->' '{{ include "$1" }}' $file
@ -390,13 +509,24 @@ in
};
};
services.jitsi-meet.config = recursiveUpdate
(mkIf cfg.excalidraw.enable {
whiteboard = {
enabled = true;
collabServerBaseUrl = "https://${cfg.hostName}";
};
})
(mkIf cfg.secureDomain.enable {
hosts.anonymousdomain = "guest.${cfg.hostName}";
});
services.jitsi-videobridge = mkIf cfg.videobridge.enable {
enable = true;
xmppConfigs."localhost" = {
userName = "jvb";
domain = "auth.${cfg.hostName}";
passwordFile = "/var/lib/jitsi-meet/videobridge-secret";
mucJids = "jvbbrewery@internal.${cfg.hostName}";
mucJids = "jvbbrewery@internal.auth.${cfg.hostName}";
disableCertificateVerification = true;
};
};
@ -409,17 +539,27 @@ in
userName = "focus";
userPasswordFile = "/var/lib/jitsi-meet/jicofo-user-secret";
componentPasswordFile = "/var/lib/jitsi-meet/jicofo-component-secret";
bridgeMuc = "jvbbrewery@internal.${cfg.hostName}";
bridgeMuc = "jvbbrewery@internal.auth.${cfg.hostName}";
config = mkMerge [{
jicofo.xmpp.service.disable-certificate-verification = true;
jicofo.xmpp.client.disable-certificate-verification = true;
#} (lib.mkIf cfg.jibri.enable {
} (lib.mkIf (config.services.jibri.enable || cfg.jibri.enable) {
jicofo.jibri = {
brewery-jid = "JibriBrewery@internal.${cfg.hostName}";
pending-timeout = "90";
};
})];
}
(lib.mkIf (config.services.jibri.enable || cfg.jibri.enable) {
jicofo.jibri = {
brewery-jid = "JibriBrewery@internal.auth.${cfg.hostName}";
pending-timeout = "90";
};
})
(lib.mkIf cfg.secureDomain.enable {
jicofo = {
authentication = {
enabled = "true";
type = "XMPP";
login-url = cfg.hostName;
};
xmpp.client.client-proxy = "focus.${cfg.hostName}";
};
})];
};
services.jibri = mkIf cfg.jibri.enable {
@ -430,7 +570,7 @@ in
xmppDomain = cfg.hostName;
control.muc = {
domain = "internal.${cfg.hostName}";
domain = "internal.auth.${cfg.hostName}";
roomName = "JibriBrewery";
nickname = "jibri";
};
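
For orientation only (not part of this diff): a hedged sketch of how the new options introduced above might be enabled in a NixOS configuration. The host name is a placeholder; the port mirrors the module default shown above.

  {
    services.jitsi-meet = {
      enable = true;
      hostName = "meet.example.org";   # hypothetical host name
      excalidraw.enable = true;        # new Excalidraw whiteboard backend
      excalidraw.port = 3002;          # module default
      secureDomain.enable = true;      # new authenticated room creation
    };
  }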

View File

@ -192,7 +192,6 @@ in {
cntr = handleTestOn ["aarch64-linux" "x86_64-linux"] ./cntr.nix {};
cockpit = handleTest ./cockpit.nix {};
cockroachdb = handleTestOn ["x86_64-linux"] ./cockroachdb.nix {};
code-server = handleTest ./code-server.nix {};
coder = handleTest ./coder.nix {};
collectd = handleTest ./collectd.nix {};
connman = handleTest ./connman.nix {};
@ -750,6 +749,7 @@ in {
signal-desktop = handleTest ./signal-desktop.nix {};
simple = handleTest ./simple.nix {};
sing-box = handleTest ./sing-box.nix {};
slimserver = handleTest ./slimserver.nix {};
slurm = handleTest ./slurm.nix {};
smokeping = handleTest ./smokeping.nix {};
snapcast = handleTest ./snapcast.nix {};

View File

@ -1,22 +0,0 @@
import ./make-test-python.nix ({pkgs, lib, ...}:
{
name = "code-server";
nodes = {
machine = {pkgs, ...}: {
services.code-server = {
enable = true;
auth = "none";
};
};
};
testScript = ''
start_all()
machine.wait_for_unit("code-server.service")
machine.wait_for_open_port(4444)
machine.succeed("curl -k --fail http://localhost:4444", timeout=10)
'';
meta.maintainers = [ lib.maintainers.drupol ];
})

View File

@ -119,11 +119,6 @@ let
package = elk.elasticsearch;
};
kibana = {
enable = true;
package = elk.kibana;
};
elasticsearch-curator = {
enable = true;
actionYAML = ''
@ -217,13 +212,6 @@ let
one.wait_until_succeeds("cat /tmp/logstash.out | grep flowers")
one.wait_until_succeeds("cat /tmp/logstash.out | grep -v dragons")
with subtest("Kibana is healthy"):
one.wait_for_unit("kibana.service")
one.wait_until_succeeds(
"curl --silent --show-error --fail-with-body 'http://localhost:5601/api/status'"
+ " | jq -es 'if . == [] then null else .[] | .status.overall.state == \"green\" end'"
)
with subtest("Metricbeat is running"):
one.wait_for_unit("metricbeat.service")
@ -274,7 +262,6 @@ in {
# name = "elk-7";
# elasticsearch = pkgs.elasticsearch7-oss;
# logstash = pkgs.logstash7-oss;
# kibana = pkgs.kibana7-oss;
# filebeat = pkgs.filebeat7;
# metricbeat = pkgs.metricbeat7;
# };
@ -282,7 +269,6 @@ in {
ELK-7 = mkElkTest "elk-7" {
elasticsearch = pkgs.elasticsearch7;
logstash = pkgs.logstash7;
kibana = pkgs.kibana7;
filebeat = pkgs.filebeat7;
metricbeat = pkgs.metricbeat7;
};

View File

@ -0,0 +1,47 @@
import ./make-test-python.nix ({ pkgs, ...} : {
name = "slimserver";
meta.maintainers = with pkgs.lib.maintainers; [ adamcstephens ];
nodes.machine = { ... }: {
services.slimserver.enable = true;
services.squeezelite = {
enable = true;
extraArguments = "-s 127.0.0.1 -d slimproto=info";
};
sound.enable = true;
boot.initrd.kernelModules = ["snd-dummy"];
};
testScript =
''
import json
rpc_get_player = {
"id": 1,
"method": "slim.request",
"params":[0,["player", "id", "0", "?"]]
}
with subtest("slimserver is started"):
machine.wait_for_unit("slimserver.service")
# give slimserver a moment to report errors
machine.sleep(2)
with subtest('slimserver module errors are not reported'):
machine.fail("journalctl -u slimserver.service | grep 'throw_exception'")
machine.fail("journalctl -u slimserver.service | grep 'not installed'")
machine.fail("journalctl -u slimserver.service | grep 'not found'")
machine.fail("journalctl -u slimserver.service | grep 'The following CPAN modules were found but cannot work with Logitech Media Server'")
machine.fail("journalctl -u slimserver.service | grep 'please use the buildme.sh'")
with subtest('slimserver is ready'):
machine.wait_for_open_port(9000)
machine.wait_until_succeeds("journalctl -u slimserver.service | grep 'Completed dbOptimize Scan'")
with subtest("squeezelite player successfully connects to slimserver"):
machine.wait_for_unit("squeezelite.service")
machine.wait_until_succeeds("journalctl -u squeezelite.service | grep 'slimproto:937 connected'")
player_mac = machine.wait_until_succeeds("journalctl -eu squeezelite.service | grep 'sendHELO:148 mac:'").strip().split(" ")[-1]
player_id = machine.succeed(f"curl http://localhost:9000/jsonrpc.js -g -X POST -d '{json.dumps(rpc_get_player)}'")
assert player_mac == json.loads(player_id)["result"]["_id"], "squeezelite player not found"
'';
})

View File

@ -252,6 +252,35 @@ in
'';
};
garbage-collect-entry = makeTest {
name = "systemd-boot-switch-test";
meta.maintainers = with pkgs.lib.maintainers; [ julienmalka ];
nodes = {
inherit common;
machine = { pkgs, nodes, ... }: {
imports = [ common ];
# These are configs for different nodes, but we'll use them here in `machine`
system.extraDependencies = [
nodes.common.system.build.toplevel
];
};
};
testScript = { nodes, ... }:
let
baseSystem = nodes.common.system.build.toplevel;
in
''
machine.succeed("nix-env -p /nix/var/nix/profiles/system --set ${baseSystem}")
machine.succeed("nix-env -p /nix/var/nix/profiles/system --delete-generations 1")
machine.succeed("${baseSystem}/bin/switch-to-configuration boot")
machine.fail("test -e /boot/loader/entries/nixos-generation-1.conf")
machine.succeed("test -e /boot/loader/entries/nixos-generation-2.conf")
'';
};
# Some UEFI firmwares fail on large reads. Now that systemd-boot loads initrd
# itself, systems with such firmware won't boot without this fix
uefiLargeFileWorkaround = makeTest {

View File

@ -22,6 +22,7 @@
, openssl
, portaudioSupport ? stdenv.isDarwin
, portaudio
, slimserver
, AudioToolbox
, AudioUnit
, Carbon
@ -95,7 +96,10 @@ stdenv.mkDerivation {
runHook postInstall
'';
passthru.updateScript = ./update.sh;
passthru = {
inherit (slimserver) tests;
updateScript = ./update.sh;
};
meta = with lib; {
description = "Lightweight headless squeezebox client emulator";

View File

@ -2,7 +2,7 @@
let
pname = "erigon";
version = "2.53.1";
version = "2.54.0";
in
buildGoModule {
inherit pname version;
@ -11,11 +11,11 @@ buildGoModule {
owner = "ledgerwatch";
repo = pname;
rev = "v${version}";
hash = "sha256-Gsrt/+6fhpwg3DzPtXPj9T9VPaMIaRcYBdWuFOotsbA=";
hash = "sha256-1kgbIg/3SvVT83UfwAYUixs1RQk4PP1quiOcI1mzbZ0=";
fetchSubmodules = true;
};
vendorHash = "sha256-zsLPqcLCZSnhlFWvNXZJwlfS+NsaTS07TmWd+x4ZPXA=";
vendorHash = "sha256-Gr9mrME8/ZDxp2ORKessNhfguklDf+jC4RSpzLOSBhQ=";
proxyVendor = true;
# Build errors in mdbx when format hardening is enabled:

View File

@ -847,6 +847,10 @@ self: super: {
dependencies = with self; [ plenary-nvim ];
};
neotest = super.neotest.overrideAttrs {
dependencies = with self; [ plenary-nvim ];
};
neo-tree-nvim = super.neo-tree-nvim.overrideAttrs {
dependencies = with self; [ plenary-nvim nui-nvim ];
};

View File

@ -1,6 +1,13 @@
{ mkDerivation, lib, fetchFromGitHub, qmake, qtbase, qtdeclarative }:
mkDerivation rec {
{ stdenv
, lib
, fetchFromGitHub
, qmake
, qtbase
, qtdeclarative
, qtquickcontrols
, wrapQtAppsHook
}:
stdenv.mkDerivation rec {
pname = "firebird-emu";
version = "1.6";
@ -12,16 +19,23 @@ mkDerivation rec {
hash = "sha256-ZptjlnOiF+hKuKYvBFJL95H5YQuR99d4biOco/MVEmE=";
};
nativeBuildInputs = [ qmake ];
# work around https://github.com/NixOS/nixpkgs/issues/19098
env.NIX_CFLAGS_COMPILE = lib.optionalString (stdenv.cc.isClang && stdenv.isDarwin) "-fno-lto";
buildInputs = [ qtbase qtdeclarative ];
nativeBuildInputs = [ wrapQtAppsHook qmake ];
buildInputs = [ qtbase qtdeclarative qtquickcontrols ];
postInstall = lib.optionalString stdenv.hostPlatform.isDarwin ''
mkdir $out/Applications
mv $out/bin/${pname}.app $out/Applications/
'';
meta = {
homepage = "https://github.com/nspire-emus/firebird";
description = "Third-party multi-platform emulator of the ARM-based TI-Nspire calculators";
license = lib.licenses.gpl3;
maintainers = with lib.maintainers; [ pneumaticat ];
# Only tested on Linux, but likely possible to build on, e.g. macOS
platforms = lib.platforms.linux;
platforms = lib.platforms.unix;
};
}

View File

@ -12,14 +12,14 @@
python3Packages.buildPythonPackage rec {
pname = "hydrus";
version = "551";
version = "552";
format = "other";
src = fetchFromGitHub {
owner = "hydrusnetwork";
repo = "hydrus";
rev = "refs/tags/v${version}";
hash = "sha256-P/U44ndfucbRnwGLdSnnA0VE4K40zPz3wtNpQj8rh5Q=";
hash = "sha256-MaS9WxYlbQ7V/2mrETKS0UyWb5IzCrSwDcp4UlVS3zk=";
};
nativeBuildInputs = [

View File

@ -15,9 +15,9 @@
version = "2023-09-12";
};
};
hash = "sha256-UR8EowMYZ668qQ/6C9C/M+0HSguyUGKVhY46FTG+ShA=";
hash_deb_amd64 = "sha256-PIBWkCwU/7N242wR/WmpJDDJKLDGk/sJxPxa05bOQSE=";
version = "119.0.6045.123";
hash = "sha256-8xPm3vNF0HjfL7ewTz7iz7GMfiJi6mhMK1YSC7VeoSM=";
hash_deb_amd64 = "sha256-xAm7bPsnnJD7UWNTtHKMv5enHo3rM9w0M81QPqZVlP4=";
version = "119.0.6045.159";
};
ungoogled-chromium = {
deps = {
@ -28,12 +28,12 @@
version = "2023-09-12";
};
ungoogled-patches = {
hash = "sha256-4EbfM62KuG8nHrYWwp183V5G9ac7FICjpyJahnKbQjE=";
rev = "119.0.6045.123-1";
hash = "sha256-kgUrYXy2avfwfRckSYI6YPMW1uuvl2Osg4Vr9Q1ksMc=";
rev = "119.0.6045.159-1";
};
};
hash = "sha256-UR8EowMYZ668qQ/6C9C/M+0HSguyUGKVhY46FTG+ShA=";
hash_deb_amd64 = "sha256-PIBWkCwU/7N242wR/WmpJDDJKLDGk/sJxPxa05bOQSE=";
version = "119.0.6045.123";
hash = "sha256-8xPm3vNF0HjfL7ewTz7iz7GMfiJi6mhMK1YSC7VeoSM=";
hash_deb_amd64 = "sha256-xAm7bPsnnJD7UWNTtHKMv5enHo3rM9w0M81QPqZVlP4=";
version = "119.0.6045.159";
};
}

View File

@ -38,20 +38,20 @@ let
singularity = callPackage
(import ./generic.nix rec {
pname = "singularity-ce";
version = "4.0.1";
version = "4.0.2";
projectName = "singularity";
src = fetchFromGitHub {
owner = "sylabs";
repo = "singularity";
rev = "refs/tags/v${version}";
hash = "sha256-rdpIAiLh4mlSu+1UUDN79gIzxy5X5wOB5XOW9oBm+HU=";
hash = "sha256-R+vAKYR4lJmC7PIITYyg4UeGYjGXoPqqUai3HmPzwG0=";
};
# Update by running
# nix-prefetch -E "{ sha256 }: ((import ./. { }).singularity.override { vendorHash = sha256; }).goModules"
# at the root directory of the Nixpkgs repository
vendorHash = "sha256-kV4Yu9MBoF8spJroWqLOUt2v8YV79AoNUG9hYgPgXRc=";
vendorHash = "sha256-z3VozeMpaqh4ddZxB3xqo25Gm+8JYeIwASOq+Mmerr4=";
# Do not build conmon and squashfuse from the Git submodule sources,
# Use Nixpkgs provided version

View File

@ -1,4 +1,9 @@
{ stdenv, lib, fetchFromGitHub, rustPlatform, libX11, libXinerama }:
{ lib
, fetchFromGitHub
, rustPlatform
, libX11
, libXinerama
}:
let
rpathLibs = [ libXinerama libX11 ];
@ -6,16 +11,16 @@ in
rustPlatform.buildRustPackage rec {
pname = "leftwm";
version = "0.4.2";
version = "0.5.1";
src = fetchFromGitHub {
owner = "leftwm";
repo = "leftwm";
rev = version;
sha256 = "sha256-SjEp0gQHwq3Omhx/EPnyLeQJ50Ov0rHDxmYVWBwIDBs=";
rev = "refs/tags/${version}";
hash = "sha256-wn5DurPWFwSUtc5naEL4lBSQpKWTJkugpN9mKx+Ed2Y=";
};
cargoSha256 = "sha256-kdGqnfzO+Ev9QeZcZqISPTehEXZzCWT5S8p6JbTBreE=";
cargoHash = "sha256-TylRxdpAVuGtZ3Lm8je6FZ0JUwetBi6mOGRoT2M3Jyk=";
buildInputs = rpathLibs;
@ -23,16 +28,18 @@ rustPlatform.buildRustPackage rec {
for p in $out/bin/left*; do
patchelf --set-rpath "${lib.makeLibraryPath rpathLibs}" $p
done
install -D -m 0555 leftwm/doc/leftwm.1 $out/share/man/man1/leftwm.1
'';
dontPatchELF = true;
meta = with lib; {
meta = {
description = "A tiling window manager for the adventurer";
homepage = "https://github.com/leftwm/leftwm";
license = licenses.mit;
platforms = platforms.linux;
maintainers = with maintainers; [ yanganto ];
changelog = "https://github.com/leftwm/leftwm/blob/${version}/CHANGELOG";
license = lib.licenses.mit;
platforms = lib.platforms.linux;
maintainers = with lib.maintainers; [ yanganto ];
changelog = "https://github.com/leftwm/leftwm/blob/${version}/CHANGELOG.md";
};
}

View File

@ -12,16 +12,16 @@
rustPlatform.buildRustPackage rec {
pname = "cyme";
version = "1.5.0";
version = "1.5.2";
src = fetchFromGitHub {
owner = "tuna-f1sh";
repo = "cyme";
rev = "v${version}";
hash = "sha256-UXh97pHJ9wa/xSslHLB7WVDwLKJYvLPgmPX8RvKrsTI=";
hash = "sha256-Y5TcRcbqarcKRDWCI36YhbLJFU+yrpAE3vRGArbfr0U=";
};
cargoHash = "sha256-hSd53K50Y4K/fYGfsT2fHUaipVSpeYN6/EOFlv4ocuE=";
cargoHash = "sha256-ycFNNTZ7AU4WRnf1+RJB7KxQKVdJbubB28tS/GyU0bI=";
nativeBuildInputs = [
pkg-config

View File

@ -9,9 +9,6 @@
, enableMoltenVKCompat ? false
}:
let
isCross = stdenv.hostPlatform != stdenv.targetPlatform;
in
stdenv.mkDerivation (finalAttrs: {
pname = "dxvk";
version = "1.10.3";
@ -36,15 +33,10 @@ stdenv.mkDerivation (finalAttrs: {
nativeBuildInputs = [ glslang meson ninja ];
buildInputs = [ windows.pthreads ];
mesonFlags =
let
arch = if stdenv.is32bit then "32" else "64";
in
[
"--buildtype" "release"
"--prefix" "${placeholder "out"}"
]
++ lib.optionals isCross [ "--cross-file" "build-win${arch}.txt" ];
mesonFlags = [
"--buildtype" "release"
"--prefix" "${placeholder "out"}"
];
meta = {
description = "A Vulkan-based translation layer for Direct3D 9/10/11";

View File

@ -19,7 +19,6 @@
assert !sdl2Support || !glfwSupport;
let
isCross = stdenv.hostPlatform != stdenv.targetPlatform;
isWindows = stdenv.hostPlatform.uname.system == "Windows";
in
stdenv.mkDerivation (finalAttrs: {
@ -51,18 +50,12 @@ stdenv.mkDerivation (finalAttrs: {
mkdir -p include/spirv/include include/vulkan/include
'';
mesonFlags =
let
arch = if stdenv.is32bit then "32" else "64";
in
[
"--buildtype" "release"
"--prefix" "${placeholder "out"}"
]
++ lib.optionals isCross [ "--cross-file" "build-win${arch}.txt" ]
++ lib.optional glfwSupport "-Ddxvk_native_wsi=glfw";
mesonFlags = [
"--buildtype" "release"
"--prefix" "${placeholder "out"}"
] ++ lib.optional glfwSupport "-Ddxvk_native_wsi=glfw";
doCheck = !isCross;
doCheck = true;
passthru.updateScript = gitUpdater { rev-prefix = "v"; };

View File

@ -0,0 +1,48 @@
{ lib
, rustPlatform
, fetchFromGitea
, pkg-config
, openssl
, nix
}:
let
cargoFlags = [ "-p" "nix-web" ];
in
rustPlatform.buildRustPackage rec {
pname = "nix-web";
version = "0.1.0";
src = fetchFromGitea {
domain = "codeberg.org";
owner = "gorgon";
repo = "gorgon";
rev = "nix-web-v${version}";
hash = "sha256-+IDvoMRuMt1nS69yFhPPVs+s6Dj0dgXVdjjd9f3+spk=";
};
cargoHash = "sha256-uVBfIw++MRxgVAC+KzGVuMZra8oktUfHcZQk90FF1a8=";
nativeBuildInputs = [ pkg-config ];
buildInputs = [ openssl ];
postPatch = ''
substituteInPlace nix-web/nix-web.service \
--replace 'ExecStart=nix-web' "ExecStart=$out/bin/nix-web"
'';
postInstall = ''
install -m 644 -D nix-web/nix-web.service $out/lib/systemd/system/nix-web.service
'';
cargoBuildFlags = cargoFlags;
cargoTestFlags = cargoFlags;
NIX_WEB_BUILD_NIX_CLI_PATH = "${nix}/bin/nix";
meta = with lib; {
description = "Web interface for the Nix store";
homepage = "https://codeberg.org/gorgon/gorgon/src/branch/main/nix-web";
license = licenses.eupl12;
maintainers = with maintainers; [ embr ];
mainProgram = "nix-web";
};
}

View File

@ -4,13 +4,13 @@
}:
buildGoModule rec {
pname = "nom";
version = "2.0.2";
version = "2.0.5";
src = fetchFromGitHub {
owner = "guyfedwards";
repo = "nom";
rev = "v${version}";
hash = "sha256-6tk8NRuBbRMoaz3CmUUOC6thxIgjk/MWl50+YgQ6l5o=";
hash = "sha256-mYE8cu7qHRyG/pZSr4u6tMEF3ZM5Qz+CX+oLf/chwl4=";
};
vendorHash = "sha256-fP6yxfIQoVaBC9hYcrCyo3YP3ntEVDbDTwKMO9TdyDI=";

View File

@ -1,27 +1,49 @@
{ config
, lib
{ lib
, stdenv
, fetchFromGitHub
, pkgsBuildBuild
, pkg-config
, cmake
, ninja
, git
, doxygen
, sphinx
, libxml2
, libxcrypt
, libedit
, libffi
, libpfm
, mpfr
, zlib
, ncurses
, doxygen
, sphinx
, which
, sysctl
, python3Packages
, buildDocs ? true
, buildMan ? true
, buildTests ? true
, llvmTargetsToBuild ? [ "NATIVE" ] # "NATIVE" resolves into x86 or aarch64 depending on stdenv
, llvmProjectsToBuild ? [ "llvm" "mlir" ]
}:
stdenv.mkDerivation (finalAttrs: {
let
llvmNativeTarget =
if stdenv.hostPlatform.isx86_64 then "X86"
else if stdenv.hostPlatform.isAarch64 then "AArch64"
else throw "Currently unsupported LLVM platform '${stdenv.hostPlatform.config}'";
inferNativeTarget = t: if t == "NATIVE" then llvmNativeTarget else t;
llvmTargetsToBuild' = [ "AMDGPU" "NVPTX" ] ++ builtins.map inferNativeTarget llvmTargetsToBuild;
# This LLVM version can't seem to find pygments/pyyaml,
# but a later update will likely fix this (openai-triton-2.1.0)
python =
if buildTests
then python3Packages.python.withPackages (p: with p; [ psutil pygments pyyaml ])
else python3Packages.python;
isNative = stdenv.hostPlatform == stdenv.buildPlatform;
in stdenv.mkDerivation (finalAttrs: {
pname = "openai-triton-llvm";
version = "14.0.6-f28c006a5895";
@ -33,7 +55,8 @@ stdenv.mkDerivation (finalAttrs: {
"man"
];
# See https://github.com/openai/triton/blob/main/python/setup.py and https://github.com/ptillet/triton-llvm-releases/releases
# See https://github.com/openai/triton/blob/main/python/setup.py
# and https://github.com/ptillet/triton-llvm-releases/releases
src = fetchFromGitHub {
owner = "llvm";
repo = "llvm-project";
@ -46,7 +69,7 @@ stdenv.mkDerivation (finalAttrs: {
cmake
ninja
git
python3Packages.python
python
] ++ lib.optionals (buildDocs || buildMan) [
doxygen
sphinx
@ -58,6 +81,7 @@ stdenv.mkDerivation (finalAttrs: {
libxcrypt
libedit
libffi
libpfm
mpfr
];
@ -69,37 +93,55 @@ stdenv.mkDerivation (finalAttrs: {
sourceRoot = "${finalAttrs.src.name}/llvm";
cmakeFlags = [
"-DLLVM_TARGETS_TO_BUILD=${
let
# Targets can be found in
# https://github.com/llvm/llvm-project/tree/f28c006a5895fc0e329fe15fead81e37457cb1d1/clang/lib/Basic/Targets
# NOTE: Unsure of how "host" would function, especially given that we might be cross-compiling.
llvmTargets = [ "AMDGPU" "NVPTX" ]
++ lib.optionals stdenv.isAarch64 [ "AArch64" ]
++ lib.optionals stdenv.isx86_64 [ "X86" ];
in
lib.concatStringsSep ";" llvmTargets
}"
"-DLLVM_ENABLE_PROJECTS=llvm;mlir"
"-DLLVM_INSTALL_UTILS=ON"
] ++ lib.optionals (buildDocs || buildMan) [
"-DLLVM_INCLUDE_DOCS=ON"
"-DMLIR_INCLUDE_DOCS=ON"
"-DLLVM_BUILD_DOCS=ON"
# "-DLLVM_ENABLE_DOXYGEN=ON" Way too slow, only uses one core
"-DLLVM_ENABLE_SPHINX=ON"
"-DSPHINX_OUTPUT_HTML=ON"
"-DSPHINX_OUTPUT_MAN=ON"
"-DSPHINX_WARNINGS_AS_ERRORS=OFF"
] ++ lib.optionals buildTests [
"-DLLVM_INCLUDE_TESTS=ON"
"-DMLIR_INCLUDE_TESTS=ON"
"-DLLVM_BUILD_TESTS=ON"
];
(lib.cmakeFeature "LLVM_TARGETS_TO_BUILD" (lib.concatStringsSep ";" llvmTargetsToBuild'))
(lib.cmakeFeature "LLVM_ENABLE_PROJECTS" (lib.concatStringsSep ";" llvmProjectsToBuild))
(lib.cmakeFeature "LLVM_HOST_TRIPLE" stdenv.hostPlatform.config)
(lib.cmakeFeature "LLVM_DEFAULT_TARGET_TRIPLE" stdenv.hostPlatform.config)
(lib.cmakeBool "LLVM_INSTALL_UTILS" true)
(lib.cmakeBool "LLVM_INCLUDE_DOCS" (buildDocs || buildMan))
(lib.cmakeBool "MLIR_INCLUDE_DOCS" (buildDocs || buildMan))
(lib.cmakeBool "LLVM_BUILD_DOCS" (buildDocs || buildMan))
# Way too slow, only uses one core
# (lib.cmakeBool "LLVM_ENABLE_DOXYGEN" (buildDocs || buildMan))
(lib.cmakeBool "LLVM_ENABLE_SPHINX" (buildDocs || buildMan))
(lib.cmakeBool "SPHINX_OUTPUT_HTML" buildDocs)
(lib.cmakeBool "SPHINX_OUTPUT_MAN" buildMan)
(lib.cmakeBool "SPHINX_WARNINGS_AS_ERRORS" false)
(lib.cmakeBool "LLVM_INCLUDE_TESTS" buildTests)
(lib.cmakeBool "MLIR_INCLUDE_TESTS" buildTests)
(lib.cmakeBool "LLVM_BUILD_TESTS" buildTests)
# Cross compilation code taken/modified from LLVM 16 derivation
] ++ lib.optionals (!isNative) (let
nativeToolchainFlags = let
nativeCC = pkgsBuildBuild.targetPackages.stdenv.cc;
nativeBintools = nativeCC.bintools.bintools;
in [
(lib.cmakeFeature "CMAKE_C_COMPILER" "${nativeCC}/bin/${nativeCC.targetPrefix}cc")
(lib.cmakeFeature "CMAKE_CXX_COMPILER" "${nativeCC}/bin/${nativeCC.targetPrefix}c++")
(lib.cmakeFeature "CMAKE_AR" "${nativeBintools}/bin/${nativeBintools.targetPrefix}ar")
(lib.cmakeFeature "CMAKE_STRIP" "${nativeBintools}/bin/${nativeBintools.targetPrefix}strip")
(lib.cmakeFeature "CMAKE_RANLIB" "${nativeBintools}/bin/${nativeBintools.targetPrefix}ranlib")
];
# We need to repass the custom GNUInstallDirs values, otherwise CMake
# will choose them for us, leading to wrong results in llvm-config-native
nativeInstallFlags = [
(lib.cmakeFeature "CMAKE_INSTALL_PREFIX" (placeholder "out"))
(lib.cmakeFeature "CMAKE_INSTALL_BINDIR" "${placeholder "out"}/bin")
(lib.cmakeFeature "CMAKE_INSTALL_INCLUDEDIR" "${placeholder "out"}/include")
(lib.cmakeFeature "CMAKE_INSTALL_LIBDIR" "${placeholder "out"}/lib")
(lib.cmakeFeature "CMAKE_INSTALL_LIBEXECDIR" "${placeholder "out"}/libexec")
];
in [
(lib.cmakeBool "CMAKE_CROSSCOMPILING" true)
(lib.cmakeFeature "CROSS_TOOLCHAIN_FLAGS_NATIVE" (lib.concatStringsSep ";"
(lib.concatLists [ nativeToolchainFlags nativeInstallFlags ])))
]);
postPatch = ''
# `CMake Error: cannot write to file "/build/source/llvm/build/lib/cmake/mlir/MLIRTargets.cmake": Permission denied`
chmod +w -R ../mlir
patchShebangs ../mlir/test/mlir-reduce
# FileSystem permissions tests fail with various special bits
rm test/tools/llvm-objcopy/ELF/mirror-permissions-unix.test
@ -107,9 +149,21 @@ stdenv.mkDerivation (finalAttrs: {
substituteInPlace unittests/Support/CMakeLists.txt \
--replace "Path.cpp" ""
'' + lib.optionalString stdenv.isAarch64 ''
# Not sure why this fails
rm test/tools/llvm-exegesis/AArch64/latency-by-opcode-name.s
'';
postInstall = lib.optionalString (!isNative) ''
cp -a NATIVE/bin/llvm-config $out/bin/llvm-config-native
'';
doCheck = buildTests;
nativeCheckInputs = [ which ]
++ lib.optionals stdenv.isDarwin [ sysctl ];
checkTarget = "check-all";
requiredSystemFeatures = [ "big-parallel" ];
meta = with lib; {
@ -117,9 +171,6 @@ stdenv.mkDerivation (finalAttrs: {
homepage = "https://github.com/llvm/llvm-project";
license = with licenses; [ ncsa ];
maintainers = with maintainers; [ SomeoneSerge Madouura ];
platforms = platforms.linux;
# Consider the derivation broken if we're not building for CUDA or ROCm, or if we're building for aarch64
# and ROCm is enabled. See https://github.com/RadeonOpenCompute/ROCm/issues/1831#issuecomment-1278205344.
broken = stdenv.isAarch64 && !config.cudaSupport;
platforms = with platforms; aarch64 ++ x86;
};
})

View File

@ -2,7 +2,7 @@
python3Packages.buildPythonApplication rec {
pname = "pyprland";
version = "1.5.3";
version = "1.6.0";
format = "pyproject";
disabled = python3Packages.pythonOlder "3.10";
@ -11,7 +11,7 @@ python3Packages.buildPythonApplication rec {
owner = "hyprland-community";
repo = "pyprland";
rev = version;
hash = "sha256-PmE52pbEhc4+SkuyEeVUZoMVcbgK6CYA8HNwYPpgaIw=";
hash = "sha256-QbbBpaBIlU4IoU/NM7igDap8TxOKePQ8JI3ZlH944Bs=";
};
nativeBuildInputs = with python3Packages; [ poetry-core ];

View File

@ -1,11 +1,8 @@
{ lib
, callPackage
, symlinkJoin
, nmap
, rockyou
, runtimeShell
, seclists
, symlinkJoin
, tree
, wfuzz
, lists ? [
nmap
@ -13,33 +10,39 @@
seclists
wfuzz
]
, writeShellScriptBin
, tree
}:
let
wordlistsCollection = symlinkJoin {
name = "wordlists-collection";
paths = lists;
symlinkJoin rec {
pname = "wordlists";
version = "unstable-2023-10-10";
postBuild = ''
shopt -s extglob
rm -rf $out/!(share)
rm -rf $out/share/!(wordlists)
shopt -u extglob
'';
};
name = "${pname}-${version}";
paths = lists;
postBuild = ''
mkdir -p $out/bin
# Create a command to show the location of the links.
cat >> $out/bin/wordlists << __EOF__
#!${runtimeShell}
${tree}/bin/tree ${placeholder "out"}/share/wordlists
__EOF__
chmod +x $out/bin/wordlists
# Create a handy command for easy access to the wordlists.
# e.g.: `cat "$(wordlists_path)/rockyou.txt"`, or `ls "$(wordlists_path)/dirbuster"`
cat >> $out/bin/wordlists_path << __EOF__
#!${runtimeShell}
printf "${placeholder "out"}/share/wordlists\n"
__EOF__
chmod +x $out/bin/wordlists_path
# A command to show the location of the links.
wordlistsBin = writeShellScriptBin "wordlists" ''
${lib.getExe tree} ${wordlistsCollection}/share/wordlists
'';
# A command for easy access to the wordlists.
wordlistsPathBin = writeShellScriptBin "wordlists_path" ''
printf "${wordlistsCollection}/share/wordlists\n"
'';
in symlinkJoin {
name = "wordlists";
paths = [
wordlistsCollection
wordlistsBin
wordlistsPathBin
];
meta = with lib; {
description = "A collection of wordlists useful for security testing";
@ -65,6 +68,6 @@ symlinkJoin rec {
If you want to add a new package that provides wordlist/s the convention
is to copy it to {file}`$out/share/wordlists/myNewWordlist`.
'';
maintainers = with maintainers; [ janik pamplemousse ];
maintainers = with maintainers; [ janik pamplemousse h7x4 ];
};
}

View File

@ -2,7 +2,7 @@
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_14"}:
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_18"}:
let
nodeEnv = import ../../../node-packages/node-env.nix {

View File

@ -1,14 +0,0 @@
diff --git a/lib/Driver/ToolChains/CommonArgs.cpp b/lib/Driver/ToolChains/CommonArgs.cpp
index 00bd60bc24bb..17416b0bd3c0 100644
--- a/lib/Driver/ToolChains/CommonArgs.cpp
+++ b/lib/Driver/ToolChains/CommonArgs.cpp
@@ -376,8 +376,7 @@ void tools::AddGoldPlugin(const ToolChain &ToolChain, const ArgList &Args,
// as gold requires -plugin to come before any -plugin-opt that -Wl might
// forward.
CmdArgs.push_back("-plugin");
- std::string Plugin =
- ToolChain.getDriver().Dir + "/../lib" CLANG_LIBDIR_SUFFIX "/LLVMgold.so";
+ std::string Plugin = "@libllvmLibdir@" "/LLVMgold.so";
CmdArgs.push_back(Args.MakeArgString(Plugin));
// Try to pass driver level flags relevant to LTO code generation down to

View File

@ -1,127 +0,0 @@
{ lib, stdenv, llvm_meta, fetch, substituteAll, cmake, libxml2, libllvm, version, clang-tools-extra_src, python3
, buildLlvmTools
, fixDarwinDylibNames
, enableManpages ? false
}:
let
self = stdenv.mkDerivation ({
pname = "clang";
inherit version;
src = fetch "cfe" "0018520c4qxf5hgjdqgpz2dgl3faf4gsz87fdlb8zdmx99rfk77s";
unpackPhase = ''
unpackFile $src
mv cfe-${version}* clang
sourceRoot=$PWD/clang
unpackFile ${clang-tools-extra_src}
mv clang-tools-extra-* $sourceRoot/tools/extra
'';
nativeBuildInputs = [ cmake python3 ]
++ lib.optional enableManpages python3.pkgs.sphinx
++ lib.optional stdenv.hostPlatform.isDarwin fixDarwinDylibNames;
buildInputs = [ libxml2 libllvm ];
cmakeFlags = [
"-DCMAKE_CXX_FLAGS=-std=c++11"
"-DLLVM_ENABLE_RTTI=ON"
"-DLLVM_CONFIG_PATH=${libllvm.dev}/bin/llvm-config${lib.optionalString (stdenv.hostPlatform != stdenv.buildPlatform) "-native"}"
] ++ lib.optionals enableManpages [
"-DCLANG_INCLUDE_DOCS=ON"
"-DLLVM_ENABLE_SPHINX=ON"
"-DSPHINX_OUTPUT_MAN=ON"
"-DSPHINX_OUTPUT_HTML=OFF"
"-DSPHINX_WARNINGS_AS_ERRORS=OFF"
] ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
"-DLLVM_TABLEGEN_EXE=${buildLlvmTools.llvm}/bin/llvm-tblgen"
"-DCLANG_TABLEGEN=${buildLlvmTools.libclang.dev}/bin/clang-tblgen"
];
patches = [
../../common/clang/5-8-purity.patch
./gnu-install-dirs.patch
(substituteAll {
src = ./LLVMgold-path.patch;
libllvmLibdir = "${libllvm.lib}/lib";
})
];
postPatch = ''
sed -i -e 's/DriverArgs.hasArg(options::OPT_nostdlibinc)/true/' \
-e 's/Args.hasArg(options::OPT_nostdlibinc)/true/' \
lib/Driver/ToolChains/*.cpp
'' + lib.optionalString stdenv.hostPlatform.isMusl ''
sed -i -e 's/lgcc_s/lgcc_eh/' lib/Driver/ToolChains/*.cpp
'';
outputs = [ "out" "lib" "dev" "python" ];
postInstall = ''
ln -sv $out/bin/clang $out/bin/cpp
# Move libclang to 'lib' output
moveToOutput "lib/libclang.*" "$lib"
substituteInPlace $out/lib/cmake/clang/ClangTargets-release.cmake \
--replace "\''${_IMPORT_PREFIX}/lib/libclang." "$lib/lib/libclang."
mkdir -p $python/bin $python/share/{clang,scan-view}
mv $out/bin/{git-clang-format,scan-view} $python/bin
if [ -e $out/bin/set-xcode-analyzer ]; then
mv $out/bin/set-xcode-analyzer $python/bin
fi
mv $out/share/clang/*.py $python/share/clang
mv $out/share/scan-view/*.py $python/share/scan-view
rm $out/bin/c-index-test
patchShebangs $python/bin
mkdir -p $dev/bin
cp bin/clang-tblgen $dev/bin
'';
passthru = {
inherit libllvm;
isClang = true;
hardeningUnsupportedFlags = [ "fortify3" ];
};
meta = llvm_meta // {
homepage = "https://clang.llvm.org/";
description = "A C language family frontend for LLVM";
longDescription = ''
The Clang project provides a language front-end and tooling
infrastructure for languages in the C language family (C, C++, Objective
C/C++, OpenCL, CUDA, and RenderScript) for the LLVM project.
It aims to deliver amazingly fast compiles, extremely useful error and
warning messages and to provide a platform for building great source
level tools. The Clang Static Analyzer and clang-tidy are tools that
automatically find bugs in your code, and are great examples of the sort
of tools that can be built using the Clang frontend as a library to
parse C/C++ code.
'';
mainProgram = "clang";
};
} // lib.optionalAttrs enableManpages {
pname = "clang-manpages";
buildPhase = ''
make docs-clang-man
'';
installPhase = ''
mkdir -p $out/share/man/man1
# Manually install clang manpage
cp docs/man/*.1 $out/share/man/man1/
'';
outputs = [ "out" ];
doCheck = false;
meta = llvm_meta // {
description = "man page for Clang ${version}";
};
});
in self

View File

@ -1,242 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 2667b1d6892e..87c5ad58738f 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -5,6 +5,8 @@ cmake_minimum_required(VERSION 3.4.3)
if( CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR )
project(Clang)
+ include(GNUInstallDirs)
+
# Rely on llvm-config.
set(CONFIG_OUTPUT)
find_program(LLVM_CONFIG "llvm-config")
@@ -344,7 +346,7 @@ include_directories(BEFORE
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
install(DIRECTORY include/clang include/clang-c
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
FILES_MATCHING
PATTERN "*.def"
PATTERN "*.h"
@@ -353,7 +355,7 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
)
install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/include/clang
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
FILES_MATCHING
PATTERN "CMakeFiles" EXCLUDE
PATTERN "*.inc"
@@ -361,7 +363,7 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
)
install(PROGRAMS utils/bash-autocomplete.sh
- DESTINATION share/clang
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/clang
)
endif()
diff --git a/cmake/modules/AddClang.cmake b/cmake/modules/AddClang.cmake
index e657059744a4..19da44638fe6 100644
--- a/cmake/modules/AddClang.cmake
+++ b/cmake/modules/AddClang.cmake
@@ -99,9 +99,9 @@ macro(add_clang_library name)
install(TARGETS ${name}
COMPONENT ${name}
${export_to_clangtargets}
- LIBRARY DESTINATION lib${LLVM_LIBDIR_SUFFIX}
- ARCHIVE DESTINATION lib${LLVM_LIBDIR_SUFFIX}
- RUNTIME DESTINATION bin)
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
if (${ARG_SHARED} AND NOT CMAKE_CONFIGURATION_TYPES)
add_custom_target(install-${name}
@@ -143,7 +143,7 @@ macro(add_clang_tool name)
install(TARGETS ${name}
${export_to_clangtargets}
- RUNTIME DESTINATION bin
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
COMPONENT ${name})
if(NOT CMAKE_CONFIGURATION_TYPES)
@@ -160,5 +160,5 @@ endmacro()
macro(add_clang_symlink name dest)
add_llvm_tool_symlink(${name} ${dest} ALWAYS_GENERATE)
# Always generate install targets
- llvm_install_symlink(${name} ${dest} ALWAYS_GENERATE)
+ llvm_install_symlink(${name} ${dest} ${CMAKE_INSTALL_FULL_BINDIR} ALWAYS_GENERATE)
endmacro()
diff --git a/lib/Headers/CMakeLists.txt b/lib/Headers/CMakeLists.txt
index a621c02644e3..e140efc9c83c 100644
--- a/lib/Headers/CMakeLists.txt
+++ b/lib/Headers/CMakeLists.txt
@@ -129,13 +129,13 @@ install(
FILES ${files} ${CMAKE_CURRENT_BINARY_DIR}/arm_neon.h
COMPONENT clang-headers
PERMISSIONS OWNER_READ OWNER_WRITE GROUP_READ WORLD_READ
- DESTINATION lib${LLVM_LIBDIR_SUFFIX}/clang/${CLANG_VERSION}/include)
+ DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}/clang/${CLANG_VERSION}/include)
install(
FILES ${cuda_wrapper_files}
COMPONENT clang-headers
PERMISSIONS OWNER_READ OWNER_WRITE GROUP_READ WORLD_READ
- DESTINATION lib${LLVM_LIBDIR_SUFFIX}/clang/${CLANG_VERSION}/include/cuda_wrappers)
+ DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}/clang/${CLANG_VERSION}/include/cuda_wrappers)
if (NOT CMAKE_CONFIGURATION_TYPES) # don't add this for IDE's.
add_custom_target(install-clang-headers
diff --git a/tools/c-index-test/CMakeLists.txt b/tools/c-index-test/CMakeLists.txt
index ad990e010eef..92e52d05afb9 100644
--- a/tools/c-index-test/CMakeLists.txt
+++ b/tools/c-index-test/CMakeLists.txt
@@ -48,7 +48,7 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
set_property(TARGET c-index-test APPEND PROPERTY INSTALL_RPATH
"@executable_path/../../lib")
else()
- set(INSTALL_DESTINATION bin)
+ set(INSTALL_DESTINATION ${CMAKE_INSTALL_BINDIR})
endif()
install(TARGETS c-index-test
diff --git a/tools/clang-check/CMakeLists.txt b/tools/clang-check/CMakeLists.txt
index 04151a8e0331..13918d91c4ba 100644
--- a/tools/clang-check/CMakeLists.txt
+++ b/tools/clang-check/CMakeLists.txt
@@ -19,4 +19,4 @@ target_link_libraries(clang-check
)
install(TARGETS clang-check
- RUNTIME DESTINATION bin)
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
diff --git a/tools/clang-format/CMakeLists.txt b/tools/clang-format/CMakeLists.txt
index a13633eaefc4..9b0094783690 100644
--- a/tools/clang-format/CMakeLists.txt
+++ b/tools/clang-format/CMakeLists.txt
@@ -20,20 +20,20 @@ if( LLVM_USE_SANITIZE_COVERAGE )
endif()
install(PROGRAMS clang-format-bbedit.applescript
- DESTINATION share/clang
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/clang
COMPONENT clang-format)
install(PROGRAMS clang-format-diff.py
- DESTINATION share/clang
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/clang
COMPONENT clang-format)
install(PROGRAMS clang-format-sublime.py
- DESTINATION share/clang
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/clang
COMPONENT clang-format)
install(PROGRAMS clang-format.el
- DESTINATION share/clang
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/clang
COMPONENT clang-format)
install(PROGRAMS clang-format.py
- DESTINATION share/clang
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/clang
COMPONENT clang-format)
install(PROGRAMS git-clang-format
- DESTINATION bin
+ DESTINATION ${CMAKE_INSTALL_BINDIR}
COMPONENT clang-format)
diff --git a/tools/clang-offload-bundler/CMakeLists.txt b/tools/clang-offload-bundler/CMakeLists.txt
index 6161d08ae587..a003292d1676 100644
--- a/tools/clang-offload-bundler/CMakeLists.txt
+++ b/tools/clang-offload-bundler/CMakeLists.txt
@@ -21,4 +21,4 @@ target_link_libraries(clang-offload-bundler
${CLANG_OFFLOAD_BUNDLER_LIB_DEPS}
)
-install(TARGETS clang-offload-bundler RUNTIME DESTINATION bin)
+install(TARGETS clang-offload-bundler RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
diff --git a/tools/clang-rename/CMakeLists.txt b/tools/clang-rename/CMakeLists.txt
index 771e3bdea6f0..d1396e62b28f 100644
--- a/tools/clang-rename/CMakeLists.txt
+++ b/tools/clang-rename/CMakeLists.txt
@@ -14,11 +14,11 @@ target_link_libraries(clang-rename
clangToolingRefactor
)
-install(TARGETS clang-rename RUNTIME DESTINATION bin)
+install(TARGETS clang-rename RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
install(PROGRAMS clang-rename.py
- DESTINATION share/clang
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/clang
COMPONENT clang-rename)
install(PROGRAMS clang-rename.el
- DESTINATION share/clang
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/clang
COMPONENT clang-rename)
diff --git a/tools/libclang/CMakeLists.txt b/tools/libclang/CMakeLists.txt
index 2dd670307636..1fe576f77ddb 100644
--- a/tools/libclang/CMakeLists.txt
+++ b/tools/libclang/CMakeLists.txt
@@ -121,7 +121,7 @@ endif()
if(INTERNAL_INSTALL_PREFIX)
set(LIBCLANG_HEADERS_INSTALL_DESTINATION "${INTERNAL_INSTALL_PREFIX}/include")
else()
- set(LIBCLANG_HEADERS_INSTALL_DESTINATION include)
+ set(LIBCLANG_HEADERS_INSTALL_DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
endif()
install(DIRECTORY ../../include/clang-c
diff --git a/tools/scan-build/CMakeLists.txt b/tools/scan-build/CMakeLists.txt
index 380379300b09..adfd58ed5f7d 100644
--- a/tools/scan-build/CMakeLists.txt
+++ b/tools/scan-build/CMakeLists.txt
@@ -41,7 +41,7 @@ if(CLANG_INSTALL_SCANBUILD)
${CMAKE_BINARY_DIR}/bin/
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/bin/${BinFile})
list(APPEND Depends ${CMAKE_BINARY_DIR}/bin/${BinFile})
- install(PROGRAMS bin/${BinFile} DESTINATION bin)
+ install(PROGRAMS bin/${BinFile} DESTINATION ${CMAKE_INSTALL_BINDIR})
endforeach()
foreach(LibexecFile ${LibexecFiles})
@@ -53,7 +53,7 @@ if(CLANG_INSTALL_SCANBUILD)
${CMAKE_BINARY_DIR}/libexec/
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/libexec/${LibexecFile})
list(APPEND Depends ${CMAKE_BINARY_DIR}/libexec/${LibexecFile})
- install(PROGRAMS libexec/${LibexecFile} DESTINATION libexec)
+ install(PROGRAMS libexec/${LibexecFile} DESTINATION ${CMAKE_INSTALL_LIBEXECDIR})
endforeach()
foreach(ManPage ${ManPages})
@@ -77,7 +77,7 @@ if(CLANG_INSTALL_SCANBUILD)
${CMAKE_BINARY_DIR}/share/scan-build/
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/share/scan-build/${ShareFile})
list(APPEND Depends ${CMAKE_BINARY_DIR}/share/scan-build/${ShareFile})
- install(FILES share/scan-build/${ShareFile} DESTINATION share/scan-build)
+ install(FILES share/scan-build/${ShareFile} DESTINATION ${CMAKE_INSTALL_DATADIR}/scan-build)
endforeach()
add_custom_target(scan-build ALL DEPENDS ${Depends})
diff --git a/tools/scan-view/CMakeLists.txt b/tools/scan-view/CMakeLists.txt
index b305ca562a72..554bcb379061 100644
--- a/tools/scan-view/CMakeLists.txt
+++ b/tools/scan-view/CMakeLists.txt
@@ -21,7 +21,7 @@ if(CLANG_INSTALL_SCANVIEW)
${CMAKE_BINARY_DIR}/bin/
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/bin/${BinFile})
list(APPEND Depends ${CMAKE_BINARY_DIR}/bin/${BinFile})
- install(PROGRAMS bin/${BinFile} DESTINATION bin)
+ install(PROGRAMS bin/${BinFile} DESTINATION ${CMAKE_INSTALL_BINDIR})
endforeach()
foreach(ShareFile ${ShareFiles})
@@ -33,7 +33,7 @@ if(CLANG_INSTALL_SCANVIEW)
${CMAKE_BINARY_DIR}/share/scan-view/
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/share/${ShareFile})
list(APPEND Depends ${CMAKE_BINARY_DIR}/share/scan-view/${ShareFile})
- install(FILES share/${ShareFile} DESTINATION share/scan-view)
+ install(FILES share/${ShareFile} DESTINATION ${CMAKE_INSTALL_DATADIR}/scan-view)
endforeach()
add_custom_target(scan-view ALL DEPENDS ${Depends})

View File

@ -1,23 +0,0 @@
diff -ur compiler-rt-5.0.2.src/cmake/builtin-config-ix.cmake compiler-rt-5.0.2.src-patched/cmake/builtin-config-ix.cmake
--- compiler-rt-5.0.2.src/cmake/builtin-config-ix.cmake 2017-05-25 00:53:24.000000000 +0900
+++ compiler-rt-5.0.2.src-patched/cmake/builtin-config-ix.cmake 2020-05-10 03:24:24.937433155 +0900
@@ -24,7 +24,7 @@
set(ARM64 aarch64)
-set(ARM32 arm armhf armv6m armv7m armv7em armv7 armv7s armv7k)
+set(ARM32 arm armhf armv6m armv7m armv7em armv7 armv7s armv7k armv7l)
set(X86 i386 i686)
set(X86_64 x86_64)
set(MIPS32 mips mipsel)
diff -ur compiler-rt-5.0.2.src/lib/builtins/CMakeLists.txt compiler-rt-5.0.2.src-patched/lib/builtins/CMakeLists.txt
--- compiler-rt-5.0.2.src/lib/builtins/CMakeLists.txt 2017-07-13 04:33:30.000000000 +0900
+++ compiler-rt-5.0.2.src-patched/lib/builtins/CMakeLists.txt 2020-05-10 03:24:45.945075423 +0900
@@ -444,6 +444,7 @@
set(armv7_SOURCES ${arm_SOURCES})
set(armv7s_SOURCES ${arm_SOURCES})
set(armv7k_SOURCES ${arm_SOURCES})
+set(armv7l_SOURCES ${arm_SOURCES})
set(arm64_SOURCES ${aarch64_SOURCES})
# macho_embedded archs

View File

@ -1,155 +0,0 @@
From 3dec5f3475a26aeb4678627795c4b67c6b7b4785 Mon Sep 17 00:00:00 2001
From: Will Dietz <w@wdtz.org>
Date: Tue, 19 Sep 2017 13:13:06 -0500
Subject: [PATCH] remove codesign use on Apple, disable ios sim testing that
needs it
---
cmake/Modules/AddCompilerRT.cmake | 8 ------
test/asan/CMakeLists.txt | 52 ---------------------------------------
test/tsan/CMakeLists.txt | 47 -----------------------------------
3 files changed, 107 deletions(-)
diff --git a/cmake/Modules/AddCompilerRT.cmake b/cmake/Modules/AddCompilerRT.cmake
index bc5fb9ff7..b64eb4246 100644
--- a/cmake/Modules/AddCompilerRT.cmake
+++ b/cmake/Modules/AddCompilerRT.cmake
@@ -210,14 +210,6 @@ function(add_compiler_rt_runtime name type)
set_target_properties(${libname} PROPERTIES IMPORT_PREFIX "")
set_target_properties(${libname} PROPERTIES IMPORT_SUFFIX ".lib")
endif()
- if(APPLE)
- # Ad-hoc sign the dylibs
- add_custom_command(TARGET ${libname}
- POST_BUILD
- COMMAND codesign --sign - $<TARGET_FILE:${libname}>
- WORKING_DIRECTORY ${COMPILER_RT_LIBRARY_OUTPUT_DIR}
- )
- endif()
endif()
install(TARGETS ${libname}
ARCHIVE DESTINATION ${COMPILER_RT_LIBRARY_INSTALL_DIR}
diff --git a/test/asan/CMakeLists.txt b/test/asan/CMakeLists.txt
index 8bfc15b5c..f23d0f71a 100644
--- a/test/asan/CMakeLists.txt
+++ b/test/asan/CMakeLists.txt
@@ -83,58 +83,6 @@ foreach(arch ${ASAN_TEST_ARCH})
endif()
endforeach()
-# iOS and iOS simulator test suites
-# These are not added into "check-all", in order to run these tests, use
-# "check-asan-iossim-x86_64" and similar. They also require that an extra env
-# variable to select which iOS device or simulator to use, e.g.:
-# SANITIZER_IOSSIM_TEST_DEVICE_IDENTIFIER="iPhone 6"
-if(APPLE)
- set(EXCLUDE_FROM_ALL ON)
-
- set(ASAN_TEST_TARGET_CC ${COMPILER_RT_TEST_COMPILER})
- set(ASAN_TEST_IOS "1")
- pythonize_bool(ASAN_TEST_IOS)
- set(ASAN_TEST_DYNAMIC True)
-
- foreach(arch ${DARWIN_iossim_ARCHS})
- set(ASAN_TEST_IOSSIM "1")
- pythonize_bool(ASAN_TEST_IOSSIM)
- set(ASAN_TEST_TARGET_ARCH ${arch})
- set(ASAN_TEST_TARGET_CFLAGS "-arch ${arch} -isysroot ${DARWIN_iossim_SYSROOT} ${COMPILER_RT_TEST_COMPILER_CFLAGS}")
- set(ASAN_TEST_CONFIG_SUFFIX "-${arch}-iossim")
- get_bits_for_arch(${arch} ASAN_TEST_BITS)
- string(TOUPPER ${arch} ARCH_UPPER_CASE)
- set(CONFIG_NAME "IOSSim${ARCH_UPPER_CASE}Config")
- configure_lit_site_cfg(
- ${CMAKE_CURRENT_SOURCE_DIR}/lit.site.cfg.in
- ${CMAKE_CURRENT_BINARY_DIR}/${CONFIG_NAME}/lit.site.cfg
- )
- add_lit_testsuite(check-asan-iossim-${arch} "AddressSanitizer iOS Simulator ${arch} tests"
- ${CMAKE_CURRENT_BINARY_DIR}/${CONFIG_NAME}/
- DEPENDS ${ASAN_TEST_DEPS})
- endforeach()
-
- foreach (arch ${DARWIN_ios_ARCHS})
- set(ASAN_TEST_IOSSIM "0")
- pythonize_bool(ASAN_TEST_IOSSIM)
- set(ASAN_TEST_TARGET_ARCH ${arch})
- set(ASAN_TEST_TARGET_CFLAGS "-arch ${arch} -isysroot ${DARWIN_ios_SYSROOT} ${COMPILER_RT_TEST_COMPILER_CFLAGS}")
- set(ASAN_TEST_CONFIG_SUFFIX "-${arch}-ios")
- get_bits_for_arch(${arch} ASAN_TEST_BITS)
- string(TOUPPER ${arch} ARCH_UPPER_CASE)
- set(CONFIG_NAME "IOS${ARCH_UPPER_CASE}Config")
- configure_lit_site_cfg(
- ${CMAKE_CURRENT_SOURCE_DIR}/lit.site.cfg.in
- ${CMAKE_CURRENT_BINARY_DIR}/${CONFIG_NAME}/lit.site.cfg
- )
- add_lit_testsuite(check-asan-ios-${arch} "AddressSanitizer iOS ${arch} tests"
- ${CMAKE_CURRENT_BINARY_DIR}/${CONFIG_NAME}/
- DEPENDS ${ASAN_TEST_DEPS})
- endforeach()
-
- set(EXCLUDE_FROM_ALL OFF)
-endif()
-
# Add unit tests.
if(COMPILER_RT_INCLUDE_TESTS)
set(ASAN_TEST_DYNAMIC False)
diff --git a/test/tsan/CMakeLists.txt b/test/tsan/CMakeLists.txt
index a68908612..cde0accb5 100644
--- a/test/tsan/CMakeLists.txt
+++ b/test/tsan/CMakeLists.txt
@@ -42,53 +42,6 @@ foreach(arch ${TSAN_TEST_ARCH})
list(APPEND TSAN_TESTSUITES ${CMAKE_CURRENT_BINARY_DIR}/${CONFIG_NAME})
endforeach()
-# iOS and iOS simulator test suites
-# These are not added into "check-all", in order to run these tests, use
-# "check-tsan-iossim-x86_64" and similar. They also require an extra environment
-# variable to select which iOS device or simulator to use, e.g.:
-# SANITIZER_IOSSIM_TEST_DEVICE_IDENTIFIER="iPhone 6"
-if(APPLE)
- set(EXCLUDE_FROM_ALL ON)
-
- set(TSAN_TEST_TARGET_CC ${COMPILER_RT_TEST_COMPILER})
- set(TSAN_TEST_IOS "1")
- pythonize_bool(TSAN_TEST_IOS)
-
- set(arch "x86_64")
- set(TSAN_TEST_IOSSIM "1")
- pythonize_bool(TSAN_TEST_IOSSIM)
- set(TSAN_TEST_TARGET_ARCH ${arch})
- set(TSAN_TEST_TARGET_CFLAGS "-arch ${arch} -isysroot ${DARWIN_iossim_SYSROOT} ${COMPILER_RT_TEST_COMPILER_CFLAGS}")
- set(TSAN_TEST_CONFIG_SUFFIX "-${arch}-iossim")
- string(TOUPPER ${arch} ARCH_UPPER_CASE)
- set(CONFIG_NAME "IOSSim${ARCH_UPPER_CASE}Config")
- configure_lit_site_cfg(
- ${CMAKE_CURRENT_SOURCE_DIR}/lit.site.cfg.in
- ${CMAKE_CURRENT_BINARY_DIR}/${CONFIG_NAME}/lit.site.cfg
- )
- add_lit_testsuite(check-tsan-iossim-${arch} "ThreadSanitizer iOS Simulator ${arch} tests"
- ${CMAKE_CURRENT_BINARY_DIR}/${CONFIG_NAME}/
- DEPENDS ${TSAN_TEST_DEPS})
-
- set(arch "arm64")
- set(TSAN_TEST_IOSSIM "0")
- pythonize_bool(TSAN_TEST_IOSSIM)
- set(TSAN_TEST_TARGET_ARCH ${arch})
- set(TSAN_TEST_TARGET_CFLAGS "-arch ${arch} -isysroot ${DARWIN_ios_SYSROOT} ${COMPILER_RT_TEST_COMPILER_CFLAGS}")
- set(TSAN_TEST_CONFIG_SUFFIX "-${arch}-ios")
- string(TOUPPER ${arch} ARCH_UPPER_CASE)
- set(CONFIG_NAME "IOS${ARCH_UPPER_CASE}Config")
- configure_lit_site_cfg(
- ${CMAKE_CURRENT_SOURCE_DIR}/lit.site.cfg.in
- ${CMAKE_CURRENT_BINARY_DIR}/${CONFIG_NAME}/lit.site.cfg
- )
- add_lit_testsuite(check-tsan-ios-${arch} "ThreadSanitizer iOS Simulator ${arch} tests"
- ${CMAKE_CURRENT_BINARY_DIR}/${CONFIG_NAME}/
- DEPENDS ${TSAN_TEST_DEPS})
-
- set(EXCLUDE_FROM_ALL OFF)
-endif()
-
if(COMPILER_RT_INCLUDE_TESTS)
configure_lit_site_cfg(
${CMAKE_CURRENT_SOURCE_DIR}/Unit/lit.site.cfg.in
--
2.14.1

View File

@ -1,10 +0,0 @@
--- a/lib/xray/xray_buffer_queue.h
+++ b/lib/xray/xray_buffer_queue.h
@@ -17,6 +17,7 @@
#include "sanitizer_common/sanitizer_atomic.h"
#include "sanitizer_common/sanitizer_mutex.h"
+#include <cstddef>
#include <deque>
#include <unordered_set>
#include <utility>

View File

@ -1,112 +0,0 @@
{ lib, stdenv, llvm_meta, version, fetch, cmake, python3, libllvm, libcxxabi
, doFakeLibgcc ? stdenv.hostPlatform.isFreeBSD
}:
let
useLLVM = stdenv.hostPlatform.useLLVM or false;
bareMetal = stdenv.hostPlatform.parsed.kernel.name == "none";
inherit (stdenv.hostPlatform) isMusl;
in
stdenv.mkDerivation {
pname = "compiler-rt";
inherit version;
src = fetch "compiler-rt" "0ipd4jdxpczgr2w6lzrabymz6dhzj69ywmyybjjc1q397zgrvziy";
nativeBuildInputs = [ cmake python3 libllvm.dev ];
buildInputs = lib.optional stdenv.hostPlatform.isDarwin libcxxabi;
env.NIX_CFLAGS_COMPILE = toString [
"-DSCUDO_DEFAULT_OPTIONS=DeleteSizeMismatch=0:DeallocationTypeMismatch=0"
];
cmakeFlags = [
"-DCOMPILER_RT_DEFAULT_TARGET_ONLY=ON"
"-DCMAKE_C_COMPILER_TARGET=${stdenv.hostPlatform.config}"
"-DCMAKE_ASM_COMPILER_TARGET=${stdenv.hostPlatform.config}"
] ++ lib.optionals (useLLVM || bareMetal || isMusl) [
"-DCOMPILER_RT_BUILD_SANITIZERS=OFF"
"-DCOMPILER_RT_BUILD_XRAY=OFF"
"-DCOMPILER_RT_BUILD_LIBFUZZER=OFF"
] ++ lib.optionals (useLLVM || bareMetal) [
"-DCOMPILER_RT_BUILD_PROFILE=OFF"
] ++ lib.optionals (useLLVM || bareMetal) [
"-DCMAKE_C_COMPILER_WORKS=ON"
"-DCMAKE_CXX_COMPILER_WORKS=ON"
"-DCOMPILER_RT_BAREMETAL_BUILD=ON"
"-DCMAKE_SIZEOF_VOID_P=${toString (stdenv.hostPlatform.parsed.cpu.bits / 8)}"
] ++ lib.optionals (useLLVM) [
"-DCOMPILER_RT_BUILD_BUILTINS=ON"
"-DCMAKE_C_FLAGS=-nodefaultlibs"
#https://stackoverflow.com/questions/53633705/cmake-the-c-compiler-is-not-able-to-compile-a-simple-test-program
"-DCMAKE_TRY_COMPILE_TARGET_TYPE=STATIC_LIBRARY"
] ++ lib.optionals (bareMetal) [
"-DCOMPILER_RT_OS_DIR=baremetal"
] ++ lib.optionals (stdenv.hostPlatform.isDarwin) [
# The compiler-rt build infrastructure sniffs supported platforms on Darwin
# and finds i386;x86_64;x86_64h. We only build for x86_64, so linking fails
# when it tries to use libc++ and libc++api for i386.
"-DDARWIN_osx_ARCHS=${stdenv.hostPlatform.darwinArch}"
];
outputs = [ "out" "dev" ];
patches = [
./codesign.patch # Revert compiler-rt commit that makes codesign mandatory
# https://github.com/llvm/llvm-project/commit/947f9692440836dcb8d88b74b69dd379d85974ce
../../common/compiler-rt/glibc.patch
./gnu-install-dirs.patch
./sys-ustat.patch
../../common/compiler-rt/libsanitizer-no-cyclades-9.patch
./compiler-rt-5-cstddef.patch
] ++ lib.optional stdenv.hostPlatform.isAarch32 ./armv7l.patch;
# TSAN requires XPC on Darwin, which we have no public/free source files for. We can depend on the Apple frameworks
# to get it, but they're unfree. Since LLVM is rather central to the stdenv, we patch out TSAN support so that Hydra
# can build this. If we didn't do it, basically the entire nixpkgs on Darwin would have an unfree dependency and we'd
# get no binary cache for the entire platform. If you really find yourself wanting the TSAN, make this controllable by
# a flag and turn the flag off during the stdenv build.
postPatch = lib.optionalString stdenv.isDarwin ''
substituteInPlace cmake/config-ix.cmake \
--replace 'set(COMPILER_RT_HAS_TSAN TRUE)' 'set(COMPILER_RT_HAS_TSAN FALSE)'
'' + lib.optionalString (useLLVM) ''
substituteInPlace lib/builtins/int_util.c \
--replace "#include <stdlib.h>" ""
substituteInPlace lib/builtins/clear_cache.c \
--replace "#include <assert.h>" ""
substituteInPlace lib/builtins/cpu_model.c \
--replace "#include <assert.h>" ""
'';
# Hack around weird upstream RPATH bug
postInstall = lib.optionalString (stdenv.hostPlatform.isDarwin || stdenv.hostPlatform.isWasm) ''
ln -s "$out/lib"/*/* "$out/lib"
'' + lib.optionalString (useLLVM) ''
ln -s $out/lib/*/clang_rt.crtbegin-*.o $out/lib/linux/crtbegin.o
ln -s $out/lib/*/clang_rt.crtend-*.o $out/lib/linux/crtend.o
ln -s $out/lib/*/clang_rt.crtbegin_shared-*.o $out/lib/linux/crtbeginS.o
ln -s $out/lib/*/clang_rt.crtend_shared-*.o $out/lib/linux/crtendS.o
'' + lib.optionalString doFakeLibgcc ''
ln -s $out/lib/freebsd/libclang_rt.builtins-*.a $out/lib/freebsd/libgcc.a
'';
meta = llvm_meta // {
homepage = "https://compiler-rt.llvm.org/";
description = "Compiler runtime libraries";
longDescription = ''
The compiler-rt project provides highly tuned implementations of the
low-level code generator support routines like "__fixunsdfdi" and other
calls generated when a target doesn't have a short sequence of native
instructions to implement a core IR operation. It also provides
implementations of run-time libraries for dynamic testing tools such as
AddressSanitizer, ThreadSanitizer, MemorySanitizer, and DataFlowSanitizer.
'';
# "All of the code in the compiler-rt project is dual licensed under the MIT
# license and the UIUC License (a BSD-like license)":
license = with lib.licenses; [ mit ncsa ];
broken = stdenv.hostPlatform.isDarwin && stdenv.hostPlatform.isAarch64;
};
}
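
The long TSAN comment in the removed expression above suggests making the Darwin patch-out controllable by a flag. As an illustration only (none of this is part of the removed code, and the attribute path is hypothetical), a consumer could append a substitution that reverts it via overrideAttrs; actually building TSAN on Darwin would still require the unfree Apple XPC frameworks, which is why it was disabled in the first place:

let
  pkgs = import <nixpkgs> {};   # assumes a tree that still carries llvmPackages_5
in
pkgs.llvmPackages_5.compiler-rt.overrideAttrs (old: {
  # Append a second substitution that flips COMPILER_RT_HAS_TSAN back to TRUE,
  # undoing the Darwin-only patch-out done in the postPatch above.
  postPatch = (old.postPatch or "") + ''
    substituteInPlace cmake/config-ix.cmake \
      --replace 'set(COMPILER_RT_HAS_TSAN FALSE)' 'set(COMPILER_RT_HAS_TSAN TRUE)'
  '';
})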

View File

@ -1,103 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index f997c53410c1..ac079d89b57b 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -12,6 +12,7 @@ cmake_minimum_required(VERSION 3.4.3)
# Check if compiler-rt is built as a standalone project.
if (CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR OR COMPILER_RT_STANDALONE_BUILD)
project(CompilerRT C CXX ASM)
+ include(GNUInstallDirs)
set(COMPILER_RT_STANDALONE_BUILD TRUE)
endif()
diff --git a/cmake/Modules/AddCompilerRT.cmake b/cmake/Modules/AddCompilerRT.cmake
index bc5fb9ff722b..91fb79d1980c 100644
--- a/cmake/Modules/AddCompilerRT.cmake
+++ b/cmake/Modules/AddCompilerRT.cmake
@@ -344,7 +344,7 @@ macro(add_compiler_rt_resource_file target_name file_name component)
add_custom_target(${target_name} DEPENDS ${dst_file})
# Install in Clang resource directory.
install(FILES ${file_name}
- DESTINATION ${COMPILER_RT_INSTALL_PATH}
+ DESTINATION ${COMPILER_RT_INSTALL_PATH}/${CMAKE_INSTALL_PREFIX}
COMPONENT ${component})
add_dependencies(${component} ${target_name})
@@ -361,7 +361,7 @@ macro(add_compiler_rt_script name)
add_custom_target(${name} DEPENDS ${dst})
install(FILES ${dst}
PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE
- DESTINATION ${COMPILER_RT_INSTALL_PATH}/bin)
+ DESTINATION ${COMPILER_RT_INSTALL_PATH}/${CMAKE_INSTALL_FULL_BINDIR})
endmacro(add_compiler_rt_script src name)
# Builds custom version of libc++ and installs it in <prefix>.
diff --git a/cmake/Modules/CompilerRTDarwinUtils.cmake b/cmake/Modules/CompilerRTDarwinUtils.cmake
index f646975475bb..75885bf305b8 100644
--- a/cmake/Modules/CompilerRTDarwinUtils.cmake
+++ b/cmake/Modules/CompilerRTDarwinUtils.cmake
@@ -391,7 +391,7 @@ macro(darwin_add_embedded_builtin_libraries)
set(DARWIN_macho_embedded_LIBRARY_OUTPUT_DIR
${COMPILER_RT_OUTPUT_DIR}/lib/macho_embedded)
set(DARWIN_macho_embedded_LIBRARY_INSTALL_DIR
- ${COMPILER_RT_INSTALL_PATH}/lib/macho_embedded)
+ ${COMPILER_RT_INSTALL_PATH}/${CMAKE_INSTALL_FULL_LIBDIR}/macho_embedded)
set(CFLAGS_armv7 "-target thumbv7-apple-darwin-eabi")
set(CFLAGS_i386 "-march=pentium")
diff --git a/cmake/base-config-ix.cmake b/cmake/base-config-ix.cmake
index b38c6ca96fac..a4580414cbc8 100644
--- a/cmake/base-config-ix.cmake
+++ b/cmake/base-config-ix.cmake
@@ -43,11 +43,11 @@ if (LLVM_TREE_AVAILABLE)
else()
# Take output dir and install path from the user.
set(COMPILER_RT_OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR} CACHE PATH
- "Path where built compiler-rt libraries should be stored.")
+ "Path where built compiler-rt build artifacts should be stored.")
set(COMPILER_RT_EXEC_OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/bin CACHE PATH
"Path where built compiler-rt executables should be stored.")
- set(COMPILER_RT_INSTALL_PATH ${CMAKE_INSTALL_PREFIX} CACHE PATH
- "Path where built compiler-rt libraries should be installed.")
+ set(COMPILER_RT_INSTALL_PATH "" CACHE PATH
+ "Prefix where built compiler-rt artifacts should be installed, comes before CMAKE_INSTALL_PREFIX.")
option(COMPILER_RT_INCLUDE_TESTS "Generate and build compiler-rt unit tests." OFF)
option(COMPILER_RT_ENABLE_WERROR "Fail and stop if warning is triggered" OFF)
# Use a host compiler to compile/link tests.
@@ -67,9 +67,9 @@ if(NOT DEFINED COMPILER_RT_OS_DIR)
string(TOLOWER ${CMAKE_SYSTEM_NAME} COMPILER_RT_OS_DIR)
endif()
set(COMPILER_RT_LIBRARY_OUTPUT_DIR
- ${COMPILER_RT_OUTPUT_DIR}/lib/${COMPILER_RT_OS_DIR})
+ ${COMPILER_RT_OUTPUT_DIR}/${CMAKE_INSTALL_FULL_LIBDIR}/${COMPILER_RT_OS_DIR})
set(COMPILER_RT_LIBRARY_INSTALL_DIR
- ${COMPILER_RT_INSTALL_PATH}/lib/${COMPILER_RT_OS_DIR})
+ ${COMPILER_RT_INSTALL_PATH}/${CMAKE_INSTALL_FULL_LIBDIR}/${COMPILER_RT_OS_DIR})
if(APPLE)
# On Darwin if /usr/include doesn't exist, the user probably has Xcode but not
diff --git a/include/CMakeLists.txt b/include/CMakeLists.txt
index ec3bf40b95e6..af119f10ee2b 100644
--- a/include/CMakeLists.txt
+++ b/include/CMakeLists.txt
@@ -44,8 +44,8 @@ set_target_properties(compiler-rt-headers PROPERTIES FOLDER "Compiler-RT Misc")
# Install sanitizer headers.
install(FILES ${SANITIZER_HEADERS}
PERMISSIONS OWNER_READ OWNER_WRITE GROUP_READ WORLD_READ
- DESTINATION ${COMPILER_RT_INSTALL_PATH}/include/sanitizer)
+ DESTINATION ${COMPILER_RT_INSTALL_PATH}/${CMAKE_INSTALL_FULL_INCLUDEDIR}/sanitizer)
# Install xray headers.
install(FILES ${XRAY_HEADERS}
PERMISSIONS OWNER_READ OWNER_WRITE GROUP_READ WORLD_READ
- DESTINATION ${COMPILER_RT_INSTALL_PATH}/include/xray)
+ DESTINATION ${COMPILER_RT_INSTALL_PATH}/${CMAKE_INSTALL_FULL_INCLUDEDIR}/xray)
diff --git a/lib/dfsan/CMakeLists.txt b/lib/dfsan/CMakeLists.txt
index 2c486bff821b..0ee715da95f8 100644
--- a/lib/dfsan/CMakeLists.txt
+++ b/lib/dfsan/CMakeLists.txt
@@ -44,4 +44,4 @@ add_custom_command(OUTPUT ${dfsan_abilist_filename}
DEPENDS done_abilist.txt libc_ubuntu1404_abilist.txt)
add_dependencies(dfsan dfsan_abilist)
install(FILES ${dfsan_abilist_filename}
- DESTINATION ${COMPILER_RT_INSTALL_PATH})
+ DESTINATION ${COMPILER_RT_INSTALL_PATH}/${CMAKE_INSTALL_PREFIX})

View File

@ -1,58 +0,0 @@
From 521935db9de17ad08748fd050137ac83b7734835 Mon Sep 17 00:00:00 2001
From: Craig Topper <craig.topper@intel.com>
Date: Thu, 24 May 2018 17:59:47 +0000
Subject: [PATCH] sanitizer: Use pre-computed size of struct ustat for Linux
<sys/ustat.h> has been removed from glibc 2.28 by:
commit cf2478d53ad7071e84c724a986b56fe17f4f4ca7
Author: Adhemerval Zanella <adhemerval.zanella@linaro.org>
Date: Sun Mar 18 11:28:59 2018 +0800
Deprecate ustat syscall interface
This patch uses pre-computed size of struct ustat for Linux to fix
https://bugs.llvm.org/show_bug.cgi?id=37418
Patch by H.J. Lu.
Differential Revision: https://reviews.llvm.org/D47281
git-svn-id: https://llvm.org/svn/llvm-project/compiler-rt/trunk@333213 91177308-0d34-0410-b5e6-96231b3b80d8
---
.../sanitizer_platform_limits_posix.cc | 15 +++++++++++++--
1 file changed, 13 insertions(+), 2 deletions(-)
diff --git a/lib/sanitizer_common/sanitizer_platform_limits_posix.cc b/lib/sanitizer_common/sanitizer_platform_limits_posix.cc
index 94b8f3f627..936d818673 100644
--- a/lib/sanitizer_common/sanitizer_platform_limits_posix.cc
+++ b/lib/sanitizer_common/sanitizer_platform_limits_posix.cc
@@ -159,7 +159,6 @@ typedef struct user_fpregs elf_fpregset_t;
# include <sys/procfs.h>
#endif
#include <sys/user.h>
-#include <sys/ustat.h>
#include <linux/cyclades.h>
#include <linux/if_eql.h>
#include <linux/if_plip.h>
@@ -253,7 +252,19 @@ namespace __sanitizer {
#endif // SANITIZER_LINUX || SANITIZER_FREEBSD
#if SANITIZER_LINUX && !SANITIZER_ANDROID
- unsigned struct_ustat_sz = sizeof(struct ustat);
+ // Use pre-computed size of struct ustat to avoid <sys/ustat.h> which
+ // has been removed from glibc 2.28.
+#if defined(__aarch64__) || defined(__s390x__) || defined (__mips64) \
+ || defined(__powerpc64__) || defined(__arch64__) || defined(__sparcv9) \
+ || defined(__x86_64__)
+#define SIZEOF_STRUCT_USTAT 32
+#elif defined(__arm__) || defined(__i386__) || defined(__mips__) \
+ || defined(__powerpc__) || defined(__s390__)
+#define SIZEOF_STRUCT_USTAT 20
+#else
+#error Unknown size of struct ustat
+#endif
+ unsigned struct_ustat_sz = SIZEOF_STRUCT_USTAT;
unsigned struct_rlimit64_sz = sizeof(struct rlimit64);
unsigned struct_statvfs64_sz = sizeof(struct statvfs64);
#endif // SANITIZER_LINUX && !SANITIZER_ANDROID

View File

@ -1,140 +0,0 @@
{ lowPrio, newScope, pkgs, lib, stdenv, cmake, gccForLibs
, libxml2, python3, isl, fetchurl, overrideCC, wrapCCWith
, buildLlvmTools # tools, but from the previous stage, for cross
, targetLlvmLibraries # libraries, but from the next stage, for cross
, targetLlvm
}:
let
release_version = "5.0.2";
version = release_version; # differentiating these is important for rc's
targetConfig = stdenv.targetPlatform.config;
fetch = name: sha256: fetchurl {
url = "https://releases.llvm.org/${release_version}/${name}-${version}.src.tar.xz";
inherit sha256;
};
clang-tools-extra_src = fetch "clang-tools-extra" "018b3fiwah8f8br5i26qmzh6sjvzchpn358sn8v079m49f2jldm3";
llvm_meta = {
license = lib.licenses.ncsa;
maintainers = lib.teams.llvm.members;
# See llvm/cmake/config-ix.cmake.
platforms =
lib.platforms.aarch64 ++
lib.platforms.arm ++
lib.platforms.mips ++
lib.platforms.power ++
lib.platforms.s390x ++
lib.platforms.wasi ++
lib.platforms.x86;
};
tools = lib.makeExtensible (tools: let
callPackage = newScope (tools // { inherit stdenv cmake libxml2 python3 isl release_version version fetch buildLlvmTools; });
mkExtraBuildCommands = cc: ''
rsrc="$out/resource-root"
mkdir "$rsrc"
ln -s "${cc.lib}/lib/clang/${release_version}/include" "$rsrc"
echo "-resource-dir=$rsrc" >> $out/nix-support/cc-cflags
ln -s "${targetLlvmLibraries.compiler-rt.out}/lib" "$rsrc/lib"
'';
in {
libllvm = callPackage ./llvm {
inherit llvm_meta;
};
# `llvm` historically had the binaries. When choosing an output explicitly,
# we need to reintroduce `outputSpecified` to get the expected behavior e.g. of lib.get*
llvm = tools.libllvm;
libllvm-polly = callPackage ./llvm {
inherit llvm_meta;
enablePolly = true;
};
llvm-polly = tools.libllvm-polly.lib // { outputSpecified = false; };
libclang = callPackage ./clang {
inherit clang-tools-extra_src llvm_meta;
};
clang-unwrapped = tools.libclang;
llvm-manpages = lowPrio (tools.libllvm.override {
enableManpages = true;
python3 = pkgs.python3; # don't use python-boot
});
clang-manpages = lowPrio (tools.libclang.override {
enableManpages = true;
python3 = pkgs.python3; # don't use python-boot
});
# pick clang appropriate for package set we are targeting
clang =
/**/ if stdenv.targetPlatform.libc == null then tools.clangNoLibc
else if stdenv.targetPlatform.useLLVM or false then tools.clangUseLLVM
else if (pkgs.targetPackages.stdenv or stdenv).cc.isGNU then tools.libstdcxxClang
else tools.libcxxClang;
libstdcxxClang = wrapCCWith rec {
cc = tools.clang-unwrapped;
# libstdcxx is taken from gcc in an ad-hoc way in cc-wrapper.
libcxx = null;
extraPackages = [
targetLlvmLibraries.compiler-rt
];
extraBuildCommands = mkExtraBuildCommands cc;
};
libcxxClang = wrapCCWith rec {
cc = tools.clang-unwrapped;
libcxx = targetLlvmLibraries.libcxx;
extraPackages = [
libcxx.cxxabi
targetLlvmLibraries.compiler-rt
];
extraBuildCommands = mkExtraBuildCommands cc;
};
lld = callPackage ./lld {
inherit llvm_meta;
};
lldb = callPackage ./lldb {
inherit llvm_meta;
};
});
libraries = lib.makeExtensible (libraries: let
callPackage = newScope (libraries // buildLlvmTools // { inherit stdenv cmake libxml2 python3 isl release_version version fetch; });
in {
compiler-rt = callPackage ./compiler-rt {
inherit llvm_meta;
};
stdenv = overrideCC stdenv buildLlvmTools.clang;
libcxxStdenv = overrideCC stdenv buildLlvmTools.libcxxClang;
libcxx = callPackage ./libcxx {
inherit llvm_meta;
};
libcxxabi = callPackage ./libcxxabi {
inherit llvm_meta;
};
openmp = callPackage ./openmp {
inherit llvm_meta targetLlvm;
};
});
noExtend = extensible: lib.attrsets.removeAttrs extensible [ "extend" ];
in { inherit tools libraries release_version; } // (noExtend libraries) // (noExtend tools)
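
Because both tools and libraries above are built with lib.makeExtensible, they expose an .extend attribute before being flattened into the top level. A minimal sketch of layering an override through the fixpoint, assuming a hypothetical pkgs.llvmPackages_5 attribute path (debugVersion is an argument of the removed llvm expression shown further down):

let
  pkgs = import <nixpkgs> {};   # assumes a tree that still carries llvmPackages_5
  tools' = pkgs.llvmPackages_5.tools.extend (final: prev: {
    # Swap in a debug build of libllvm; members of tools' that reach libllvm
    # through the fixpoint pick up the override lazily.
    libllvm = prev.libllvm.override { debugVersion = true; };
  });
in
tools'.llvm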

View File

@ -1,79 +0,0 @@
{ lib, stdenv, llvm_meta, fetch, cmake, python3, fixDarwinDylibNames, version
, cxxabi ? if stdenv.hostPlatform.isFreeBSD then libcxxrt else libcxxabi
, libcxxabi, libcxxrt
}:
assert stdenv.isDarwin -> cxxabi.pname == "libcxxabi";
stdenv.mkDerivation {
pname = "libcxx";
inherit version;
src = fetch "libcxx" "1672aaf95fgy4xsfra8pw24f6r93zwzpan1033hkcm8p2glqipvf";
postUnpack = ''
unpackFile ${libcxxabi.src}
export LIBCXXABI_INCLUDE_DIR="$PWD/$(ls -d libcxxabi-${version}*)/include"
'';
outputs = [ "out" "dev" ];
patches = [
./gnu-install-dirs.patch
] ++ lib.optionals stdenv.hostPlatform.isMusl [
../../libcxx-0001-musl-hacks.patch
];
prePatch = ''
substituteInPlace lib/CMakeLists.txt --replace "/usr/lib/libc++" "\''${LIBCXX_LIBCXXABI_LIB_PATH}/libc++"
'';
preConfigure = ''
# Get headers from the cxxabi source so we can see private headers not installed by the cxxabi package
cmakeFlagsArray=($cmakeFlagsArray -DLIBCXX_CXX_ABI_INCLUDE_PATHS="$LIBCXXABI_INCLUDE_DIR")
'' + lib.optionalString stdenv.hostPlatform.isMusl ''
patchShebangs utils/cat_files.py
'';
nativeBuildInputs = [ cmake ]
++ lib.optional stdenv.hostPlatform.isMusl python3
++ lib.optional stdenv.hostPlatform.isDarwin fixDarwinDylibNames;
buildInputs = [ cxxabi ];
cmakeFlags = [
"-DLIBCXX_LIBCPPABI_VERSION=2"
"-DLIBCXX_CXX_ABI=${cxxabi.pname}"
] ++ lib.optional stdenv.hostPlatform.isMusl "-DLIBCXX_HAS_MUSL_LIBC=1"
++ lib.optional (cxxabi.pname == "libcxxabi") "-DLIBCXX_LIBCXXABI_LIB_PATH=${cxxabi}/lib";
preInstall = lib.optionalString (stdenv.isDarwin) ''
for file in lib/*.dylib; do
if [ -L "$file" ]; then continue; fi
baseName=$(basename $(${stdenv.cc.targetPrefix}otool -D $file | tail -n 1))
installName="$out/lib/$baseName"
abiName=$(echo "$baseName" | sed -e 's/libc++/libc++abi/')
for other in $(${stdenv.cc.targetPrefix}otool -L $file | awk '$1 ~ "/libc\\+\\+abi" { print $1 }'); do
${stdenv.cc.targetPrefix}install_name_tool -change $other ${cxxabi}/lib/$abiName $file
done
done
'';
passthru = {
isLLVM = true;
inherit cxxabi;
};
meta = llvm_meta // {
homepage = "https://libcxx.llvm.org/";
description = "C++ standard library";
longDescription = ''
libc++ is an implementation of the C++ standard library, targeting C++11,
C++14 and above.
'';
# "All of the code in libc++ is dual licensed under the MIT license and the
# UIUC License (a BSD-like license)":
license = with lib.licenses; [ mit ncsa ];
};
}

View File

@ -1,72 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index ca5afba86d19..ed69e4043c3d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -22,6 +22,8 @@ set(CMAKE_MODULE_PATH
if (CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR)
project(libcxx CXX C)
+ include(GNUInstallDirs)
+
set(PACKAGE_NAME libcxx)
set(PACKAGE_VERSION 5.0.0)
set(PACKAGE_STRING "${PACKAGE_NAME} ${PACKAGE_VERSION}")
diff --git a/cmake/Modules/HandleLibCXXABI.cmake b/cmake/Modules/HandleLibCXXABI.cmake
index b1f6bee8f945..1b455fceed7f 100644
--- a/cmake/Modules/HandleLibCXXABI.cmake
+++ b/cmake/Modules/HandleLibCXXABI.cmake
@@ -55,7 +55,7 @@ macro(setup_abi_lib abidefines abilib abifiles abidirs)
)
if (LIBCXX_INSTALL_HEADERS)
install(FILES "${LIBCXX_BINARY_INCLUDE_DIR}/${fpath}"
- DESTINATION ${LIBCXX_INSTALL_PREFIX}include/c++/v1/${dstdir}
+ DESTINATION ${LIBCXX_INSTALL_PREFIX}${CMAKE_INSTALL_INCLUDEDIR}/c++/v1/${dstdir}
COMPONENT libcxx
PERMISSIONS OWNER_READ OWNER_WRITE GROUP_READ WORLD_READ
)
diff --git a/include/CMakeLists.txt b/include/CMakeLists.txt
index 5a1b2ccdc426..106d3d6c1d3c 100644
--- a/include/CMakeLists.txt
+++ b/include/CMakeLists.txt
@@ -20,7 +20,7 @@ endif()
if (LIBCXX_INSTALL_HEADERS)
install(DIRECTORY .
- DESTINATION ${LIBCXX_INSTALL_PREFIX}include/c++/v1
+ DESTINATION ${LIBCXX_INSTALL_PREFIX}${CMAKE_INSTALL_INCLUDEDIR}/c++/v1
COMPONENT cxx-headers
FILES_MATCHING
${LIBCXX_HEADER_PATTERN}
@@ -44,7 +44,7 @@ if (LIBCXX_INSTALL_HEADERS)
set(generated_config_deps generate_config_header)
# Install the generated header as __config.
install(FILES ${LIBCXX_BINARY_DIR}/__generated_config
- DESTINATION ${LIBCXX_INSTALL_PREFIX}include/c++/v1
+ DESTINATION ${LIBCXX_INSTALL_PREFIX}${CMAKE_INSTALL_INCLUDEDIR}/c++/v1
PERMISSIONS OWNER_READ OWNER_WRITE GROUP_READ WORLD_READ
RENAME __config
COMPONENT cxx-headers)
diff --git a/lib/CMakeLists.txt b/lib/CMakeLists.txt
index 578651423f3b..277befd631ac 100644
--- a/lib/CMakeLists.txt
+++ b/lib/CMakeLists.txt
@@ -355,8 +355,8 @@ if (LIBCXX_INSTALL_LIBRARY)
set(experimental_lib cxx_experimental)
endif()
install(TARGETS ${LIBCXX_TARGETS} ${experimental_lib}
- LIBRARY DESTINATION ${LIBCXX_INSTALL_PREFIX}lib${LIBCXX_LIBDIR_SUFFIX} COMPONENT cxx
- ARCHIVE DESTINATION ${LIBCXX_INSTALL_PREFIX}lib${LIBCXX_LIBDIR_SUFFIX} COMPONENT cxx
+ LIBRARY DESTINATION ${LIBCXX_INSTALL_PREFIX}${CMAKE_INSTALL_LIBDIR}${LIBCXX_LIBDIR_SUFFIX} COMPONENT cxx
+ ARCHIVE DESTINATION ${LIBCXX_INSTALL_PREFIX}${CMAKE_INSTALL_LIBDIR}${LIBCXX_LIBDIR_SUFFIX} COMPONENT cxx
)
# NOTE: This install command must go after the cxx install command otherwise
# it will not be executed after the library symlinks are installed.
@@ -364,7 +364,7 @@ if (LIBCXX_INSTALL_LIBRARY)
# Replace the libc++ filename with $<TARGET_LINKER_FILE:cxx>
# after we required CMake 3.0.
install(FILES "${LIBCXX_LIBRARY_DIR}/libc++${CMAKE_SHARED_LIBRARY_SUFFIX}"
- DESTINATION ${LIBCXX_INSTALL_PREFIX}lib${LIBCXX_LIBDIR_SUFFIX}
+ DESTINATION ${LIBCXX_INSTALL_PREFIX}${CMAKE_INSTALL_LIBDIR}${LIBCXX_LIBDIR_SUFFIX}
COMPONENT libcxx)
endif()
endif()

View File

@ -1,70 +0,0 @@
{ lib, stdenv, llvm_meta, cmake, fetch, libcxx, libunwind, llvm, version }:
stdenv.mkDerivation {
pname = "libcxxabi";
inherit version;
src = fetch "libcxxabi" "12lp799rskr4fc2xr64qn4jfkjnfd8b1aymvsxyn4k9ar7r9pgqv";
outputs = [ "out" "dev" ];
postUnpack = ''
unpackFile ${libcxx.src}
unpackFile ${llvm.src}
export cmakeFlags="-DLLVM_PATH=$PWD/$(ls -d llvm-*) -DLIBCXXABI_LIBCXX_PATH=$PWD/$(ls -d libcxx-*)"
'' + lib.optionalString stdenv.isDarwin ''
export TRIPLE=x86_64-apple-darwin
'' + lib.optionalString stdenv.hostPlatform.isMusl ''
patch -p1 -d $(ls -d libcxx-*) -i ${../../libcxx-0001-musl-hacks.patch}
'';
patches = [
./gnu-install-dirs.patch
];
nativeBuildInputs = [ cmake ];
buildInputs = lib.optional (!stdenv.isDarwin) libunwind;
preInstall = lib.optionalString stdenv.isDarwin ''
for file in lib/*.dylib; do
if [ -L "$file" ]; then continue; fi
# Fix up the install name. Preserve the basename, just replace the path.
installName="$out/lib/$(basename $(${stdenv.cc.targetPrefix}otool -D $file | tail -n 1))"
# this should be done in CMake, but having trouble figuring out
# the magic combination of necessary CMake variables
# if you fancy a try, take a look at
# https://gitlab.kitware.com/cmake/community/-/wikis/doc/cmake/RPATH-handling
${stdenv.cc.targetPrefix}install_name_tool -id $installName $file
# cc-wrapper passes '-lc++abi' to all c++ link steps, but that causes
# libcxxabi to sometimes link against a different version of itself.
# Here we simply make that second reference point to ourselves.
for other in $(${stdenv.cc.targetPrefix}otool -L $file | awk '$1 ~ "/libc\\+\\+abi" { print $1 }'); do
${stdenv.cc.targetPrefix}install_name_tool -change $other $installName $file
done
done
'';
postInstall = ''
mkdir -p "$dev/include"
install -m 644 ../include/${if stdenv.isDarwin then "*" else "cxxabi.h"} "$dev/include"
'';
passthru = {
libName = "c++abi";
};
meta = llvm_meta // {
homepage = "https://libcxxabi.llvm.org/";
description = "Provides C++ standard library support";
longDescription = ''
libc++abi is a new implementation of low level support for a standard C++ library.
'';
# "All of the code in libc++abi is dual licensed under the MIT license and
# the UIUC License (a BSD-like license)":
license = with lib.licenses; [ mit ncsa ];
maintainers = llvm_meta.maintainers ++ [ lib.maintainers.vlstill ];
};
}

View File

@ -1,28 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 4db3328deb9c..74b39acfe588 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -20,6 +20,8 @@ set(CMAKE_MODULE_PATH
if (CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR)
project(libcxxabi CXX C)
+ include(GNUInstallDirs)
+
set(PACKAGE_NAME libcxxabi)
set(PACKAGE_VERSION 5.0.0)
set(PACKAGE_STRING "${PACKAGE_NAME} ${PACKAGE_VERSION}")
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index adcc412880c9..71758665af05 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -174,8 +174,8 @@ endif()
add_custom_target(cxxabi DEPENDS ${LIBCXXABI_TARGETS})
install(TARGETS ${LIBCXXABI_TARGETS}
- LIBRARY DESTINATION ${LIBCXXABI_INSTALL_PREFIX}lib${LIBCXXABI_LIBDIR_SUFFIX} COMPONENT cxxabi
- ARCHIVE DESTINATION ${LIBCXXABI_INSTALL_PREFIX}lib${LIBCXXABI_LIBDIR_SUFFIX} COMPONENT cxxabi
+ LIBRARY DESTINATION ${LIBCXXABI_INSTALL_PREFIX}${CMAKE_INSTALL_LIBDIR}${LIBCXXABI_LIBDIR_SUFFIX} COMPONENT cxxabi
+ ARCHIVE DESTINATION ${LIBCXXABI_INSTALL_PREFIX}${CMAKE_INSTALL_LIBDIR}${LIBCXXABI_LIBDIR_SUFFIX} COMPONENT cxxabi
)
if (NOT CMAKE_CONFIGURATION_TYPES)

View File

@ -1,46 +0,0 @@
{ lib, stdenv, llvm_meta
, buildLlvmTools
, fetch
, cmake
, libllvm
, version
}:
stdenv.mkDerivation rec {
pname = "lld";
inherit version;
src = fetch "lld" "1ah75rjly6747jk1zbwca3z0svr9b09ylgxd4x9ns721xir6sia6";
patches = [
./gnu-install-dirs.patch
];
nativeBuildInputs = [ cmake ];
buildInputs = [ libllvm ];
cmakeFlags = [
"-DLLVM_CONFIG_PATH=${libllvm.dev}/bin/llvm-config${lib.optionalString (stdenv.hostPlatform != stdenv.buildPlatform) "-native"}"
] ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
"-DLLVM_TABLEGEN_EXE=${buildLlvmTools.llvm}/bin/llvm-tblgen"
];
# Musl's default stack size is too small for lld to be able to link Firefox.
LDFLAGS = lib.optionalString stdenv.hostPlatform.isMusl "-Wl,-z,stack-size=2097152";
outputs = [ "out" "lib" "dev" ];
meta = llvm_meta // {
broken = stdenv.isDarwin;
homepage = "https://lld.llvm.org/";
description = "The LLVM linker (unwrapped)";
longDescription = ''
LLD is a linker from the LLVM project that is a drop-in replacement for
system linkers and runs much faster than them. It also provides features
that are useful for toolchain developers.
The linker supports ELF (Unix), PE/COFF (Windows), and Mach-O (macOS)
in descending order of completeness. Internally, LLD consists
of several different linkers.
'';
};
}

View File

@ -1,68 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index e2ab0e35f1ab..f68e23d2a70d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -6,6 +6,8 @@ if(CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR)
set(CMAKE_INCLUDE_CURRENT_DIR ON)
set(LLD_BUILT_STANDALONE TRUE)
+ include(GNUInstallDirs)
+
find_program(LLVM_CONFIG_PATH "llvm-config" DOC "Path to llvm-config binary")
if(NOT LLVM_CONFIG_PATH)
message(FATAL_ERROR "llvm-config not found: specify LLVM_CONFIG_PATH")
@@ -203,7 +205,7 @@ include_directories(BEFORE
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
install(DIRECTORY include/
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
FILES_MATCHING
PATTERN "*.h"
PATTERN ".svn" EXCLUDE
diff --git a/cmake/modules/AddLLD.cmake b/cmake/modules/AddLLD.cmake
index fd1d44199ca6..2ec1831ed8f6 100644
--- a/cmake/modules/AddLLD.cmake
+++ b/cmake/modules/AddLLD.cmake
@@ -20,9 +20,9 @@ macro(add_lld_library name)
install(TARGETS ${name}
COMPONENT ${name}
${export_to_lldtargets}
- LIBRARY DESTINATION lib${LLVM_LIBDIR_SUFFIX}
- ARCHIVE DESTINATION lib${LLVM_LIBDIR_SUFFIX}
- RUNTIME DESTINATION bin)
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
if (${ARG_SHARED} AND NOT CMAKE_CONFIGURATION_TYPES)
add_custom_target(install-${name}
@@ -56,7 +56,7 @@ macro(add_lld_tool name)
install(TARGETS ${name}
${export_to_lldtargets}
- RUNTIME DESTINATION bin
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
COMPONENT ${name})
if(NOT CMAKE_CONFIGURATION_TYPES)
@@ -73,5 +73,5 @@ endmacro()
macro(add_lld_symlink name dest)
add_llvm_tool_symlink(${name} ${dest} ALWAYS_GENERATE)
# Always generate install targets
- llvm_install_symlink(${name} ${dest} ALWAYS_GENERATE)
+ llvm_install_symlink(${name} ${dest} ${CMAKE_INSTALL_FULL_BINDIR} ALWAYS_GENERATE)
endmacro()
diff --git a/tools/lld/CMakeLists.txt b/tools/lld/CMakeLists.txt
index 2df10697ff66..94aa8d092220 100644
--- a/tools/lld/CMakeLists.txt
+++ b/tools/lld/CMakeLists.txt
@@ -13,7 +13,7 @@ target_link_libraries(lld
)
install(TARGETS lld
- RUNTIME DESTINATION bin)
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
if(NOT LLD_SYMLINKS_TO_CREATE)
set(LLD_SYMLINKS_TO_CREATE lld-link ld.lld)

View File

@ -1,88 +0,0 @@
{ lib, stdenv, llvm_meta
, fetch
, fetchpatch
, cmake
, zlib
, ncurses
, swig
, which
, libedit
, libxml2
, libllvm
, libclang
, python3
, version
, darwin
}:
stdenv.mkDerivation rec {
pname = "lldb";
inherit version;
src = fetch "lldb" "05j2a63yzln43852nng8a7y47spzlyr1cvdmgmbxgd29c8r0bfkq";
patches = [
# Fix PythonString::GetString for >=python-3.7
(fetchpatch {
url = "https://github.com/llvm/llvm-project/commit/5457b426f5e15a29c0acc8af1a476132f8be2a36.patch";
sha256 = "1zbx4m0m8kbg0wq6740jcw151vb2pb1p25p401wiq8diqqagkjps";
stripLen = 1;
})
./gnu-install-dirs.patch
];
postPatch = ''
# Fix up various paths that assume llvm and clang are installed in the same place
sed -i 's,".*ClangConfig.cmake","${libclang.dev}/lib/cmake/clang/ClangConfig.cmake",' \
cmake/modules/LLDBStandalone.cmake
sed -i 's,".*tools/clang/include","${libclang.dev}/include",' \
cmake/modules/LLDBStandalone.cmake
sed -i 's,"$.LLVM_LIBRARY_DIR.",${libllvm.lib}/lib ${libclang.lib}/lib,' \
cmake/modules/LLDBStandalone.cmake
'';
outputs = [ "out" "lib" "dev" ];
nativeBuildInputs = [
cmake python3 which swig
];
buildInputs = [
ncurses zlib libedit libxml2 libllvm
] ++ lib.optionals stdenv.isDarwin [
darwin.libobjc
darwin.apple_sdk.libs.xpc
darwin.apple_sdk.frameworks.Foundation darwin.bootstrap_cmds darwin.apple_sdk.frameworks.Carbon darwin.apple_sdk.frameworks.Cocoa
];
CXXFLAGS = "-fno-rtti";
hardeningDisable = [ "format" ];
cmakeFlags = [
"-DLLDB_INCLUDE_TESTS=${if doCheck then "YES" else "NO"}"
"-DLLDB_CODESIGN_IDENTITY=" # codesigning makes nondeterministic
] ++ lib.optionals doCheck [
"-DLLDB_TEST_C_COMPILER=${stdenv.cc}/bin/${stdenv.cc.targetPrefix}cc"
"-DLLDB_TEST_CXX_COMPILER=${stdenv.cc}/bin/${stdenv.cc.targetPrefix}c++"
];
doCheck = false;
postInstall = ''
mkdir -p $out/share/man/man1
cp ../docs/lldb.1 $out/share/man/man1/
'';
meta = llvm_meta // {
homepage = "https://lldb.llvm.org/";
description = "A next-generation high-performance debugger";
longDescription = ''
LLDB is a next generation, high-performance debugger. It is built as a set
of reusable components which highly leverage existing libraries in the
larger LLVM Project, such as the Clang expression parser and LLVM
disassembler.
'';
# never built on aarch64-darwin since first introduction in nixpkgs
broken = stdenv.isDarwin && stdenv.isAarch64;
};
}

View File

@ -1,76 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index ada293811b3e..6c2149309f65 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -7,6 +7,8 @@ set(CMAKE_MODULE_PATH
"${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules"
)
+include(GNUInstallDirs)
+
include(LLDBStandalone)
include(LLDBConfig)
include(AddLLDB)
diff --git a/cmake/modules/AddLLDB.cmake b/cmake/modules/AddLLDB.cmake
index 4c6f1efd673d..179a12b49cce 100644
--- a/cmake/modules/AddLLDB.cmake
+++ b/cmake/modules/AddLLDB.cmake
@@ -54,14 +54,14 @@ function(add_lldb_library name)
endif()
install(TARGETS ${name}
COMPONENT ${name}
- RUNTIME DESTINATION bin
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
LIBRARY DESTINATION ${out_dir}
ARCHIVE DESTINATION ${out_dir})
else()
install(TARGETS ${name}
COMPONENT ${name}
- LIBRARY DESTINATION lib${LLVM_LIBDIR_SUFFIX}
- ARCHIVE DESTINATION lib${LLVM_LIBDIR_SUFFIX})
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX})
endif()
if (NOT CMAKE_CONFIGURATION_TYPES)
add_custom_target(install-${name}
@@ -126,7 +126,7 @@ function(add_lldb_executable name)
if(ARG_GENERATE_INSTALL AND NOT (ARG_INCLUDE_IN_FRAMEWORK AND LLDB_BUILD_FRAMEWORK ))
install(TARGETS ${name}
COMPONENT ${name}
- RUNTIME DESTINATION bin)
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
if (NOT CMAKE_CONFIGURATION_TYPES)
add_custom_target(install-${name}
DEPENDS ${name}
diff --git a/cmake/modules/LLDBConfig.cmake b/cmake/modules/LLDBConfig.cmake
index 726552675f47..f1f769f34446 100644
--- a/cmake/modules/LLDBConfig.cmake
+++ b/cmake/modules/LLDBConfig.cmake
@@ -276,7 +276,7 @@ include_directories(BEFORE
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
install(DIRECTORY include/
COMPONENT lldb_headers
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
FILES_MATCHING
PATTERN "*.h"
PATTERN ".svn" EXCLUDE
@@ -286,7 +286,7 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/include/
COMPONENT lldb_headers
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
FILES_MATCHING
PATTERN "*.h"
PATTERN ".svn" EXCLUDE
diff --git a/tools/intel-mpx/CMakeLists.txt b/tools/intel-mpx/CMakeLists.txt
index 29ba9a1cacec..30e2f9334b95 100644
--- a/tools/intel-mpx/CMakeLists.txt
+++ b/tools/intel-mpx/CMakeLists.txt
@@ -12,4 +12,4 @@ target_link_libraries(lldb-intel-mpxtable
PUBLIC liblldb LLVMSupport)
install(TARGETS lldb-intel-mpxtable
- LIBRARY DESTINATION bin)
+ LIBRARY DESTINATION ${CMAKE_INSTALL_BINDIR})

View File

@ -1,283 +0,0 @@
{ lib, stdenv, llvm_meta
, pkgsBuildBuild
, fetch
, fetchpatch
, cmake
, python3
, libffi
, enableGoldPlugin ? libbfd.hasPluginAPI
, libbfd
, libxml2
, ncurses
, version
, release_version
, zlib
, buildLlvmTools
, debugVersion ? false
, enableManpages ? false
, enableSharedLibraries ? !enableManpages
, enablePolly ? false
}:
let
inherit (lib) optional optionals optionalString;
# Used when creating a versioned symlinks of libLLVM.dylib
versionSuffixes = with lib;
let parts = splitVersion release_version; in
imap (i: _: concatStringsSep "." (take i parts)) parts;
in
stdenv.mkDerivation (rec {
pname = "llvm";
inherit version;
src = fetch "llvm" "0g1bbj2n6xv4p1n6hh17vj3vpvg56wacipc81dgwga9mg2lys8nm";
polly_src = fetch "polly" "1f4i1qsw7ywx25v262p8syz339zcbvfkx295xz26hmqrn944xa6x";
unpackPhase = ''
unpackFile $src
mv llvm-${version}* llvm
sourceRoot=$PWD/llvm
'' + optionalString enablePolly ''
unpackFile $polly_src
mv polly-* $sourceRoot/tools/polly
'';
outputs = [ "out" "lib" "dev" "python" ];
nativeBuildInputs = [ cmake python3 ]
++ optional enableManpages python3.pkgs.sphinx;
buildInputs = [ libxml2 libffi ];
propagatedBuildInputs = [ ncurses zlib ];
patches = [
(fetchpatch {
url = "https://bugzilla.redhat.com/attachment.cgi?id=1389687";
name = "llvm-gcc8-type-mismatch.patch";
sha256 = "0ga2123aclq3x9w72d0rm0az12m8c1i4r1106vh701hf4cghgbch";
})
./fix-gcc9.patch
#(fetchpatch {
# name = "llvm-fix-gcc9.patch";
# url = "https://reviews.llvm.org/file/data/zs3ck5ryvc5n672fd2kw/PHID-FILE-byoqefzwmkd7qnlip4v2/file";
# sha256 = "0injj1hqgrbcbihhwp2nbal88jfykad30r54f2cdcx7gws2fcy8i";
# stripLen = 1;
#})
# When cross-compiling we configure llvm-config-native with an approximation
# of the flags used for the normal LLVM build. To avoid the need for building
# a native libLLVM.so (which would fail) we force llvm-config to be linked
# statically against the necessary LLVM components always.
../../llvm-config-link-static.patch
./gnu-install-dirs.patch
# Fix invalid std::string(nullptr) for GCC 12
(fetchpatch {
name = "nvptx-gcc-12.patch";
url = "https://github.com/llvm/llvm-project/commit/99e64623ec9b31def9375753491cc6093c831809.patch";
sha256 = "0zjfjgavqzi2ypqwqnlvy6flyvdz8hi1anwv0ybwnm2zqixg7za3";
stripLen = 1;
})
../../llvm-7-musl.patch
] ++ lib.optional enablePolly ./gnu-install-dirs-polly.patch;
postPatch = optionalString stdenv.isDarwin ''
substituteInPlace cmake/modules/AddLLVM.cmake \
--replace 'set(_install_name_dir INSTALL_NAME_DIR "@rpath")' "set(_install_name_dir)" \
--replace 'set(_install_rpath "@loader_path/../''${CMAKE_INSTALL_LIBDIR}" ''${extra_libdir})' ""
'' + ''
# FileSystem permissions tests fail with various special bits
substituteInPlace unittests/Support/CMakeLists.txt \
--replace "Path.cpp" ""
rm unittests/Support/Path.cpp
# llvm-5 does not support dwarf-5 style info, fails on gcc-11.
rm test/tools/llvm-symbolizer/print_context.c
'' + optionalString stdenv.isAarch64 ''
patch -p0 < ${../../aarch64.patch}
'' + optionalString stdenv.hostPlatform.isMusl ''
patch -p1 -i ${../../TLI-musl.patch}
substituteInPlace unittests/Support/CMakeLists.txt \
--replace "add_subdirectory(DynamicLibrary)" ""
rm unittests/Support/DynamicLibrary/DynamicLibraryTest.cpp
'' + ''
# Tweak tests to ignore namespace part of type to support
# gcc-12: https://gcc.gnu.org/PR103598.
# The change below mangles strings like:
# CHECK-NEXT: Starting llvm::Function pass manager run.
# to:
# CHECK-NEXT: Starting {{.*}}Function pass manager run.
for f in \
test/Other/new-pass-manager.ll \
test/Other/new-pm-defaults.ll \
test/Other/new-pm-lto-defaults.ll \
test/Other/new-pm-thinlto-defaults.ll \
test/Other/pass-pipeline-parsing.ll \
test/Transforms/Inline/cgscc-incremental-invalidate.ll \
test/Transforms/Inline/clear-analyses.ll \
; do
echo "PATCH: $f"
substituteInPlace $f \
--replace 'Starting llvm::' 'Starting {{.*}}' \
--replace 'Finished llvm::' 'Finished {{.*}}'
done
'';
preConfigure = ''
# Workaround for configure flags that need to have spaces
cmakeFlagsArray+=(
-DLLVM_LIT_ARGS='-svj''${NIX_BUILD_CORES} --no-progress-bar'
)
'';
# hacky fix: created binaries need to be run before installation
preBuild = ''
mkdir -p $out/
ln -sv $PWD/lib $out
'';
cmakeBuildType = if debugVersion then "Debug" else "Release";
cmakeFlags = with stdenv; let
# These flags influence llvm-config's BuildVariables.inc in addition to the
# general build. We need to make sure these are also passed via
# CROSS_TOOLCHAIN_FLAGS_NATIVE when cross-compiling or llvm-config-native
# will return different results from the cross llvm-config.
#
# Some flags don't need to be repassed because LLVM already does so (like
# CMAKE_BUILD_TYPE), others are irrelevant to the result.
flagsForLlvmConfig = [
"-DLLVM_INSTALL_CMAKE_DIR=${placeholder "dev"}/lib/cmake/llvm/"
"-DLLVM_ENABLE_RTTI=ON"
] ++ optionals enableSharedLibraries [
"-DLLVM_LINK_LLVM_DYLIB=ON"
];
in flagsForLlvmConfig ++ [
"-DLLVM_INSTALL_UTILS=ON" # Needed by rustc
"-DLLVM_BUILD_TESTS=${if doCheck then "ON" else "OFF"}"
"-DLLVM_ENABLE_FFI=ON"
"-DLLVM_HOST_TRIPLE=${stdenv.hostPlatform.config}"
"-DLLVM_DEFAULT_TARGET_TRIPLE=${stdenv.hostPlatform.config}"
"-DTARGET_TRIPLE=${stdenv.hostPlatform.config}"
]
++ lib.optionals enableManpages [
"-DLLVM_BUILD_DOCS=ON"
"-DLLVM_ENABLE_SPHINX=ON"
"-DSPHINX_OUTPUT_MAN=ON"
"-DSPHINX_OUTPUT_HTML=OFF"
"-DSPHINX_WARNINGS_AS_ERRORS=OFF"
] ++ lib.optionals (enableGoldPlugin) [
"-DLLVM_BINUTILS_INCDIR=${libbfd.dev}/include"
] ++ lib.optionals (isDarwin) [
"-DLLVM_ENABLE_LIBCXX=ON"
"-DCAN_TARGET_i386=false"
] ++ optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
"-DCMAKE_CROSSCOMPILING=True"
"-DLLVM_TABLEGEN=${buildLlvmTools.llvm}/bin/llvm-tblgen"
(
let
nativeCC = pkgsBuildBuild.targetPackages.stdenv.cc;
nativeBintools = nativeCC.bintools.bintools;
nativeToolchainFlags = [
"-DCMAKE_C_COMPILER=${nativeCC}/bin/${nativeCC.targetPrefix}cc"
"-DCMAKE_CXX_COMPILER=${nativeCC}/bin/${nativeCC.targetPrefix}c++"
"-DCMAKE_AR=${nativeBintools}/bin/${nativeBintools.targetPrefix}ar"
"-DCMAKE_STRIP=${nativeBintools}/bin/${nativeBintools.targetPrefix}strip"
"-DCMAKE_RANLIB=${nativeBintools}/bin/${nativeBintools.targetPrefix}ranlib"
];
# We need to repass the custom GNUInstallDirs values, otherwise CMake
# will choose them for us, leading to wrong results in llvm-config-native
nativeInstallFlags = [
"-DCMAKE_INSTALL_PREFIX=${placeholder "out"}"
"-DCMAKE_INSTALL_BINDIR=${placeholder "out"}/bin"
"-DCMAKE_INSTALL_INCLUDEDIR=${placeholder "dev"}/include"
"-DCMAKE_INSTALL_LIBDIR=${placeholder "lib"}/lib"
"-DCMAKE_INSTALL_LIBEXECDIR=${placeholder "lib"}/libexec"
];
in "-DCROSS_TOOLCHAIN_FLAGS_NATIVE:list="
+ lib.concatStringsSep ";" (lib.concatLists [
flagsForLlvmConfig
nativeToolchainFlags
nativeInstallFlags
])
)
];
postBuild = ''
rm -fR $out
'';
preCheck = ''
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH''${LD_LIBRARY_PATH:+:}$PWD/lib
'';
postInstall = ''
mkdir -p $python/share
mv $out/share/opt-viewer $python/share/opt-viewer
moveToOutput "bin/llvm-config*" "$dev"
substituteInPlace "$dev/lib/cmake/llvm/LLVMExports-${if debugVersion then "debug" else "release"}.cmake" \
--replace "\''${_IMPORT_PREFIX}/lib/lib" "$lib/lib/lib" \
--replace "$out/bin/llvm-config" "$dev/bin/llvm-config"
substituteInPlace "$dev/lib/cmake/llvm/LLVMConfig.cmake" \
--replace 'set(LLVM_BINARY_DIR "''${LLVM_INSTALL_PREFIX}")' 'set(LLVM_BINARY_DIR "''${LLVM_INSTALL_PREFIX}'"$lib"'")'
''
+ optionalString (stdenv.isDarwin && enableSharedLibraries) ''
${lib.concatMapStringsSep "\n" (v: ''
ln -s $lib/lib/libLLVM.dylib $lib/lib/libLLVM-${v}.dylib
'') versionSuffixes}
''
+ optionalString (stdenv.buildPlatform != stdenv.hostPlatform) ''
cp NATIVE/bin/llvm-config $dev/bin/llvm-config-native
'';
doCheck = stdenv.isLinux && (!stdenv.isi686)
&& (stdenv.hostPlatform == stdenv.buildPlatform);
checkTarget = "check-all";
requiredSystemFeatures = [ "big-parallel" ];
meta = llvm_meta // {
homepage = "https://llvm.org/";
description = "A collection of modular and reusable compiler and toolchain technologies";
longDescription = ''
The LLVM Project is a collection of modular and reusable compiler and
toolchain technologies. Despite its name, LLVM has little to do with
traditional virtual machines. The name "LLVM" itself is not an acronym; it
is the full name of the project.
LLVM began as a research project at the University of Illinois, with the
goal of providing a modern, SSA-based compilation strategy capable of
supporting both static and dynamic compilation of arbitrary programming
languages. Since then, LLVM has grown to be an umbrella project consisting
of a number of subprojects, many of which are being used in production by
a wide variety of commercial and open source projects as well as being
widely used in academic research. Code in the LLVM project is licensed
under the "Apache 2.0 License with LLVM exceptions".
'';
};
} // lib.optionalAttrs enableManpages {
pname = "llvm-manpages";
buildPhase = ''
make docs-llvm-man
'';
propagatedBuildInputs = [];
installPhase = ''
make -C docs install
'';
outputs = [ "out" ];
doCheck = false;
meta = llvm_meta // {
description = "man pages for LLVM ${version}";
};
})
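
For reference, the versionSuffixes helper at the top of the removed expression turns release_version = "5.0.2" into the suffixes used for the Darwin libLLVM symlinks in postInstall. A small sketch of the same computation (imap in the removed file is the 1-indexed map, today's lib.imap1):

# Evaluates to [ "5" "5.0" "5.0.2" ], hence the symlinks
# libLLVM-5.dylib, libLLVM-5.0.dylib and libLLVM-5.0.2.dylib.
let
  lib = (import <nixpkgs> {}).lib;
  parts = lib.splitVersion "5.0.2";   # [ "5" "0" "2" ]
in
lib.imap1 (i: _: lib.concatStringsSep "." (lib.take i parts)) parts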

View File

@ -1,33 +0,0 @@
diff --git a/lib/Target/Mips/MipsFastISel.cpp b/lib/Target/Mips/MipsFastISel.cpp
index f79cb0e6..c6279046 100644
--- a/lib/Target/Mips/MipsFastISel.cpp
+++ b/lib/Target/Mips/MipsFastISel.cpp
@@ -70,6 +70,7 @@
#include <cassert>
#include <cstdint>
#include <new>
+#include <array>
#define DEBUG_TYPE "mips-fastisel"
@@ -1309,13 +1310,13 @@ bool MipsFastISel::fastLowerArguments() {
return false;
}
- const ArrayRef<MCPhysReg> GPR32ArgRegs = {Mips::A0, Mips::A1, Mips::A2,
- Mips::A3};
- const ArrayRef<MCPhysReg> FGR32ArgRegs = {Mips::F12, Mips::F14};
- const ArrayRef<MCPhysReg> AFGR64ArgRegs = {Mips::D6, Mips::D7};
- ArrayRef<MCPhysReg>::iterator NextGPR32 = GPR32ArgRegs.begin();
- ArrayRef<MCPhysReg>::iterator NextFGR32 = FGR32ArgRegs.begin();
- ArrayRef<MCPhysReg>::iterator NextAFGR64 = AFGR64ArgRegs.begin();
+ std::array<MCPhysReg, 4> GPR32ArgRegs = {{Mips::A0, Mips::A1, Mips::A2,
+ Mips::A3}};
+ std::array<MCPhysReg, 2> FGR32ArgRegs = {{Mips::F12, Mips::F14}};
+ std::array<MCPhysReg, 2> AFGR64ArgRegs = {{Mips::D6, Mips::D7}};
+ auto NextGPR32 = GPR32ArgRegs.begin();
+ auto NextFGR32 = FGR32ArgRegs.begin();
+ auto NextAFGR64 = AFGR64ArgRegs.begin();
struct AllocatedReg {
const TargetRegisterClass *RC;

View File

@ -1,106 +0,0 @@
diff --git a/tools/polly/CMakeLists.txt b/tools/polly/CMakeLists.txt
index 9ddc0f7ff81d..7ca45f286d47 100644
--- a/tools/polly/CMakeLists.txt
+++ b/tools/polly/CMakeLists.txt
@@ -2,7 +2,11 @@
if (NOT DEFINED LLVM_MAIN_SRC_DIR)
project(Polly)
cmake_minimum_required(VERSION 3.4.3)
+endif()
+
+include(GNUInstallDirs)
+if (NOT DEFINED LLVM_MAIN_SRC_DIR)
# Where is LLVM installed?
find_package(LLVM CONFIG REQUIRED)
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${LLVM_CMAKE_DIR})
@@ -157,14 +161,14 @@ include_directories(
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
install(DIRECTORY include/
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
FILES_MATCHING
PATTERN "*.h"
PATTERN ".svn" EXCLUDE
)
install(DIRECTORY ${POLLY_BINARY_DIR}/include/
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
FILES_MATCHING
PATTERN "*.h"
PATTERN "CMakeFiles" EXCLUDE
diff --git a/tools/polly/cmake/CMakeLists.txt b/tools/polly/cmake/CMakeLists.txt
index 969292cd6b00..d7aea77bdd20 100644
--- a/tools/polly/cmake/CMakeLists.txt
+++ b/tools/polly/cmake/CMakeLists.txt
@@ -79,18 +79,18 @@ file(GENERATE
# Generate PollyConfig.cmake for the install tree.
unset(POLLY_EXPORTS)
-set(POLLY_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}")
+set(POLLY_INSTALL_PREFIX "")
set(POLLY_CONFIG_LLVM_CMAKE_DIR "${LLVM_BINARY_DIR}/${LLVM_INSTALL_PACKAGE_DIR}")
-set(POLLY_CONFIG_CMAKE_DIR "${POLLY_INSTALL_PREFIX}/${POLLY_INSTALL_PACKAGE_DIR}")
-set(POLLY_CONFIG_LIBRARY_DIRS "${POLLY_INSTALL_PREFIX}/lib${LLVM_LIBDIR_SUFFIX}")
+set(POLLY_CONFIG_CMAKE_DIR "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_PREFIX}/${POLLY_INSTALL_PACKAGE_DIR}")
+set(POLLY_CONFIG_LIBRARY_DIRS "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_FULL_LIBDIR}${LLVM_LIBDIR_SUFFIX}")
if (POLLY_BUNDLED_ISL)
set(POLLY_CONFIG_INCLUDE_DIRS
- "${POLLY_INSTALL_PREFIX}/include"
- "${POLLY_INSTALL_PREFIX}/include/polly"
+ "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_FULL_LIBDIR}"
+ "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_FULL_LIBDIR}/polly"
)
else()
set(POLLY_CONFIG_INCLUDE_DIRS
- "${POLLY_INSTALL_PREFIX}/include"
+ "${POLLY_INSTALL_PREFIX}${CMAKE_INSTALL_FULL_INCLUDEDIR}"
${ISL_INCLUDE_DIRS}
)
endif()
@@ -100,12 +100,12 @@ endif()
foreach(tgt IN LISTS POLLY_CONFIG_EXPORTED_TARGETS)
get_target_property(tgt_type ${tgt} TYPE)
if (tgt_type STREQUAL "EXECUTABLE")
- set(tgt_prefix "bin/")
+ set(tgt_prefix "${CMAKE_INSTALL_BINDIR}/")
else()
- set(tgt_prefix "lib/")
+ set(tgt_prefix "${CMAKE_INSTALL_LIBDIR}/")
endif()
- set(tgt_path "${CMAKE_INSTALL_PREFIX}/${tgt_prefix}$<TARGET_FILE_NAME:${tgt}>")
+ set(tgt_path "${tgt_prefix}$<TARGET_FILE_NAME:${tgt}>")
file(RELATIVE_PATH tgt_path ${POLLY_CONFIG_CMAKE_DIR} ${tgt_path})
if (NOT tgt_type STREQUAL "INTERFACE_LIBRARY")
diff --git a/tools/polly/cmake/polly_macros.cmake b/tools/polly/cmake/polly_macros.cmake
index 32bed50bb060..cca5bfff4970 100644
--- a/tools/polly/cmake/polly_macros.cmake
+++ b/tools/polly/cmake/polly_macros.cmake
@@ -44,8 +44,8 @@ macro(add_polly_library name)
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY OR ${name} STREQUAL "LLVMPolly")
install(TARGETS ${name}
EXPORT LLVMExports
- LIBRARY DESTINATION lib${LLVM_LIBDIR_SUFFIX}
- ARCHIVE DESTINATION lib${LLVM_LIBDIR_SUFFIX})
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX})
endif()
set_property(GLOBAL APPEND PROPERTY LLVM_EXPORTS ${name})
endmacro(add_polly_library)
diff --git a/tools/polly/lib/External/CMakeLists.txt b/tools/polly/lib/External/CMakeLists.txt
index 286c04fba287..07905e68f595 100644
--- a/tools/polly/lib/External/CMakeLists.txt
+++ b/tools/polly/lib/External/CMakeLists.txt
@@ -268,7 +268,7 @@ if (POLLY_BUNDLED_ISL)
install(DIRECTORY
${ISL_SOURCE_DIR}/include/
${ISL_BINARY_DIR}/include/
- DESTINATION include/polly
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/polly
FILES_MATCHING
PATTERN "*.h"
PATTERN "CMakeFiles" EXCLUDE

View File

@ -1,386 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index c1e03aed4809..8b8bbb8c403e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -279,15 +279,21 @@ if (CMAKE_BUILD_TYPE AND
message(FATAL_ERROR "Invalid value for CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")
endif()
+include(GNUInstallDirs)
+
set(LLVM_LIBDIR_SUFFIX "" CACHE STRING "Define suffix of library directory name (32/64)" )
-set(LLVM_TOOLS_INSTALL_DIR "bin" CACHE STRING "Path for binary subdirectory (defaults to 'bin')")
+set(LLVM_TOOLS_INSTALL_DIR "${CMAKE_INSTALL_BINDIR}" CACHE STRING
+ "Path for binary subdirectory (defaults to 'bin')")
mark_as_advanced(LLVM_TOOLS_INSTALL_DIR)
set(LLVM_UTILS_INSTALL_DIR "bin" CACHE STRING
"Path to install LLVM utilities (enabled by LLVM_INSTALL_UTILS=ON) (defaults to LLVM_TOOLS_INSTALL_DIR)")
mark_as_advanced(LLVM_TOOLS_INSTALL_DIR)
+set(LLVM_INSTALL_CMAKE_DIR "${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}/cmake/llvm" CACHE STRING
+ "Path for CMake subdirectory (defaults to lib/cmake/llvm)" )
+
# They are used as destination of target generators.
set(LLVM_RUNTIME_OUTPUT_INTDIR ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/bin)
set(LLVM_LIBRARY_OUTPUT_INTDIR ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/lib${LLVM_LIBDIR_SUFFIX})
@@ -512,9 +518,9 @@ option (LLVM_ENABLE_DOXYGEN "Use doxygen to generate llvm API documentation." OF
option (LLVM_ENABLE_SPHINX "Use Sphinx to generate llvm documentation." OFF)
option (LLVM_ENABLE_OCAMLDOC "Build OCaml bindings documentation." ON)
-set(LLVM_INSTALL_DOXYGEN_HTML_DIR "share/doc/llvm/doxygen-html"
+set(LLVM_INSTALL_DOXYGEN_HTML_DIR "${CMAKE_INSTALL_DOCDIR}/${project}/doxygen-html"
CACHE STRING "Doxygen-generated HTML documentation install directory")
-set(LLVM_INSTALL_OCAMLDOC_HTML_DIR "share/doc/llvm/ocaml-html"
+set(LLVM_INSTALL_OCAMLDOC_HTML_DIR "${CMAKE_INSTALL_DOCDIR}/${project}/ocaml-html"
CACHE STRING "OCamldoc-generated HTML documentation install directory")
option (LLVM_BUILD_EXTERNAL_COMPILER_RT
@@ -945,7 +951,7 @@ add_subdirectory(cmake/modules)
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
install(DIRECTORY include/llvm include/llvm-c
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
COMPONENT llvm-headers
FILES_MATCHING
PATTERN "*.def"
@@ -957,7 +963,7 @@ if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
)
install(DIRECTORY ${LLVM_INCLUDE_DIR}/llvm
- DESTINATION include
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
COMPONENT llvm-headers
FILES_MATCHING
PATTERN "*.def"
diff --git a/cmake/modules/AddLLVM.cmake b/cmake/modules/AddLLVM.cmake
index 1c922651b133..d555fd627a4f 100644
--- a/cmake/modules/AddLLVM.cmake
+++ b/cmake/modules/AddLLVM.cmake
@@ -589,11 +589,11 @@ macro(add_llvm_library name)
else()
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY OR ${name} STREQUAL "LTO" OR
(LLVM_LINK_LLVM_DYLIB AND ${name} STREQUAL "LLVM"))
- set(install_dir lib${LLVM_LIBDIR_SUFFIX})
+ set(install_dir ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX})
if(ARG_SHARED OR BUILD_SHARED_LIBS)
if(WIN32 OR CYGWIN OR MINGW)
set(install_type RUNTIME)
- set(install_dir bin)
+ set(install_dir ${CMAKE_INSTALL_BINDIR})
else()
set(install_type LIBRARY)
endif()
@@ -637,9 +637,9 @@ macro(add_llvm_loadable_module name)
if (NOT LLVM_INSTALL_TOOLCHAIN_ONLY)
if(WIN32 OR CYGWIN)
# DLL platform
- set(dlldir "bin")
+ set(dlldir "${CMAKE_INSTALL_BINDIR}")
else()
- set(dlldir "lib${LLVM_LIBDIR_SUFFIX}")
+ set(dlldir "${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}")
endif()
if(${name} IN_LIST LLVM_DISTRIBUTION_COMPONENTS OR
@@ -651,7 +651,7 @@ macro(add_llvm_loadable_module name)
install(TARGETS ${name}
${export_to_llvmexports}
LIBRARY DESTINATION ${dlldir}
- ARCHIVE DESTINATION lib${LLVM_LIBDIR_SUFFIX})
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX})
endif()
set_property(GLOBAL APPEND PROPERTY LLVM_EXPORTS ${name})
endif()
@@ -864,7 +864,7 @@ macro(add_llvm_example name)
endif()
add_llvm_executable(${name} ${ARGN})
if( LLVM_BUILD_EXAMPLES )
- install(TARGETS ${name} RUNTIME DESTINATION examples)
+ install(TARGETS ${name} RUNTIME DESTINATION ${CMAKE_INSTALL_DOCDIR}/examples)
endif()
set_target_properties(${name} PROPERTIES FOLDER "Examples")
endmacro(add_llvm_example name)
@@ -1275,7 +1275,7 @@ function(llvm_install_library_symlink name dest type)
set(full_name ${CMAKE_${type}_LIBRARY_PREFIX}${name}${CMAKE_${type}_LIBRARY_SUFFIX})
set(full_dest ${CMAKE_${type}_LIBRARY_PREFIX}${dest}${CMAKE_${type}_LIBRARY_SUFFIX})
- set(output_dir lib${LLVM_LIBDIR_SUFFIX})
+ set(output_dir ${CMAKE_INSTALL_FULL_LIBDIR}${LLVM_LIBDIR_SUFFIX})
if(WIN32 AND "${type}" STREQUAL "SHARED")
set(output_dir bin)
endif()
@@ -1293,7 +1293,7 @@ function(llvm_install_library_symlink name dest type)
endif()
endfunction()
-function(llvm_install_symlink name dest)
+function(llvm_install_symlink name dest output_dir)
cmake_parse_arguments(ARG "ALWAYS_GENERATE" "COMPONENT" "" ${ARGN})
foreach(path ${CMAKE_MODULE_PATH})
if(EXISTS ${path}/LLVMInstallSymlink.cmake)
@@ -1316,7 +1316,7 @@ function(llvm_install_symlink name dest)
set(full_dest ${dest}${CMAKE_EXECUTABLE_SUFFIX})
install(SCRIPT ${INSTALL_SYMLINK}
- CODE "install_symlink(${full_name} ${full_dest} ${LLVM_TOOLS_INSTALL_DIR})"
+ CODE "install_symlink(${full_name} ${full_dest} ${output_dir})"
COMPONENT ${component})
if (NOT CMAKE_CONFIGURATION_TYPES AND NOT ARG_ALWAYS_GENERATE)
@@ -1400,7 +1400,8 @@ function(add_llvm_tool_symlink link_name target)
endif()
if ((TOOL_IS_TOOLCHAIN OR NOT LLVM_INSTALL_TOOLCHAIN_ONLY) AND LLVM_BUILD_TOOLS)
- llvm_install_symlink(${link_name} ${target})
+ GNUInstallDirs_get_absolute_install_dir(output_dir LLVM_TOOLS_INSTALL_DIR)
+ llvm_install_symlink(${link_name} ${target} ${output_dir})
endif()
endif()
endfunction()
@@ -1452,9 +1453,9 @@ function(llvm_setup_rpath name)
if (APPLE)
set(_install_name_dir INSTALL_NAME_DIR "@rpath")
- set(_install_rpath "@loader_path/../lib" ${extra_libdir})
+ set(_install_rpath "@loader_path/../${CMAKE_INSTALL_LIBDIR}" ${extra_libdir})
elseif(UNIX)
- set(_install_rpath "\$ORIGIN/../lib${LLVM_LIBDIR_SUFFIX}" ${extra_libdir})
+ set(_install_rpath "\$ORIGIN/../${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}" ${extra_libdir})
if(${CMAKE_SYSTEM_NAME} MATCHES "(FreeBSD|DragonFly)")
set_property(TARGET ${name} APPEND_STRING PROPERTY
LINK_FLAGS " -Wl,-z,origin ")
diff --git a/cmake/modules/AddOCaml.cmake b/cmake/modules/AddOCaml.cmake
index 1d8094cc505f..afdbe6e6d19c 100644
--- a/cmake/modules/AddOCaml.cmake
+++ b/cmake/modules/AddOCaml.cmake
@@ -140,9 +140,9 @@ function(add_ocaml_library name)
endforeach()
if( APPLE )
- set(ocaml_rpath "@executable_path/../../../lib${LLVM_LIBDIR_SUFFIX}")
+ set(ocaml_rpath "@executable_path/../../../${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}")
elseif( UNIX )
- set(ocaml_rpath "\\$ORIGIN/../../../lib${LLVM_LIBDIR_SUFFIX}")
+ set(ocaml_rpath "\\$ORIGIN/../../../${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX}")
endif()
list(APPEND ocaml_flags "-ldopt" "-Wl,-rpath,${ocaml_rpath}")
diff --git a/cmake/modules/AddSphinxTarget.cmake b/cmake/modules/AddSphinxTarget.cmake
index 4540c5c36c8e..4cefb17fbd55 100644
--- a/cmake/modules/AddSphinxTarget.cmake
+++ b/cmake/modules/AddSphinxTarget.cmake
@@ -73,7 +73,7 @@ function (add_sphinx_target builder project)
elseif (builder STREQUAL html)
string(TOUPPER "${project}" project_upper)
- set(${project_upper}_INSTALL_SPHINX_HTML_DIR "share/doc/${project}/html"
+ set(${project_upper}_INSTALL_SPHINX_HTML_DIR "${CMAKE_INSTALL_DOCDIR}/${project}/html"
CACHE STRING "HTML documentation install directory for ${project}")
# '/.' indicates: copy the contents of the directory directly into
diff --git a/cmake/modules/CMakeLists.txt b/cmake/modules/CMakeLists.txt
index ac4b0b7c0304..21a6a3da8667 100644
--- a/cmake/modules/CMakeLists.txt
+++ b/cmake/modules/CMakeLists.txt
@@ -1,4 +1,4 @@
-set(LLVM_INSTALL_PACKAGE_DIR lib${LLVM_LIBDIR_SUFFIX}/cmake/llvm)
+set(LLVM_INSTALL_PACKAGE_DIR ${LLVM_INSTALL_CMAKE_DIR} CACHE STRING "Path for CMake subdirectory (defaults to 'cmake/llvm')")
set(llvm_cmake_builddir "${LLVM_BINARY_DIR}/${LLVM_INSTALL_PACKAGE_DIR}")
# First for users who use an installed LLVM, create the LLVMExports.cmake file.
@@ -84,11 +84,11 @@ foreach(p ${_count})
set(LLVM_CONFIG_CODE "${LLVM_CONFIG_CODE}
get_filename_component(LLVM_INSTALL_PREFIX \"\${LLVM_INSTALL_PREFIX}\" PATH)")
endforeach(p)
-set(LLVM_CONFIG_INCLUDE_DIRS "\${LLVM_INSTALL_PREFIX}/include")
-set(LLVM_CONFIG_LIBRARY_DIRS "\${LLVM_INSTALL_PREFIX}/lib\${LLVM_LIBDIR_SUFFIX}")
+set(LLVM_CONFIG_INCLUDE_DIRS "\${LLVM_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}")
+set(LLVM_CONFIG_LIBRARY_DIRS "\${LLVM_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}\${LLVM_LIBDIR_SUFFIX}")
set(LLVM_CONFIG_CMAKE_DIR "\${LLVM_INSTALL_PREFIX}/${LLVM_INSTALL_PACKAGE_DIR}")
set(LLVM_CONFIG_BINARY_DIR "\${LLVM_INSTALL_PREFIX}")
-set(LLVM_CONFIG_TOOLS_BINARY_DIR "\${LLVM_INSTALL_PREFIX}/bin")
+set(LLVM_CONFIG_TOOLS_BINARY_DIR "\${LLVM_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}")
set(LLVM_CONFIG_EXPORTS_FILE "\${LLVM_CMAKE_DIR}/LLVMExports.cmake")
set(LLVM_CONFIG_EXPORTS "${LLVM_EXPORTS}")
configure_file(
diff --git a/cmake/modules/LLVMInstallSymlink.cmake b/cmake/modules/LLVMInstallSymlink.cmake
index 482697b06baf..af2ac1e6c979 100644
--- a/cmake/modules/LLVMInstallSymlink.cmake
+++ b/cmake/modules/LLVMInstallSymlink.cmake
@@ -10,7 +10,7 @@ function(install_symlink name target outdir)
set(LINK_OR_COPY copy)
endif()
- set(bindir "${DESTDIR}${CMAKE_INSTALL_PREFIX}/${outdir}/")
+ set(bindir "${DESTDIR}${outdir}/")
message("Creating ${name}")
diff --git a/docs/CMake.rst b/docs/CMake.rst
index b6ebf37adc92..34c73d2869e0 100644
--- a/docs/CMake.rst
+++ b/docs/CMake.rst
@@ -196,7 +196,7 @@ CMake manual, or execute ``cmake --help-variable VARIABLE_NAME``.
**LLVM_LIBDIR_SUFFIX**:STRING
Extra suffix to append to the directory where libraries are to be
installed. On a 64-bit architecture, one could use ``-DLLVM_LIBDIR_SUFFIX=64``
- to install libraries to ``/usr/lib64``.
+ to install libraries to ``/usr/lib64``. See also ``CMAKE_INSTALL_LIBDIR``.
**CMAKE_C_FLAGS**:STRING
Extra flags to use when compiling C source files.
@@ -461,8 +461,8 @@ LLVM-specific variables
**LLVM_INSTALL_DOXYGEN_HTML_DIR**:STRING
The path to install Doxygen-generated HTML documentation to. This path can
- either be absolute or relative to the CMAKE_INSTALL_PREFIX. Defaults to
- `share/doc/llvm/doxygen-html`.
+ either be absolute or relative to the ``CMAKE_INSTALL_PREFIX``. Defaults to
+ `${CMAKE_INSTALL_DOCDIR}/${project}/doxygen-html`.
**LLVM_ENABLE_SPHINX**:BOOL
If specified, CMake will search for the ``sphinx-build`` executable and will make
@@ -493,13 +493,33 @@ LLVM-specific variables
**LLVM_INSTALL_SPHINX_HTML_DIR**:STRING
The path to install Sphinx-generated HTML documentation to. This path can
- either be absolute or relative to the CMAKE_INSTALL_PREFIX. Defaults to
- `share/doc/llvm/html`.
+ either be absolute or relative to the ``CMAKE_INSTALL_PREFIX``. Defaults to
+ `${CMAKE_INSTALL_DOCDIR}/${project}/html`.
**LLVM_INSTALL_OCAMLDOC_HTML_DIR**:STRING
The path to install OCamldoc-generated HTML documentation to. This path can
- either be absolute or relative to the CMAKE_INSTALL_PREFIX. Defaults to
- `share/doc/llvm/ocaml-html`.
+ either be absolute or relative to the ``CMAKE_INSTALL_PREFIX``. Defaults to
+ `${CMAKE_INSTALL_DOCDIR}/${project}/ocaml-html`.
+
+**CMAKE_INSTALL_BINDIR**:STRING
+ The path to install binary tools, relative to the ``CMAKE_INSTALL_PREFIX``.
+ Defaults to `bin`.
+
+**CMAKE_INSTALL_LIBDIR**:STRING
+ The path to install libraries, relative to the ``CMAKE_INSTALL_PREFIX``.
+ Defaults to `lib`.
+
+**CMAKE_INSTALL_INCLUDEDIR**:STRING
+ The path to install header files, relative to the ``CMAKE_INSTALL_PREFIX``.
+ Defaults to `include`.
+
+**CMAKE_INSTALL_DOCDIR**:STRING
+ The path to install documentation, relative to the ``CMAKE_INSTALL_PREFIX``.
+ Defaults to `share/doc`.
+
+**CMAKE_INSTALL_MANDIR**:STRING
+ The path to install manpage files, relative to the ``CMAKE_INSTALL_PREFIX``.
+ Defaults to `share/man`.
**LLVM_CREATE_XCODE_TOOLCHAIN**:BOOL
OS X Only: If enabled CMake will generate a target named
@@ -651,9 +671,11 @@ the ``cmake`` command or by setting it directly in ``ccmake`` or ``cmake-gui``).
This file is available in two different locations.
-* ``<INSTALL_PREFIX>/lib/cmake/llvm/LLVMConfig.cmake`` where
- ``<INSTALL_PREFIX>`` is the install prefix of an installed version of LLVM.
- On Linux typically this is ``/usr/lib/cmake/llvm/LLVMConfig.cmake``.
+* ``<LLVM_INSTALL_PACKAGE_DIR>LLVMConfig.cmake`` where
+ ``<LLVM_INSTALL_PACKAGE_DIR>`` is the location where LLVM CMake modules are
+ installed as part of an installed version of LLVM. This is typically
+ ``cmake/llvm/`` within the lib directory. On Linux, this is typically
+ ``/usr/lib/cmake/llvm/LLVMConfig.cmake``.
* ``<LLVM_BUILD_ROOT>/lib/cmake/llvm/LLVMConfig.cmake`` where
``<LLVM_BUILD_ROOT>`` is the root of the LLVM build tree. **Note: this is only
diff --git a/include/llvm/CMakeLists.txt b/include/llvm/CMakeLists.txt
index 1d5ca3ba92b0..026f5453c1da 100644
--- a/include/llvm/CMakeLists.txt
+++ b/include/llvm/CMakeLists.txt
@@ -4,5 +4,5 @@ add_subdirectory(Support)
# If we're doing an out-of-tree build, copy a module map for generated
# header files into the build area.
if (NOT "${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
- configure_file(module.modulemap.build module.modulemap COPYONLY)
+ configure_file(module.modulemap.build ${LLVM_INCLUDE_DIR}/module.modulemap COPYONLY)
endif (NOT "${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
diff --git a/tools/llvm-config/BuildVariables.inc.in b/tools/llvm-config/BuildVariables.inc.in
index f201e1f7bff0..4582ed556a02 100644
--- a/tools/llvm-config/BuildVariables.inc.in
+++ b/tools/llvm-config/BuildVariables.inc.in
@@ -24,6 +24,10 @@
#define LLVM_CXXFLAGS "@LLVM_CXXFLAGS@"
#define LLVM_BUILDMODE "@LLVM_BUILDMODE@"
#define LLVM_LIBDIR_SUFFIX "@LLVM_LIBDIR_SUFFIX@"
+#define LLVM_INSTALL_BINDIR "@CMAKE_INSTALL_BINDIR@"
+#define LLVM_INSTALL_LIBDIR "@CMAKE_INSTALL_LIBDIR@"
+#define LLVM_INSTALL_INCLUDEDIR "@CMAKE_INSTALL_INCLUDEDIR@"
+#define LLVM_INSTALL_CMAKEDIR "@LLVM_INSTALL_CMAKE_DIR@"
#define LLVM_TARGETS_BUILT "@LLVM_TARGETS_BUILT@"
#define LLVM_SYSTEM_LIBS "@LLVM_SYSTEM_LIBS@"
#define LLVM_BUILD_SYSTEM "@LLVM_BUILD_SYSTEM@"
diff --git a/tools/llvm-config/llvm-config.cpp b/tools/llvm-config/llvm-config.cpp
index 08b096afb052..2deae0dcfacc 100644
--- a/tools/llvm-config/llvm-config.cpp
+++ b/tools/llvm-config/llvm-config.cpp
@@ -332,12 +332,26 @@ int main(int argc, char **argv) {
("-I" + ActiveIncludeDir + " " + "-I" + ActiveObjRoot + "/include");
} else {
ActivePrefix = CurrentExecPrefix;
- ActiveIncludeDir = ActivePrefix + "/include";
- SmallString<256> path(StringRef(LLVM_TOOLS_INSTALL_DIR));
- sys::fs::make_absolute(ActivePrefix, path);
- ActiveBinDir = path.str();
- ActiveLibDir = ActivePrefix + "/lib" + LLVM_LIBDIR_SUFFIX;
- ActiveCMakeDir = ActiveLibDir + "/cmake/llvm";
+ {
+ SmallString<256> path(StringRef(LLVM_INSTALL_INCLUDEDIR));
+ sys::fs::make_absolute(ActivePrefix, path);
+ ActiveIncludeDir = std::string(path.str());
+ }
+ {
+ SmallString<256> path(StringRef(LLVM_INSTALL_BINDIR));
+ sys::fs::make_absolute(ActivePrefix, path);
+ ActiveBinDir = std::string(path.str());
+ }
+ {
+ SmallString<256> path(StringRef(LLVM_INSTALL_LIBDIR LLVM_LIBDIR_SUFFIX));
+ sys::fs::make_absolute(ActivePrefix, path);
+ ActiveLibDir = std::string(path.str());
+ }
+ {
+ SmallString<256> path(StringRef(LLVM_INSTALL_CMAKEDIR));
+ sys::fs::make_absolute(ActivePrefix, path);
+ ActiveCMakeDir = std::string(path.str());
+ }
ActiveIncludeOption = "-I" + ActiveIncludeDir;
}
diff --git a/tools/lto/CMakeLists.txt b/tools/lto/CMakeLists.txt
index 6e913519a809..85641eef721f 100644
--- a/tools/lto/CMakeLists.txt
+++ b/tools/lto/CMakeLists.txt
@@ -19,7 +19,7 @@ set(LLVM_EXPORTED_SYMBOL_FILE ${CMAKE_CURRENT_SOURCE_DIR}/lto.exports)
add_llvm_library(LTO SHARED ${SOURCES} DEPENDS intrinsics_gen)
install(FILES ${LLVM_MAIN_INCLUDE_DIR}/llvm-c/lto.h
- DESTINATION include/llvm-c
+ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/llvm-c
COMPONENT LTO)
if (APPLE)
diff --git a/tools/opt-viewer/CMakeLists.txt b/tools/opt-viewer/CMakeLists.txt
index 19b606933082..27b9f71b3d79 100644
--- a/tools/opt-viewer/CMakeLists.txt
+++ b/tools/opt-viewer/CMakeLists.txt
@@ -8,6 +8,6 @@ set (files
foreach (file ${files})
install(PROGRAMS ${file}
- DESTINATION share/opt-viewer
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/opt-viewer
COMPONENT opt-viewer)
endforeach (file)
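
The docs/CMake.rst hunk above documents the standard GNUInstallDirs cache variables (CMAKE_INSTALL_BINDIR, CMAKE_INSTALL_LIBDIR, CMAKE_INSTALL_INCLUDEDIR, CMAKE_INSTALL_DOCDIR) that this patch threads through the LLVM install rules. As a hedged illustration only — a hypothetical consumer derivation, not part of this commit — this is roughly how such cache variables are passed from a Nix expression; in practice the nixpkgs cmake setup hook already supplies sensible defaults for most of them:

{ stdenv, cmake, llvmPackages }:

stdenv.mkDerivation {
  pname = "llvm-consumer-example";   # hypothetical package, for illustration only
  version = "0.0.1";
  src = ./.;

  nativeBuildInputs = [ cmake ];
  buildInputs = [ llvmPackages.llvm ];

  # Pin the GNUInstallDirs layout described in the docs/CMake.rst hunk above;
  # the nixpkgs cmake hook normally sets equivalent flags automatically.
  cmakeFlags = [
    "-DCMAKE_INSTALL_BINDIR=bin"
    "-DCMAKE_INSTALL_LIBDIR=lib"
    "-DCMAKE_INSTALL_INCLUDEDIR=include"
    "-DCMAKE_INSTALL_DOCDIR=share/doc"
  ];
}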

View File

@ -1,37 +0,0 @@
{ lib
, stdenv
, llvm_meta
, fetch
, cmake
, llvm
, targetLlvm
, perl
, version
}:
stdenv.mkDerivation {
pname = "openmp";
inherit version;
src = fetch "openmp" "0p2n52676wlq6y9q99n5pivq6pvvda1p994r69fxj206ahn59jir";
nativeBuildInputs = [ cmake perl ];
buildInputs = [
(if stdenv.buildPlatform == stdenv.hostPlatform then llvm else targetLlvm)
];
meta = llvm_meta // {
homepage = "https://openmp.llvm.org/";
description = "Support for the OpenMP language";
longDescription = ''
The OpenMP subproject of LLVM contains the components required to build an
executable OpenMP program that are outside the compiler itself.
Contains the code for the runtime library against which code compiled by
"clang -fopenmp" must be linked before it can run and the library that
supports offload to target devices.
'';
# "All of the code is dual licensed under the MIT license and the UIUC
# License (a BSD-like license)":
license = with lib.licenses; [ mit ncsa ];
};
}

View File

@ -32,6 +32,8 @@ stdenv.mkDerivation (finalAttrs: {
"-DBLISP_USE_SYSTEM_LIBRARIES=ON"
];
env.NIX_CFLAGS_COMPILE = lib.optionalString stdenv.isDarwin "-Wno-error=implicit-function-declaration";
passthru.tests.version = testers.testVersion {
package = finalAttrs.finalPackage;
version = "v${finalAttrs.version}";
@ -42,6 +44,7 @@ stdenv.mkDerivation (finalAttrs: {
license = licenses.mit;
mainProgram = "blisp";
homepage = "https://github.com/pine64/blisp";
platforms = platforms.unix;
maintainers = [ maintainers.bdd ];
};
})

View File

@ -1,7 +1,13 @@
{ lib, stdenv, fetchzip, pkg-config, libusb1, systemdMinimal }:
let
binDirPrefix = if stdenv.isDarwin then "osx_" else "linux_";
in
{ lib
, stdenv
, fetchzip
, pkg-config
, fixDarwinDylibNames
, libusb1
, systemdMinimal
, darwin
}:
stdenv.mkDerivation rec {
pname = "libusbsio";
version = "2.1.11";
@ -15,15 +21,30 @@ stdenv.mkDerivation rec {
rm -r bin/*
'';
nativeBuildInputs = [ pkg-config ];
makeFlags = [
"CC=${stdenv.cc.targetPrefix}cc"
"BINDIR="
];
nativeBuildInputs = [
pkg-config
] ++ lib.optionals stdenv.isDarwin [
fixDarwinDylibNames
];
buildInputs = [
libusb1
] ++ lib.optionals stdenv.isDarwin (with darwin.apple_sdk.frameworks; [
AppKit
CoreFoundation
IOKit
]) ++ lib.optionals stdenv.isLinux [
systemdMinimal # libudev
];
installPhase = ''
runHook preInstall
install -D bin/${binDirPrefix}${stdenv.hostPlatform.parsed.cpu.name}/libusbsio${stdenv.hostPlatform.extensions.sharedLibrary} $out/lib/libusbsio${stdenv.hostPlatform.extensions.sharedLibrary}
install -D bin/libusbsio${stdenv.hostPlatform.extensions.sharedLibrary} $out/lib/libusbsio${stdenv.hostPlatform.extensions.sharedLibrary}
runHook postInstall
'';

View File

@ -30,6 +30,6 @@ stdenv.mkDerivation rec {
description = "C Library for reading, writing and updating ESRI Shapefiles";
homepage = "http://shapelib.maptools.org/";
license = licenses.gpl2;
maintainers = [ maintainers.ehmry ];
maintainers = with maintainers; teams.geospatial.members ++ [ ehmry ];
};
}

View File

@ -1,23 +1,40 @@
{ lib
, buildPythonPackage
, fetchPypi
, fetchFromGitHub
, pytestCheckHook
, pythonOlder
, pyquaternion
, matplotlib
, numpy
, pendulum
, pillow
, poetry-core
, pyquaternion
}:
buildPythonPackage rec {
pname = "bbox";
version = "0.9.4";
pyproject = true;
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
hash = "sha256-GGQhKkdwmrYPEhtldPY3WUInSniU/B40NZvt1gXEuzg=";
src = fetchFromGitHub {
owner = "varunagrawal";
repo = pname;
# matches the 0.9.4 release on PyPI, plus the tests
rev = "d3f07ed0e38b6015cf4181e3b3edae6a263f8565";
hash = "sha256-FrJ8FhlqwmnEB/QvPlkDfqZncNGPhwY9aagM9yv1LGs=";
};
propagatedBuildInputs = [ pyquaternion numpy ];
buildInputs = [ poetry-core ];
nativeCheckInputs = [
matplotlib
pendulum
pillow
pytestCheckHook
];
pythonImportsCheck = [ "bbox" ];

View File

@ -22,12 +22,6 @@ buildPythonPackage rec {
hash = "sha256-XMXSOwcH8fLgzXCcT+grjYxhBdtF4H/Vr+S7J4GYZSw=";
};
patches = [
# Fix deprecated numpy types
# https://sources.debian.org/data/main/b/brian/2.5.1-3/debian/patches/numpy1.24.patch
./numpy1.24.patch
];
propagatedBuildInputs = [
cython
jinja2

View File

@ -1,18 +0,0 @@
Description: Remove deprecated use of np.float
Author: Marcel Stimberg <marcel.stimberg@inserm.fr>
Bug-Debian: https://bugs.debian.org/1027193
Applied-Upstream: 61ef84b316a3d0a892298adf51abd8ac50900758
Last-Update: 2023-01-06
---
This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
--- brian.orig/brian2/units/fundamentalunits.py
+++ brian/brian2/units/fundamentalunits.py
@@ -1597,7 +1597,7 @@
unitless = np.array(self / best_unit, copy=False)
threshold = np.get_printoptions()['threshold'] // 100
if unitless.ndim == 0:
- sympy_quantity = np.float(unitless)
+ sympy_quantity = float(unitless)
elif unitless.ndim == 1:
array_str = np.array2string(unitless, separator=" & ", threshold=threshold,
max_line_width=sys.maxsize)

View File

@ -1,6 +1,7 @@
{ lib
, buildPythonPackage
, fetchPypi
, fetchpatch
, oldest-supported-numpy
, setuptools-scm
, wheel
@ -26,6 +27,16 @@ buildPythonPackage rec {
hash = "sha256-YAJa2f0w2CenKubnYLbP8HodDhabLB2hAkyw/CPkp6o=";
};
patches = [
# upstream patch fixes 2 failing unittests. remove on update
(fetchpatch {
name = "python311.patch";
url = "https://github.com/tskit-dev/msprime/commit/639125ec942cb898cf4a80638f229e11ce393fbc.patch";
hash = "sha256-peli4tdu8Bv21xIa5H8SRdfjQnTMO72IPFqybmSBSO8=";
includes = [ "tests/test_ancestry.py" ];
})
];
nativeBuildInputs = [
gsl
oldest-supported-numpy

View File

@ -15,7 +15,7 @@
buildPythonPackage rec {
pname = "owslib";
version = "0.29.2";
version = "0.29.3";
format = "setuptools";
disabled = pythonOlder "3.7";
@ -24,7 +24,7 @@ buildPythonPackage rec {
owner = "geopython";
repo = "OWSLib";
rev = "refs/tags/${version}";
hash = "sha256-dbL4VdnPszwiDO+UjluuyqeBRMKojTnZPEFKEYiIWS0=";
hash = "sha256-yAJXknSsGXcerzaOVSrFO4j5E6B/4/0JfoSxZ+Szmws=";
};
postPatch = ''
@ -72,7 +72,7 @@ buildPythonPackage rec {
meta = with lib; {
description = "Client for Open Geospatial Consortium web service interface standards";
homepage = "https://www.osgeo.org/projects/owslib/";
changelog = "https://github.com/geopython/OWSLib/blob/${version}/CHANGES.rst";
changelog = "https://github.com/geopython/OWSLib/releases/tag/${version}";
license = licenses.bsd3;
maintainers = teams.geospatial.members;
};

View File

@ -1,5 +1,13 @@
{ lib, fetchPypi, buildPythonPackage
, six, udev, pytest, mock, hypothesis, docutils
{ lib
, fetchPypi
, buildPythonPackage
, six
, udev
, pytest
, mock
, hypothesis
, docutils
, stdenvNoCC
}:
buildPythonPackage rec {
@ -11,7 +19,7 @@ buildPythonPackage rec {
hash = "sha256-deVNNyGPWsRbDaHw/ZzF5SajysPvHPrUEM96sziwFHE=";
};
postPatch = ''
postPatch = lib.optionalString stdenvNoCC.isLinux ''
substituteInPlace src/pyudev/_ctypeslib/utils.py \
--replace "find_library(name)" "'${lib.getLib udev}/lib/libudev.so'"
'';
@ -27,9 +35,10 @@ buildPythonPackage rec {
# https://github.com/pyudev/pyudev/issues/187
doCheck = false;
meta = {
meta = with lib; {
homepage = "https://pyudev.readthedocs.org/";
description = "Pure Python libudev binding";
license = lib.licenses.lgpl21Plus;
license = licenses.lgpl21Plus;
maintainers = with maintainers; [ frogamic ];
};
}

View File

@ -8,14 +8,14 @@
buildPythonPackage rec {
pname = "sqids";
version = "0.4.0";
version = "0.4.1";
pyproject = true;
disabled = pythonOlder "3.8";
src = fetchPypi {
inherit pname version;
hash = "sha256-U9nVpRc9PIM1ZO+bcL3RGIfAdlUMf+uFw/SpPTAjdnA=";
hash = "sha256-/8P7/vY0kb7ouUCpgGU4g0Xb77BtSeQVt6nkdcogD50=";
};
nativeBuildInputs = [

View File

@ -16,9 +16,10 @@
filelock,
jinja2,
networkx,
openai-triton,
sympy,
numpy, pyyaml, cffi, click, typing-extensions,
# ROCm build and `torch.compile` requires `openai-triton`
tritonSupport ? (!stdenv.isDarwin), openai-triton,
# Unit tests
hypothesis, psutil,
@ -303,12 +304,13 @@ in buildPythonPackage rec {
"-Wno-pass-failed"
] ++ [
"-Wno-unused-command-line-argument"
"-Wno-maybe-uninitialized"
"-Wno-uninitialized"
"-Wno-array-bounds"
"-Wno-stringop-overflow"
"-Wno-free-nonheap-object"
"-Wno-unused-result"
] ++ lib.optionals stdenv.cc.isGNU [
"-Wno-maybe-uninitialized"
"-Wno-stringop-overflow"
]));
nativeBuildInputs = [
@ -377,12 +379,10 @@ in buildPythonPackage rec {
# the following are required for tensorboard support
pillow six future tensorboard protobuf
# ROCm build and `torch.compile` requires openai-triton
openai-triton
# torch/csrc requires `pybind11` at runtime
pybind11
]
++ lib.optionals tritonSupport [ openai-triton ]
++ lib.optionals MPISupport [ mpi ]
++ lib.optionals rocmSupport [ rocmtoolkit_joined ];
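
The hunk above turns openai-triton into an optional dependency behind a new tritonSupport argument (defaulting to !stdenv.isDarwin). A minimal sketch of how that flag might be flipped, assuming the package is exposed as python3Packages.torch and overridable through the usual callPackage override mechanism:

# Hedged sketch: build torch without the Triton-backed torch.compile support.
# Assumes the tritonSupport argument from the diff above is reachable via the
# standard .override interface.
let
  pkgs = import <nixpkgs> { };
in
  pkgs.python3Packages.torch.override {
    tritonSupport = false;  # skip openai-triton entirely
  }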

View File

@ -1,5 +1,6 @@
{ lib
, stdenv
, fetchpatch
, buildPythonPackage
, python
, ndtypes
@ -15,6 +16,17 @@ buildPythonPackage {
propagatedBuildInputs = [ ndtypes ];
buildInputs = [ libndtypes ];
patches = [
# python311 fixes which are on main. remove on update
(fetchpatch {
name = "python311.patch";
url = "https://github.com/xnd-project/xnd/commit/e1a06d9f6175f4f4e1da369b7e907ad6b2952c00.patch";
hash = "sha256-xzrap+FL5be13bVdsJ3zeV7t57ZC4iyhuZhuLsOzHyE=";
})
];
postPatch = ''
substituteInPlace setup.py \
--replace 'include_dirs = ["libxnd", "ndtypes/python/ndtypes"] + INCLUDES' \

View File

@ -15,12 +15,11 @@
, runtimeShell
# List of Node.js runtimes the package should support
, nodeRuntimes ? [ "node20" ]
, nodejs_16
, nodejs_20
}:
# Node.js runtimes supported by upstream
assert builtins.all (x: builtins.elem x [ "node16" "node20" ]) nodeRuntimes;
assert builtins.all (x: builtins.elem x [ "node20" ]) nodeRuntimes;
buildDotnetModule rec {
pname = "github-runner";
@ -210,8 +209,6 @@ buildDotnetModule rec {
preCheck = ''
mkdir -p _layout/externals
'' + lib.optionalString (lib.elem "node16" nodeRuntimes) ''
ln -s ${nodejs_16} _layout/externals/node16
'' + lib.optionalString (lib.elem "node20" nodeRuntimes) ''
ln -s ${nodejs_20} _layout/externals/node20
'';
@ -250,8 +247,6 @@ buildDotnetModule rec {
# externals/node$version. As opposed to the official releases, we don't
# link the Alpine Node flavors.
mkdir -p $out/lib/externals
'' + lib.optionalString (lib.elem "node16" nodeRuntimes) ''
ln -s ${nodejs_16} $out/lib/externals/node16
'' + lib.optionalString (lib.elem "node20" nodeRuntimes) ''
ln -s ${nodejs_20} $out/lib/externals/node20
'' + ''
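
Because the assert above now only admits "node20", passing any other runtime fails at evaluation time. A small sketch of how the remaining knob is exercised via the standard override mechanism:

let
  pkgs = import <nixpkgs> { };
in
  # Explicitly request the only runtime upstream still supports; asking for
  # "node16" here would now trip the assert shown in the diff above.
  pkgs.github-runner.override { nodeRuntimes = [ "node20" ]; }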

View File

@ -1,60 +0,0 @@
{ elk7Version
, enableUnfree ? true
, lib
, stdenv
, makeWrapper
, fetchurl
, nodejs_16
, coreutils
, which
}:
let
nodejs = nodejs_16;
inherit (builtins) elemAt;
info = lib.splitString "-" stdenv.hostPlatform.system;
arch = elemAt info 0;
plat = elemAt info 1;
hashes =
{
x86_64-linux = "sha512-09XokG5krjxGnk34DhxpLOGRLjb2jd82uZtwGfrzSuuqMpBhkEptK2oySGxuGdHF8uowwlR5p5YO2TvBwMsWkQ==";
x86_64-darwin = "sha512-cqRJnvu730Jfkr6vwbHUFuZube1g522cmvnDwTzhGGK6VN/7+9XL3vavqtUPDVdTLTUk+DrNiIQK7MaJH3SHMg==";
aarch64-linux = "sha512-zhtYThz5j4+w5gI1JWSnHv709Tk23eegVsrtYmdaYhZiTw2yvCTYI5uNAfBjBr8XPdp6CKF4e6Bh2wHKDYg1mg==";
aarch64-darwin = "sha512-cqRJnvu730Jfkr6vwbHUFuZube1g522cmvnDwTzhGGK6VN/7+9XL3vavqtUPDVdTLTUk+DrNiIQK7MaJH3SHMg==";
};
in stdenv.mkDerivation rec {
pname = "kibana";
version = elk7Version;
src = fetchurl {
url = "https://artifacts.elastic.co/downloads/kibana/${pname}-${version}-${plat}-${arch}.tar.gz";
hash = hashes.${stdenv.hostPlatform.system} or (throw "Unknown architecture");
};
patches = [
# Kibana specifically requires nodejs 10.15.2, but nodejs in nixpkgs is at 10.15.3.
# The <nixpkgs/nixos/tests/elk.nix> test succeeds with this newer version, so let's just
# disable the version check.
./disable-nodejs-version-check-7.patch
];
nativeBuildInputs = [ makeWrapper ];
installPhase = ''
mkdir -p $out/libexec/kibana $out/bin
mv * $out/libexec/kibana/
rm -r $out/libexec/kibana/node
makeWrapper $out/libexec/kibana/bin/kibana $out/bin/kibana \
--prefix PATH : "${lib.makeBinPath [ nodejs coreutils which ]}"
sed -i 's@NODE=.*@NODE=${nodejs}/bin/node@' $out/libexec/kibana/bin/kibana
'';
meta = with lib; {
description = "Visualize logs and time-stamped data";
homepage = "http://www.elasticsearch.org/overview/kibana";
license = licenses.elastic20;
maintainers = with maintainers; [ offline basvandijk ];
platforms = with platforms; unix;
};
}

View File

@ -1,19 +0,0 @@
diff --git a/src/setup_node_env/node_version_validator.js b/src/setup_node_env/node_version_validator.js
index 3f611e5a..f5c60c85 100644
--- a/src/setup_node_env/node_version_validator.js
+++ b/src/setup_node_env/node_version_validator.js
@@ -25,11 +25,11 @@ var pkg = require('../../package.json'); // Note: This is written in ES5 so we c
var currentVersion = process && process.version || null;
var rawRequiredVersion = pkg && pkg.engines && pkg.engines.node || null;
var requiredVersion = rawRequiredVersion ? 'v' + rawRequiredVersion : rawRequiredVersion;
-var isVersionValid = !!currentVersion && !!requiredVersion && currentVersion === requiredVersion; // Validates current the NodeJS version compatibility when Kibana starts.
+var isVersionValid = !!currentVersion && !!requiredVersion; // Validates current the NodeJS version compatibility when Kibana starts.
if (!isVersionValid) {
var errorMessage = 'Kibana does not support the current Node.js version ' + currentVersion + '. Please use Node.js ' + requiredVersion + '.'; // Actions to apply when validation fails: error report + exit.
console.error(errorMessage);
process.exit(1);
-}
\ No newline at end of file
+}

View File

@ -1,26 +0,0 @@
{ callPackage, lib, overrideCC, pkgs, buildPackages, openssl, python3, enableNpm ? true }:
let
# Clang 16+ cannot build Node v14 due to -Wenum-constexpr-conversion errors.
# Use an older version of clang with the current libc++ for compatibility (e.g., with icu).
ensureCompatibleCC = packages:
if packages.stdenv.cc.isClang && lib.versionAtLeast (lib.getVersion packages.stdenv.cc.cc) "16"
then overrideCC packages.llvmPackages_15.stdenv (packages.llvmPackages_15.stdenv.cc.override {
inherit (packages.llvmPackages) libcxx;
extraPackages = [ packages.llvmPackages.libcxxabi ];
})
else packages.stdenv;
buildNodejs = callPackage ./nodejs.nix {
inherit openssl;
stdenv = ensureCompatibleCC pkgs;
buildPackages = buildPackages // { stdenv = ensureCompatibleCC buildPackages; };
python = python3;
};
in
buildNodejs {
inherit enableNpm;
version = "14.21.3";
sha256 = "sha256-RY7AkuYK1wDdzwectj1DXBXaTHuz0/mbmo5YqZ5UB14=";
patches = lib.optional pkgs.stdenv.isDarwin ./bypass-xcodebuild.diff;
}

View File

@ -1,35 +0,0 @@
{ callPackage, lib, overrideCC, pkgs, buildPackages, openssl, python3, fetchpatch, enableNpm ? true }:
let
# Clang 16+ cannot build Node v14 due to -Wenum-constexpr-conversion errors.
# Use an older version of clang with the current libc++ for compatibility (e.g., with icu).
ensureCompatibleCC = packages:
if packages.stdenv.cc.isClang && lib.versionAtLeast (lib.getVersion packages.stdenv.cc.cc) "16"
then overrideCC packages.llvmPackages_15.stdenv (packages.llvmPackages_15.stdenv.cc.override {
inherit (packages.llvmPackages) libcxx;
extraPackages = [ packages.llvmPackages.libcxxabi ];
})
else packages.stdenv;
buildNodejs = callPackage ./nodejs.nix {
inherit openssl;
stdenv = ensureCompatibleCC pkgs;
buildPackages = buildPackages // { stdenv = ensureCompatibleCC buildPackages; };
python = python3;
};
npmPatches = callPackage ./npm-patches.nix { };
in
buildNodejs {
inherit enableNpm;
# If you do upgrade here, please update in pkgs/top-level/release.nix
# the permitted insecure version to ensure it gets cached for our users
# and backport this to stable release (23.05).
version = "16.20.2";
sha256 = "sha256-V28aA8RV5JGo0TK1h+trO4RlH8iXS7NjhDPdRNIsj0k=";
patches = [
./disable-darwin-v8-system-instrumentation.patch
./bypass-darwin-xcrun-node16.patch
./node-npm-build-npm-package-logic-node16.patch
] ++ npmPatches;
}
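
The comment above refers to the "permitted insecure version" machinery: while Node.js 16 was end-of-life but still packaged, evaluating it required an explicit opt-in. A hedged sketch of what that opt-in looked like on the consumer side; the exact "<pname>-<version>" entry string is an assumption here, for illustration only:

# Hypothetical consumer-side opt-in for an insecure, end-of-life package.
import <nixpkgs> {
  config.permittedInsecurePackages = [
    "nodejs-16.20.2"   # assumed entry name; matches the pname-version pattern
  ];
}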

View File

@ -2,7 +2,7 @@
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_14"}:
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_18"}:
let
nodeEnv = import ./node-env.nix {

View File

@ -26,6 +26,8 @@ stdenv.mkDerivation (finalAttrs: {
"BUILD_PATH=$(out)/bin"
];
env.NIX_CFLAGS_COMPILE = "-Wno-error=implicit-function-declaration";
postInstall = ''
mkdir -p $out/Library/LaunchDaemons
cp ${./org.nixos.skhd.plist} $out/Library/LaunchDaemons/org.nixos.skhd.plist

View File

@ -1,20 +0,0 @@
diff --git a/ci/build/build-vscode.sh b/ci/build/build-vscode.sh
index a72549fb..3aed1ad5 100755
--- a/ci/build/build-vscode.sh
+++ b/ci/build/build-vscode.sh
@@ -58,7 +58,6 @@ main() {
# telemetry available; telemetry can still be disabled by flag or setting).
# This needs to be done before building as Code will read this file and embed
# it into the client-side code.
- git checkout product.json # Reset in case the script exited early.
cp product.json product.original.json # Since jq has no inline edit.
jq --slurp '.[0] * .[1]' product.original.json <(
cat << EOF
@@ -105,7 +104,6 @@ EOF
# Reset so if you develop after building you will not be stuck with the wrong
# commit (the dev client will use `oss-dev` but the dev server will still use
# product.json which will have `stable-$commit`).
- git checkout product.json
popd

View File

@ -1,327 +0,0 @@
{ lib
, stdenv
, fetchFromGitHub
, buildGoModule
, makeWrapper
, cacert
, moreutils
, jq
, git
, rsync
, pkg-config
, yarn
, python3
, esbuild
, nodejs
, node-gyp
, libsecret
, xorg
, ripgrep
, AppKit
, Cocoa
, CoreServices
, Security
, cctools
, xcbuild
, quilt
, nixosTests
}:
let
system = stdenv.hostPlatform.system;
python = python3;
yarn' = yarn.override { inherit nodejs; };
defaultYarnOpts = [ ];
esbuild' = esbuild.override {
buildGoModule = args: buildGoModule (args // rec {
version = "0.16.17";
src = fetchFromGitHub {
owner = "evanw";
repo = "esbuild";
rev = "v${version}";
hash = "sha256-8L8h0FaexNsb3Mj6/ohA37nYLFogo5wXkAhGztGUUsQ=";
};
vendorHash = "sha256-+BfxCyg0KkDQpHt/wycy/8CTG6YBA/VJvJFhhzUnSiQ=";
});
};
# replaces esbuild's download script with a binary from nixpkgs
patchEsbuild = path: version: ''
mkdir -p ${path}/node_modules/esbuild/bin
jq "del(.scripts.postinstall)" ${path}/node_modules/esbuild/package.json | sponge ${path}/node_modules/esbuild/package.json
sed -i 's/${version}/${esbuild'.version}/g' ${path}/node_modules/esbuild/lib/main.js
ln -s -f ${esbuild'}/bin/esbuild ${path}/node_modules/esbuild/bin/esbuild
'';
# Comment from @code-asher, the code-server maintainer
# See https://github.com/NixOS/nixpkgs/pull/240001#discussion_r1244303617
#
# If the commit is missing it will break display languages (Japanese, Spanish,
# etc). For some reason VS Code has a hard dependency on the commit being set
# for that functionality.
# The commit is also used in cache busting. Without the commit you could run
# into issues where the browser is loading old versions of assets from the
# cache.
# Lastly, it can be helpful for the commit to be accurate in bug reports
# especially when they are built outside of our CI as sometimes the version
# numbers can be unreliable (since they are arbitrarily provided).
#
# To compute the commit when upgrading this derivation, do:
# `$ git rev-parse <git-rev>` where <git-rev> is the git revision of the `src`
# Example: `$ git rev-parse v4.16.1`
commit = "94ef3776ad7bebfb5780dfc9632e04d20d5c9a6c";
in
stdenv.mkDerivation (finalAttrs: {
pname = "code-server";
version = "4.16.1";
src = fetchFromGitHub {
owner = "coder";
repo = "code-server";
rev = "v${finalAttrs.version}";
fetchSubmodules = true;
hash = "sha256-h4AooHHKV/EfN2S1z7CQKqnYW3uA3sKhSW4senlzjxI=";
};
yarnCache = stdenv.mkDerivation {
name = "${finalAttrs.pname}-${finalAttrs.version}-${system}-yarn-cache";
inherit (finalAttrs) src;
nativeBuildInputs = [ yarn' git cacert ];
buildPhase = ''
runHook preBuild
export HOME=$PWD
export GIT_SSL_CAINFO="${cacert}/etc/ssl/certs/ca-bundle.crt"
yarn --cwd "./vendor" install --modules-folder modules --ignore-scripts --frozen-lockfile
yarn config set yarn-offline-mirror $out
find "$PWD" -name "yarn.lock" -printf "%h\n" | \
xargs -I {} yarn --cwd {} \
--frozen-lockfile --ignore-scripts --ignore-platform \
--ignore-engines --no-progress --non-interactive
find ./lib/vscode -name "yarn.lock" -printf "%h\n" | \
xargs -I {} yarn --cwd {} \
--ignore-scripts --ignore-engines
runHook postBuild
'';
outputHashMode = "recursive";
outputHashAlgo = "sha256";
outputHash = "sha256-vkju+oxEYrEXFAnjz/Mf1g0ZhxBALLAaRuWE0swSWwM=";
};
nativeBuildInputs = [
nodejs
yarn'
python
pkg-config
makeWrapper
git
rsync
jq
moreutils
quilt
];
buildInputs = lib.optionals (!stdenv.isDarwin) [ libsecret ]
++ (with xorg; [ libX11 libxkbfile ])
++ lib.optionals stdenv.isDarwin [
AppKit
Cocoa
CoreServices
Security
cctools
xcbuild
];
patches = [
# Remove all git calls from the VS Code build script except `git rev-parse
# HEAD` which is replaced in postPatch with the commit.
./build-vscode-nogit.patch
];
postPatch = ''
export HOME=$PWD
patchShebangs ./ci
# inject git commit
substituteInPlace ./ci/build/build-vscode.sh \
--replace '$(git rev-parse HEAD)' "${commit}"
substituteInPlace ./ci/build/build-release.sh \
--replace '$(git rev-parse HEAD)' "${commit}"
'';
configurePhase = ''
runHook preConfigure
# run yarn offline by default
echo '--install.offline true' >> .yarnrc
# set default yarn opts
${lib.concatMapStrings (option: ''
yarn --offline config set ${option}
'') defaultYarnOpts}
# set offline mirror to yarn cache we created in previous steps
yarn --offline config set yarn-offline-mirror "${finalAttrs.yarnCache}"
# skip unnecessary electron download
export ELECTRON_SKIP_BINARY_DOWNLOAD=1
# set nodedir to prevent node-gyp from downloading headers
# taken from https://nixos.org/manual/nixpkgs/stable/#javascript-tool-specific
mkdir -p $HOME/.node-gyp/${nodejs.version}
echo 9 > $HOME/.node-gyp/${nodejs.version}/installVersion
ln -sfv ${nodejs}/include $HOME/.node-gyp/${nodejs.version}
export npm_config_nodedir=${nodejs}
# use updated node-gyp. fixes the following error on Darwin:
# PermissionError: [Errno 1] Operation not permitted: '/usr/sbin/pkgutil'
export npm_config_node_gyp=${node-gyp}/lib/node_modules/node-gyp/bin/node-gyp.js
runHook postConfigure
'';
buildPhase = ''
runHook preBuild
# install code-server dependencies
yarn --offline --ignore-scripts
# apply patches
quilt push -a
# patch shebangs of everything to allow binary packages to build
patchShebangs .
export PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=1
export SKIP_SUBMODULE_DEPS=1
export NODE_OPTIONS=--openssl-legacy-provider
# rebuild binary packages now that scripts have been patched
echo "----- NPM rebuild"
npm rebuild --prefer-offline
# Replicate ci/dev/postinstall.sh
echo "----- Replicate ci/dev/postinstall.sh"
yarn --cwd "./vendor" install --modules-folder modules --offline --ignore-scripts --frozen-lockfile
# remove all built-in extensions, as these are 3rd party extensions that
# get downloaded from vscode marketplace
jq --slurp '.[0] * .[1]' "./lib/vscode/product.json" <(
cat << EOF
{
"builtInExtensions": []
}
EOF
) | sponge ./lib/vscode/product.json
# disable automatic updates
sed -i '/update.mode/,/\}/{s/default:.*/default: "none",/g}' \
lib/vscode/src/vs/platform/update/common/update.config.contribution.ts
# Patch out remote download of nodejs from build script
patch -p1 -i ${./remove-node-download.patch}
# Fetch packages for vscode
find ./lib/vscode -name "yarn.lock" -printf "%h\n" | \
xargs -I {} yarn --cwd {} \
--frozen-lockfile --ignore-scripts --ignore-engines
# patch shebangs of everything to allow binary packages to build
patchShebangs .
${patchEsbuild "./lib/vscode/build" "0.12.6"}
${patchEsbuild "./lib/vscode/extensions" "0.11.23"}
'' + lib.optionalString stdenv.isDarwin ''
# use prebuilt binary for @parcel/watcher, which requires macOS SDK 10.13+
# (see issue #101229)
pushd ./lib/vscode/remote/node_modules/@parcel/watcher
mkdir -p ./build/Release
mv ./prebuilds/darwin-x64/node.napi.glibc.node ./build/Release/watcher.node
jq "del(.scripts) | .gypfile = false" ./package.json | sponge ./package.json
popd
'' + ''
# put ripgrep binary into bin, so postinstall does not try to download it
find -name ripgrep -type d \
-execdir mkdir -p {}/bin \; \
-execdir ln -s ${ripgrep}/bin/rg {}/bin/rg \;
# run postinstall scripts after patching
find ./lib/vscode \( -path "*/node_modules/*" -or -path "*/extensions/*" \) \
-and -type f -name "yarn.lock" -printf "%h\n" | \
xargs -I {} sh -c 'jq -e ".scripts.postinstall" {}/package.json >/dev/null && yarn --cwd {} postinstall --frozen-lockfile --offline || true'
# build code-server
yarn build
# build vscode
VERSION=${finalAttrs.version} yarn build:vscode
# inject version into package.json
jq --slurp '.[0] * .[1]' ./package.json <(
cat << EOF
{
"version": "${finalAttrs.version}"
}
EOF
) | sponge ./package.json
# create release
yarn release
runHook postBuild
'';
installPhase = ''
runHook preInstall
mkdir -p $out/libexec/code-server $out/bin
# copy release to libexec path
cp -R -T release "$out/libexec/code-server"
# install only production dependencies
yarn --offline --cwd "$out/libexec/code-server" --production
# create wrapper
makeWrapper "${nodejs}/bin/node" "$out/bin/code-server" \
--add-flags "$out/libexec/code-server/out/node/entry.js"
runHook postInstall
'';
passthru = {
prefetchYarnCache = lib.overrideDerivation finalAttrs.yarnCache (d: {
outputHash = lib.fakeSha256;
});
tests = {
inherit (nixosTests) code-server;
};
# vscode-with-extensions compatibility
executableName = "code-server";
longName = "Visual Studio Code Server";
};
meta = {
description = "Run VS Code on a remote server";
longDescription = ''
code-server is VS Code running on a remote server, accessible through the
browser.
'';
homepage = "https://github.com/coder/code-server";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ offline henkery code-asher ];
platforms = [ "x86_64-linux" "aarch64-linux" "x86_64-darwin" ];
mainProgram = "code-server";
};
})

View File

@ -1,10 +0,0 @@
--- ./vendor/modules/code-oss-dev/node_modules/playwright/install.js
+++ ./vendor/modules/code-oss-dev/node_modules/playwright/install.js
@@ -14,6 +14,4 @@
* limitations under the License.
*/
-const { installDefaultBrowsersForNpmInstall } = require('playwright-core/lib/utils/registry');
-
-installDefaultBrowsersForNpmInstall();
+process.stdout.write('Browser install disabled by Nix build script\n');

View File

@ -1,28 +0,0 @@
--- ./lib/vscode/build/gulpfile.reh.js
+++ ./lib/vscode/build/gulpfile.reh.js
@@ -268,9 +268,6 @@
.pipe(util.stripSourceMappingURL())
.pipe(jsFilter.restore);
- const nodePath = `.build/node/v${nodeVersion}/${platform}-${arch}`;
- const node = gulp.src(`${nodePath}/**`, { base: nodePath, dot: true });
-
let web = [];
if (type === 'reh-web') {
web = [
@@ -287,7 +284,6 @@
license,
sources,
deps,
- node,
...web
);
@@ -385,7 +381,6 @@
const destinationFolderName = `vscode-${type}${dashed(platform)}${dashed(arch)}`;
const serverTaskCI = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}${dashed(minified)}-ci`, task.series(
- gulp.task(`node-${platform}-${arch}`),
util.rimraf(path.join(BUILD_ROOT, destinationFolderName)),
packageTask(type, platform, arch, sourceFolderName, destinationFolderName)
));

View File

@ -0,0 +1,40 @@
{ lib
, buildNpmPackage
, fetchFromGitHub
, nodejs
, python3
}:
buildNpmPackage rec {
pname = "jitsi-excalidraw-backend";
version = "17";
src = fetchFromGitHub {
owner = "jitsi";
repo = "excalidraw-backend";
rev = "x${version}";
hash = "sha256-aQePkVA8KRL06VewiD0ePRpj88pAItcV7B2SBnRRtCs=";
};
npmDepsHash = "sha256-BJqjaqTeg5i+ECGMuiBYVToK2i2XCOVP9yeDFz6nP4k=";
nativeBuildInputs = [ python3 ];
installPhase = ''
mkdir -p $out/share
cp -r {node_modules,dist} $out/share
'';
postFixup = ''
makeWrapper ${nodejs}/bin/node $out/bin/jitsi-excalidraw-backend \
--add-flags dist/index.js \
--chdir $out/share
'';
meta = with lib; {
description = "Excalidraw collaboration backend for Jitsi";
homepage = "https://github.com/jitsi/excalidraw-backend";
license = licenses.mit;
maintainers = with maintainers; [ camillemndn ];
};
}

View File

@ -2,7 +2,7 @@
{pkgs ? import ../../.. {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_14"}:
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_18"}:
let
nodeEnv = import ../../development/node-packages/node-env.nix {

View File

@ -10,9 +10,9 @@
]
},
"calendar": {
"sha256": "1827l5hxsvfnyc2m78rs95cnb2fwi14nl7ricpl25ndh1j0a7ccz",
"url": "https://github.com/nextcloud-releases/calendar/releases/download/v4.5.2/calendar-v4.5.2.tar.gz",
"version": "4.5.2",
"sha256": "1iq8np72pjw2bn4zmbp2rmrh12n4z62rwrz2bb94xjza1fws7b2v",
"url": "https://github.com/nextcloud-releases/calendar/releases/download/v4.5.3/calendar-v4.5.3.tar.gz",
"version": "4.5.3",
"description": "The Calendar app is a user interface for Nextcloud's CalDAV server. Easily sync events from various devices with your Nextcloud and edit them online.\n\n* 🚀 **Integration with other Nextcloud apps!** Currently Contacts - more to come.\n* 🌐 **WebCal Support!** Want to see your favorite teams matchdays in your calendar? No problem!\n* 🙋 **Attendees!** Invite people to your events\n* ⌚️ **Free/Busy!** See when your attendees are available to meet\n* ⏰ **Reminders!** Get alarms for events inside your browser and via email\n* 🔍 Search! Find your events at ease\n* ☑️ Tasks! See tasks with a due date directly in the calendar\n* 🙈 **Were not reinventing the wheel!** Based on the great [c-dav library](https://github.com/nextcloud/cdav-library), [ical.js](https://github.com/mozilla-comm/ical.js) and [fullcalendar](https://github.com/fullcalendar/fullcalendar) libraries.",
"homepage": "https://github.com/nextcloud/calendar/",
"licenses": [
@ -29,6 +29,16 @@
"agpl"
]
},
"cookbook": {
"sha256": "18rzvdqd99nlkk3p0y9y8b17ihw5c4c9wsx8psq6xadspm97002y",
"url": "https://github.com/nextcloud/cookbook/releases/download/v0.10.2/Cookbook-0.10.2.tar.gz",
"version": "0.10.2",
"description": "A library for all your recipes. It uses JSON files following the schema.org recipe format. To add a recipe to the collection, you can paste in the URL of the recipe, and the provided web page will be parsed and downloaded to whichever folder you specify in the app settings.",
"homepage": "",
"licenses": [
"agpl"
]
},
"cospend": {
"sha256": "1rg9k33yapbl8chpxx3bjyzc9h4krjavksbxsvw14kpm01rss3j9",
"url": "https://github.com/julien-nc/cospend-nc/releases/download/v1.5.10/cospend-1.5.10.tar.gz",
@ -40,9 +50,9 @@
]
},
"deck": {
"sha256": "0j228lbf0zrm2sq45f9abgkln1qzgrkw8ac5r6fhyi0qfxcpmm0m",
"url": "https://github.com/nextcloud-releases/deck/releases/download/v1.9.3/deck-v1.9.3.tar.gz",
"version": "1.9.3",
"sha256": "15hir3fssvzyysazbaad8qiz0bv00x73c2wfdj5aviz9h0gp4x4v",
"url": "https://github.com/nextcloud-releases/deck/releases/download/v1.9.4/deck-v1.9.4.tar.gz",
"version": "1.9.4",
"description": "Deck is a kanban style organization tool aimed at personal planning and project organization for teams integrated with Nextcloud.\n\n\n- 📥 Add your tasks to cards and put them in order\n- 📄 Write down additional notes in Markdown\n- 🔖 Assign labels for even better organization\n- 👥 Share with your team, friends or family\n- 📎 Attach files and embed them in your Markdown description\n- 💬 Discuss with your team using comments\n- ⚡ Keep track of changes in the activity stream\n- 🚀 Get your project organized",
"homepage": "https://github.com/nextcloud/deck",
"licenses": [
@ -93,7 +103,7 @@
"sha256": "0imddmyg9s1v3d20spr26g7mbyz2mwl3v2l1a4nz5kaxl3a6fsr2",
"url": "https://github.com/nextcloud-releases/impersonate/releases/download/v1.13.1/impersonate-v1.13.1.tar.gz",
"version": "1.13.1",
"description": "By installing the impersonate app of your Nextcloud you enable administrators to impersonate other users on the Nextcloud server. This is especially useful for debugging issues reported by users.\n\nTo impersonate a user an administrator has to simply follow the following four steps:\n\n1. Login as administrator to Nextcloud.\n2. Open users administration interface.\n3. Select the impersonate button on the affected user.\n4. Confirm the impersonation.\n\nThe administrator is then logged-in as the user, to switch back to the regular user account they simply have to press the logout button.\n\n**Note:**\n\n- This app is not compatible with instances that have encryption enabled.\n- While impersonate actions are logged note that actions performed impersonated will be logged as the impersonated user.\n- Impersonating a user is only possible after their first login.",
"description": "By installing the impersonate app of your Nextcloud you enable administrators to impersonate other users on the Nextcloud server. This is especially useful for debugging issues reported by users.\n\nTo impersonate a user an administrator has to simply follow the following four steps:\n\n1. Login as administrator to Nextcloud.\n2. Open users administration interface.\n3. Select the impersonate button on the affected user.\n4. Confirm the impersonation.\n\nThe administrator is then logged-in as the user, to switch back to the regular user account they simply have to press the logout button.\n\n**Note:**\n\n- This app is not compatible with instances that have encryption enabled.\n- While impersonate actions are logged note that actions performed impersonated will be logged as the impersonated user.\n- Impersonating a user is only possible after their first login.\n- You can limit which users/groups can use impersonation in Administration settings > Additional settings.",
"homepage": "https://github.com/nextcloud/impersonate",
"licenses": [
"agpl"
@ -110,10 +120,10 @@
]
},
"mail": {
"sha256": "0mr3npd48j444lalkph1z8rsz5jllpgdrcqsjdrqgmf70y0g55mh",
"url": "https://github.com/nextcloud-releases/mail/releases/download/v3.4.3/mail-v3.4.3.tar.gz",
"version": "3.4.3",
"description": "**💌 A mail app for Nextcloud**\n\n- **🚀 Integration with other Nextcloud apps!** Currently Contacts, Calendar & Files more to come.\n- **📥 Multiple mail accounts!** Personal and company account? No problem, and a nice unified inbox. Connect any IMAP account.\n- **🔒 Send & receive encrypted mails!** Using the great [Mailvelope](https://mailvelope.com) browser extension.\n- **🙈 Were not reinventing the wheel!** Based on the great [Horde](https://horde.org) libraries.\n- **📬 Want to host your own mail server?** We do not have to reimplement this as you could set up [Mail-in-a-Box](https://mailinabox.email)!",
"sha256": "0ascail3vfkv3mm5s4s3ma74d6qxai76kdqxknmljnw56xb19qfv",
"url": "https://github.com/nextcloud-releases/mail/releases/download/v3.4.4/mail-v3.4.4.tar.gz",
"version": "3.4.4",
"description": "**💌 A mail app for Nextcloud**\n\n- **🚀 Integration with other Nextcloud apps!** Currently Contacts, Calendar & Files more to come.\n- **📥 Multiple mail accounts!** Personal and company account? No problem, and a nice unified inbox. Connect any IMAP account.\n- **🔒 Send & receive encrypted mails!** Using the great [Mailvelope](https://mailvelope.com) browser extension.\n- **🙈 Were not reinventing the wheel!** Based on the great [Horde](https://horde.org) libraries.\n- **📬 Want to host your own mail server?** We do not have to reimplement this as you could set up [Mail-in-a-Box](https://mailinabox.email)!\n\n## Ethical AI Rating\n\n### Priority Inbox\n\nPositive:\n* The software for training and inferencing of this model is open source.\n* The model is created and trained on-premises based on the user's own data.\n* The training data is accessible to the user, making it possible to check or correct for bias or optimise the performance and CO2 usage.\n\n### Thread Summaries (opt-in)\n\n**Rating:** 🟢/🟡/🟠/🔴\n\nThe rating depends on the installed text processing backend. See [the rating overview](https://docs.nextcloud.com/server/latest/admin_manual/ai/index.html) for details.\n\nLearn more about the Nextcloud Ethical AI Rating [in our blog](https://nextcloud.com/blog/nextcloud-ethical-ai-rating/).",
"homepage": "https://github.com/nextcloud/mail#readme",
"licenses": [
"agpl"
@ -130,15 +140,25 @@
]
},
"memories": {
"sha256": "0i146mlg27phs407clclksn1wjkyl0c5fb2zw0npbx5dvqnpglcd",
"url": "https://github.com/pulsejet/memories/releases/download/v6.0.1/memories.tar.gz",
"version": "6.0.1",
"sha256": "1yn1wkv4jxpc8faf4rl46yfddyplnryrkws3jz0x1wcr9zlxdkng",
"url": "https://github.com/pulsejet/memories/releases/download/v6.1.0/memories.tar.gz",
"version": "6.1.0",
"description": "# Memories: Photo Management for Nextcloud\n\nMemories is a *batteries-included* photo management solution for Nextcloud with advanced features including:\n\n- **📸 Timeline**: Sort photos and videos by date taken, parsed from Exif data.\n- **⏪ Rewind**: Jump to any time in the past instantly and relive your memories.\n- **🤖 AI Tagging**: Group photos by people and objects, powered by [recognize](https://github.com/nextcloud/recognize) and [facerecognition](https://github.com/matiasdelellis/facerecognition).\n- **🖼️ Albums**: Create albums to group photos and videos together. Then share these albums with others.\n- **🫱🏻‍🫲🏻 External Sharing**: Share photos and videos with people outside of your Nextcloud instance.\n- **📱 Mobile Support**: Work from any device, of any shape and size through the web app.\n- **✏️ Edit Metadata**: Edit dates and other metadata on photos quickly and in bulk.\n- **📦 Archive**: Store photos you don't want to see in your timeline in a separate folder.\n- **📹 Video Transcoding**: Transcode videos and use HLS for maximal performance.\n- **🗺️ Map**: View your photos on a map, tagged with accurate reverse geocoding.\n- **📦 Migration**: Migrate easily from Nextcloud Photos and Google Takeout.\n- **⚡️ Performance**: Do all this very fast.\n\n## 🚀 Installation\n\n1. Install the app from the Nextcloud app store (try a demo [here](https://demo.memories.gallery/apps/memories/)).\n1. Perform the recommended [configuration steps](https://memories.gallery/config/).\n1. Run `php occ memories:index` to generate metadata indices for existing photos.\n1. Open the 📷 Memories app in Nextcloud and set the directory containing your photos.",
"homepage": "https://memories.gallery",
"licenses": [
"agpl"
]
},
"music": {
"sha256": "06w82v34csx4scl5n4k4fpdxiivrzjb3yvj3hh4bc15gdz68cis9",
"url": "https://github.com/owncloud/music/releases/download/v1.9.1/music_1.9.1_for_nextcloud.tar.gz",
"version": "1.9.1",
"description": "A stand-alone music player app and a \"lite\" player for the Files app\n\n- On modern browsers, supports audio types .mp3, .ogg, .m4a, .m4b, .flac, .wav, and more\n- Playlist support with import from m3u, m3u8, and pls files\n- Browse by artists, albums, genres, or folders\n- Gapless play\n- Filter the shown content with the search function\n- Play internet radio and podcast channels\n- Setup Last.fm connection to see background information on artists, albums, and songs\n- Control with media control keys on the keyboard or OS\n- The app can handle libraries consisting of thousands of albums and tens of thousands of songs\n- Includes a server backend compatible with the Subsonic and Ampache protocols, allowing playback and browsing of your library on various external apps e.g. on Android or iPhone",
"homepage": "https://github.com/owncloud/music",
"licenses": [
"agpl"
]
},
"news": {
"sha256": "1bkh73h0ibxyjpy3cmvhnlr7isvgqkcxdw2dw15mxksj2cln9wki",
"url": "https://github.com/nextcloud/news/releases/download/24.0.0/news.tar.gz",
@ -160,9 +180,9 @@
]
},
"notify_push": {
"sha256": "0hdxnkar2ibis5p0gp3yr1i6894la9wxq4pzrbqdrq2cgvsj6a18",
"url": "https://github.com/nextcloud-releases/notify_push/releases/download/v0.6.3/notify_push-v0.6.3.tar.gz",
"version": "0.6.3",
"sha256": "0lwyy1pnyfw464vab1v5k8q0rgarrj5w12cf1nsywjaafb8y1ym0",
"url": "https://github.com/nextcloud-releases/notify_push/releases/download/v0.6.5/notify_push-v0.6.5.tar.gz",
"version": "0.6.5",
"description": "Push update support for desktop app.\n\nOnce the app is installed, the push binary needs to be setup. You can either use the setup wizard with `occ notify_push:setup` or see the [README](http://github.com/nextcloud/notify_push) for detailed setup instructions",
"homepage": "",
"licenses": [
@ -180,9 +200,9 @@
]
},
"polls": {
"sha256": "1v5zb164f60qskfiv02l9x2v0d4rayacg5qivd70dawmyqnz4vmd",
"url": "https://github.com/nextcloud/polls/releases/download/v5.3.2/polls.tar.gz",
"version": "5.3.2",
"sha256": "1dmws4ybv3pzxz5g9c7ghblx0nfal2ssmsg7hjcs1n4xcjc6585p",
"url": "https://github.com/nextcloud/polls/releases/download/v5.4.2/polls.tar.gz",
"version": "5.4.2",
"description": "A polls app, similar to Doodle/Dudle with the possibility to restrict access (members, certain groups/users, hidden and public).",
"homepage": "https://github.com/nextcloud/polls",
"licenses": [
@@ -240,9 +260,9 @@
]
},
"twofactor_nextcloud_notification": {
"sha256": "03fp8wcwhyp8ivrcf20klgg17sxc9fia5sa44rbrv3cicmwmw50b",
"url": "https://github.com/nextcloud-releases/twofactor_nextcloud_notification/releases/download/v3.7.0/twofactor_nextcloud_notification-v3.7.0.tar.gz",
"version": "3.7.0",
"sha256": "0gaqgzbryim580dxarak7p4g3wd8wp3w6lw9jhl84jh46wrsbrj8",
"url": "https://github.com/nextcloud-releases/twofactor_nextcloud_notification/releases/download/v3.8.0/twofactor_nextcloud_notification-v3.8.0.tar.gz",
"version": "3.8.0",
"description": "Allows using any of your logged in devices as second factor",
"homepage": "https://github.com/nextcloud/twofactor_nextcloud_notification",
"licenses": [
@@ -250,9 +270,9 @@
]
},
"twofactor_webauthn": {
"sha256": "1lqcw74rsnl8c4sirw9208ra3c8zl8zp93scs7y8fv2n4n60l465",
"url": "https://github.com/nextcloud-releases/twofactor_webauthn/releases/download/v1.2.0/twofactor_webauthn-v1.2.0.tar.gz",
"version": "1.2.0",
"sha256": "0z6m2chq5kxc8f10g6n1lh51yi10svy2qp5gp0v8xs71apqcc2wx",
"url": "https://github.com/nextcloud-releases/twofactor_webauthn/releases/download/v1.3.0/twofactor_webauthn-v1.3.0.tar.gz",
"version": "1.3.0",
"description": "A two-factor provider for WebAuthn devices",
"homepage": "https://github.com/nextcloud/twofactor_webauthn#readme",
"licenses": [
@@ -270,9 +290,9 @@
]
},
"user_saml": {
"sha256": "1gsq5mcn5nnxd56jlp4j2610gqq2gk3ma9yvhgy74wl0sqil98jd",
"url": "https://github.com/nextcloud-releases/user_saml/releases/download/v5.2.2/user_saml-v5.2.2.tar.gz",
"version": "5.2.2",
"sha256": "04ivgqdls72p6x5yrh5p5ycbbfmxhzd0dqin1grblm6cbl7b4s2k",
"url": "https://github.com/nextcloud-releases/user_saml/releases/download/v5.2.4/user_saml-v5.2.4.tar.gz",
"version": "5.2.4",
"description": "Using the SSO & SAML app of your Nextcloud you can make it easily possible to integrate your existing Single-Sign-On solution with Nextcloud. In addition, you can use the Nextcloud LDAP user provider to keep the convenience for users. (e.g. when sharing)\nThe following providers are supported and tested at the moment:\n\n* **SAML 2.0**\n\t* OneLogin\n\t* Shibboleth\n\t* Active Directory Federation Services (ADFS)\n\n* **Authentication via Environment Variable**\n\t* Kerberos (mod_auth_kerb)\n\t* Any other provider that authenticates using the environment variable\n\nWhile theoretically any other authentication provider implementing either one of those standards is compatible, we like to note that they are not part of any internal test matrix.",
"homepage": "https://github.com/nextcloud/user_saml",
"licenses": [

View File

@@ -10,9 +10,9 @@
]
},
"calendar": {
"sha256": "1827l5hxsvfnyc2m78rs95cnb2fwi14nl7ricpl25ndh1j0a7ccz",
"url": "https://github.com/nextcloud-releases/calendar/releases/download/v4.5.2/calendar-v4.5.2.tar.gz",
"version": "4.5.2",
"sha256": "1iq8np72pjw2bn4zmbp2rmrh12n4z62rwrz2bb94xjza1fws7b2v",
"url": "https://github.com/nextcloud-releases/calendar/releases/download/v4.5.3/calendar-v4.5.3.tar.gz",
"version": "4.5.3",
"description": "The Calendar app is a user interface for Nextcloud's CalDAV server. Easily sync events from various devices with your Nextcloud and edit them online.\n\n* 🚀 **Integration with other Nextcloud apps!** Currently Contacts - more to come.\n* 🌐 **WebCal Support!** Want to see your favorite teams matchdays in your calendar? No problem!\n* 🙋 **Attendees!** Invite people to your events\n* ⌚️ **Free/Busy!** See when your attendees are available to meet\n* ⏰ **Reminders!** Get alarms for events inside your browser and via email\n* 🔍 Search! Find your events at ease\n* ☑️ Tasks! See tasks with a due date directly in the calendar\n* 🙈 **Were not reinventing the wheel!** Based on the great [c-dav library](https://github.com/nextcloud/cdav-library), [ical.js](https://github.com/mozilla-comm/ical.js) and [fullcalendar](https://github.com/fullcalendar/fullcalendar) libraries.",
"homepage": "https://github.com/nextcloud/calendar/",
"licenses": [
@@ -29,6 +29,16 @@
"agpl"
]
},
"cookbook": {
"sha256": "18rzvdqd99nlkk3p0y9y8b17ihw5c4c9wsx8psq6xadspm97002y",
"url": "https://github.com/nextcloud/cookbook/releases/download/v0.10.2/Cookbook-0.10.2.tar.gz",
"version": "0.10.2",
"description": "A library for all your recipes. It uses JSON files following the schema.org recipe format. To add a recipe to the collection, you can paste in the URL of the recipe, and the provided web page will be parsed and downloaded to whichever folder you specify in the app settings.",
"homepage": "",
"licenses": [
"agpl"
]
},
"cospend": {
"sha256": "1rg9k33yapbl8chpxx3bjyzc9h4krjavksbxsvw14kpm01rss3j9",
"url": "https://github.com/julien-nc/cospend-nc/releases/download/v1.5.10/cospend-1.5.10.tar.gz",
@@ -40,9 +50,9 @@
]
},
"deck": {
"sha256": "060im5zlj7w6x9d5jpxsziqc8ym6fk573dynvdz231jx360s52g6",
"url": "https://github.com/nextcloud-releases/deck/releases/download/v1.11.0/deck-v1.11.0.tar.gz",
"version": "1.11.0",
"sha256": "18gscc95zay7nrzdm1h5b52r4bpmpzc1h1xb00214qnb59mydiwr",
"url": "https://github.com/nextcloud-releases/deck/releases/download/v1.11.1/deck-v1.11.1.tar.gz",
"version": "1.11.1",
"description": "Deck is a kanban style organization tool aimed at personal planning and project organization for teams integrated with Nextcloud.\n\n\n- 📥 Add your tasks to cards and put them in order\n- 📄 Write down additional notes in Markdown\n- 🔖 Assign labels for even better organization\n- 👥 Share with your team, friends or family\n- 📎 Attach files and embed them in your Markdown description\n- 💬 Discuss with your team using comments\n- ⚡ Keep track of changes in the activity stream\n- 🚀 Get your project organized",
"homepage": "https://github.com/nextcloud/deck",
"licenses": [
@@ -93,7 +103,7 @@
"sha256": "1rpqi7yqzhmdqfl2hq326zv91gn246bgqjzcimsiap20f22z169j",
"url": "https://github.com/nextcloud-releases/impersonate/releases/download/v1.14.0/impersonate-v1.14.0.tar.gz",
"version": "1.14.0",
"description": "By installing the impersonate app of your Nextcloud you enable administrators to impersonate other users on the Nextcloud server. This is especially useful for debugging issues reported by users.\n\nTo impersonate a user an administrator has to simply follow the following four steps:\n\n1. Login as administrator to Nextcloud.\n2. Open users administration interface.\n3. Select the impersonate button on the affected user.\n4. Confirm the impersonation.\n\nThe administrator is then logged-in as the user, to switch back to the regular user account they simply have to press the logout button.\n\n**Note:**\n\n- This app is not compatible with instances that have encryption enabled.\n- While impersonate actions are logged note that actions performed impersonated will be logged as the impersonated user.\n- Impersonating a user is only possible after their first login.",
"description": "By installing the impersonate app of your Nextcloud you enable administrators to impersonate other users on the Nextcloud server. This is especially useful for debugging issues reported by users.\n\nTo impersonate a user an administrator has to simply follow the following four steps:\n\n1. Login as administrator to Nextcloud.\n2. Open users administration interface.\n3. Select the impersonate button on the affected user.\n4. Confirm the impersonation.\n\nThe administrator is then logged-in as the user, to switch back to the regular user account they simply have to press the logout button.\n\n**Note:**\n\n- This app is not compatible with instances that have encryption enabled.\n- While impersonate actions are logged note that actions performed impersonated will be logged as the impersonated user.\n- Impersonating a user is only possible after their first login.\n- You can limit which users/groups can use impersonation in Administration settings > Additional settings.",
"homepage": "https://github.com/nextcloud/impersonate",
"licenses": [
"agpl"
@@ -110,10 +120,10 @@
]
},
"mail": {
"sha256": "0mr3npd48j444lalkph1z8rsz5jllpgdrcqsjdrqgmf70y0g55mh",
"url": "https://github.com/nextcloud-releases/mail/releases/download/v3.4.3/mail-v3.4.3.tar.gz",
"version": "3.4.3",
"description": "**💌 A mail app for Nextcloud**\n\n- **🚀 Integration with other Nextcloud apps!** Currently Contacts, Calendar & Files more to come.\n- **📥 Multiple mail accounts!** Personal and company account? No problem, and a nice unified inbox. Connect any IMAP account.\n- **🔒 Send & receive encrypted mails!** Using the great [Mailvelope](https://mailvelope.com) browser extension.\n- **🙈 Were not reinventing the wheel!** Based on the great [Horde](https://horde.org) libraries.\n- **📬 Want to host your own mail server?** We do not have to reimplement this as you could set up [Mail-in-a-Box](https://mailinabox.email)!",
"sha256": "0ascail3vfkv3mm5s4s3ma74d6qxai76kdqxknmljnw56xb19qfv",
"url": "https://github.com/nextcloud-releases/mail/releases/download/v3.4.4/mail-v3.4.4.tar.gz",
"version": "3.4.4",
"description": "**💌 A mail app for Nextcloud**\n\n- **🚀 Integration with other Nextcloud apps!** Currently Contacts, Calendar & Files more to come.\n- **📥 Multiple mail accounts!** Personal and company account? No problem, and a nice unified inbox. Connect any IMAP account.\n- **🔒 Send & receive encrypted mails!** Using the great [Mailvelope](https://mailvelope.com) browser extension.\n- **🙈 Were not reinventing the wheel!** Based on the great [Horde](https://horde.org) libraries.\n- **📬 Want to host your own mail server?** We do not have to reimplement this as you could set up [Mail-in-a-Box](https://mailinabox.email)!\n\n## Ethical AI Rating\n\n### Priority Inbox\n\nPositive:\n* The software for training and inferencing of this model is open source.\n* The model is created and trained on-premises based on the user's own data.\n* The training data is accessible to the user, making it possible to check or correct for bias or optimise the performance and CO2 usage.\n\n### Thread Summaries (opt-in)\n\n**Rating:** 🟢/🟡/🟠/🔴\n\nThe rating depends on the installed text processing backend. See [the rating overview](https://docs.nextcloud.com/server/latest/admin_manual/ai/index.html) for details.\n\nLearn more about the Nextcloud Ethical AI Rating [in our blog](https://nextcloud.com/blog/nextcloud-ethical-ai-rating/).",
"homepage": "https://github.com/nextcloud/mail#readme",
"licenses": [
"agpl"
@@ -130,15 +140,25 @@
]
},
"memories": {
"sha256": "0i146mlg27phs407clclksn1wjkyl0c5fb2zw0npbx5dvqnpglcd",
"url": "https://github.com/pulsejet/memories/releases/download/v6.0.1/memories.tar.gz",
"version": "6.0.1",
"sha256": "1yn1wkv4jxpc8faf4rl46yfddyplnryrkws3jz0x1wcr9zlxdkng",
"url": "https://github.com/pulsejet/memories/releases/download/v6.1.0/memories.tar.gz",
"version": "6.1.0",
"description": "# Memories: Photo Management for Nextcloud\n\nMemories is a *batteries-included* photo management solution for Nextcloud with advanced features including:\n\n- **📸 Timeline**: Sort photos and videos by date taken, parsed from Exif data.\n- **⏪ Rewind**: Jump to any time in the past instantly and relive your memories.\n- **🤖 AI Tagging**: Group photos by people and objects, powered by [recognize](https://github.com/nextcloud/recognize) and [facerecognition](https://github.com/matiasdelellis/facerecognition).\n- **🖼️ Albums**: Create albums to group photos and videos together. Then share these albums with others.\n- **🫱🏻‍🫲🏻 External Sharing**: Share photos and videos with people outside of your Nextcloud instance.\n- **📱 Mobile Support**: Work from any device, of any shape and size through the web app.\n- **✏️ Edit Metadata**: Edit dates and other metadata on photos quickly and in bulk.\n- **📦 Archive**: Store photos you don't want to see in your timeline in a separate folder.\n- **📹 Video Transcoding**: Transcode videos and use HLS for maximal performance.\n- **🗺️ Map**: View your photos on a map, tagged with accurate reverse geocoding.\n- **📦 Migration**: Migrate easily from Nextcloud Photos and Google Takeout.\n- **⚡️ Performance**: Do all this very fast.\n\n## 🚀 Installation\n\n1. Install the app from the Nextcloud app store (try a demo [here](https://demo.memories.gallery/apps/memories/)).\n1. Perform the recommended [configuration steps](https://memories.gallery/config/).\n1. Run `php occ memories:index` to generate metadata indices for existing photos.\n1. Open the 📷 Memories app in Nextcloud and set the directory containing your photos.",
"homepage": "https://memories.gallery",
"licenses": [
"agpl"
]
},
"music": {
"sha256": "06w82v34csx4scl5n4k4fpdxiivrzjb3yvj3hh4bc15gdz68cis9",
"url": "https://github.com/owncloud/music/releases/download/v1.9.1/music_1.9.1_for_nextcloud.tar.gz",
"version": "1.9.1",
"description": "A stand-alone music player app and a \"lite\" player for the Files app\n\n- On modern browsers, supports audio types .mp3, .ogg, .m4a, .m4b, .flac, .wav, and more\n- Playlist support with import from m3u, m3u8, and pls files\n- Browse by artists, albums, genres, or folders\n- Gapless play\n- Filter the shown content with the search function\n- Play internet radio and podcast channels\n- Setup Last.fm connection to see background information on artists, albums, and songs\n- Control with media control keys on the keyboard or OS\n- The app can handle libraries consisting of thousands of albums and tens of thousands of songs\n- Includes a server backend compatible with the Subsonic and Ampache protocols, allowing playback and browsing of your library on various external apps e.g. on Android or iPhone",
"homepage": "https://github.com/owncloud/music",
"licenses": [
"agpl"
]
},
"news": {
"sha256": "1bkh73h0ibxyjpy3cmvhnlr7isvgqkcxdw2dw15mxksj2cln9wki",
"url": "https://github.com/nextcloud/news/releases/download/24.0.0/news.tar.gz",
@@ -160,9 +180,9 @@
]
},
"notify_push": {
"sha256": "0hdxnkar2ibis5p0gp3yr1i6894la9wxq4pzrbqdrq2cgvsj6a18",
"url": "https://github.com/nextcloud-releases/notify_push/releases/download/v0.6.3/notify_push-v0.6.3.tar.gz",
"version": "0.6.3",
"sha256": "0lwyy1pnyfw464vab1v5k8q0rgarrj5w12cf1nsywjaafb8y1ym0",
"url": "https://github.com/nextcloud-releases/notify_push/releases/download/v0.6.5/notify_push-v0.6.5.tar.gz",
"version": "0.6.5",
"description": "Push update support for desktop app.\n\nOnce the app is installed, the push binary needs to be setup. You can either use the setup wizard with `occ notify_push:setup` or see the [README](http://github.com/nextcloud/notify_push) for detailed setup instructions",
"homepage": "",
"licenses": [
@@ -180,9 +200,9 @@
]
},
"polls": {
"sha256": "1v5zb164f60qskfiv02l9x2v0d4rayacg5qivd70dawmyqnz4vmd",
"url": "https://github.com/nextcloud/polls/releases/download/v5.3.2/polls.tar.gz",
"version": "5.3.2",
"sha256": "1dmws4ybv3pzxz5g9c7ghblx0nfal2ssmsg7hjcs1n4xcjc6585p",
"url": "https://github.com/nextcloud/polls/releases/download/v5.4.2/polls.tar.gz",
"version": "5.4.2",
"description": "A polls app, similar to Doodle/Dudle with the possibility to restrict access (members, certain groups/users, hidden and public).",
"homepage": "https://github.com/nextcloud/polls",
"licenses": [
@@ -240,9 +260,9 @@
]
},
"twofactor_nextcloud_notification": {
"sha256": "03fp8wcwhyp8ivrcf20klgg17sxc9fia5sa44rbrv3cicmwmw50b",
"url": "https://github.com/nextcloud-releases/twofactor_nextcloud_notification/releases/download/v3.7.0/twofactor_nextcloud_notification-v3.7.0.tar.gz",
"version": "3.7.0",
"sha256": "0gaqgzbryim580dxarak7p4g3wd8wp3w6lw9jhl84jh46wrsbrj8",
"url": "https://github.com/nextcloud-releases/twofactor_nextcloud_notification/releases/download/v3.8.0/twofactor_nextcloud_notification-v3.8.0.tar.gz",
"version": "3.8.0",
"description": "Allows using any of your logged in devices as second factor",
"homepage": "https://github.com/nextcloud/twofactor_nextcloud_notification",
"licenses": [
@@ -250,9 +270,9 @@
]
},
"twofactor_webauthn": {
"sha256": "1lqcw74rsnl8c4sirw9208ra3c8zl8zp93scs7y8fv2n4n60l465",
"url": "https://github.com/nextcloud-releases/twofactor_webauthn/releases/download/v1.2.0/twofactor_webauthn-v1.2.0.tar.gz",
"version": "1.2.0",
"sha256": "0z6m2chq5kxc8f10g6n1lh51yi10svy2qp5gp0v8xs71apqcc2wx",
"url": "https://github.com/nextcloud-releases/twofactor_webauthn/releases/download/v1.3.0/twofactor_webauthn-v1.3.0.tar.gz",
"version": "1.3.0",
"description": "A two-factor provider for WebAuthn devices",
"homepage": "https://github.com/nextcloud/twofactor_webauthn#readme",
"licenses": [
@@ -270,9 +290,9 @@
]
},
"user_saml": {
"sha256": "1gsq5mcn5nnxd56jlp4j2610gqq2gk3ma9yvhgy74wl0sqil98jd",
"url": "https://github.com/nextcloud-releases/user_saml/releases/download/v5.2.2/user_saml-v5.2.2.tar.gz",
"version": "5.2.2",
"sha256": "04ivgqdls72p6x5yrh5p5ycbbfmxhzd0dqin1grblm6cbl7b4s2k",
"url": "https://github.com/nextcloud-releases/user_saml/releases/download/v5.2.4/user_saml-v5.2.4.tar.gz",
"version": "5.2.4",
"description": "Using the SSO & SAML app of your Nextcloud you can make it easily possible to integrate your existing Single-Sign-On solution with Nextcloud. In addition, you can use the Nextcloud LDAP user provider to keep the convenience for users. (e.g. when sharing)\nThe following providers are supported and tested at the moment:\n\n* **SAML 2.0**\n\t* OneLogin\n\t* Shibboleth\n\t* Active Directory Federation Services (ADFS)\n\n* **Authentication via Environment Variable**\n\t* Kerberos (mod_auth_kerb)\n\t* Any other provider that authenticates using the environment variable\n\nWhile theoretically any other authentication provider implementing either one of those standards is compatible, we like to note that they are not part of any internal test matrix.",
"homepage": "https://github.com/nextcloud/user_saml",
"licenses": [

View File

@@ -2,6 +2,7 @@
"bookmarks": "agpl3Plus"
, "calendar": "agpl3Plus"
, "contacts": "agpl3Plus"
, "cookbook": "agpl3Plus"
, "cospend": "agpl3Plus"
, "deck": "agpl3Plus"
, "files_texteditor": "agpl3Plus"
@@ -13,6 +14,7 @@
, "mail": "agpl3Plus"
, "maps": "agpl3Plus"
, "memories": "agpl3Plus"
, "music": "agpl3Plus"
, "news": "agpl3Plus"
, "notes": "agpl3Plus"
, "notify_push": "agpl3Plus"

View File

@@ -5,13 +5,18 @@
, lib
, makeWrapper
, monkeysAudio
, perlPackages
, nixosTests
, perl538Packages
, sox
, stdenv
, wavpack
, zlib
, enableUnfreeFirmware ? false
}:
let
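# alias the pinned Perl 5.38 package set under the generic name used by the inputs below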
perlPackages = perl538Packages;
in
perlPackages.buildPerlPackage rec {
pname = "slimserver";
version = "8.3.1";
@@ -25,10 +30,99 @@ perlPackages.buildPerlPackage rec {
nativeBuildInputs = [ makeWrapper ];
buildInputs = [ perlPackages.CryptOpenSSLRSA perlPackages.IOSocketSSL ];
buildInputs = with perlPackages; [
AnyEvent
ArchiveZip
AsyncUtil
AudioScan
CarpClan
CGI
ClassAccessor
ClassAccessorChained
ClassC3
# ClassC3Componentised # Error: DBIx::Class::Row::throw_exception(): DBIx::Class::Relationship::BelongsTo::belongs_to(): Can't infer join condition for track
ClassDataInheritable
ClassInspector
ClassISA
ClassMember
ClassSingleton
ClassVirtual
ClassXSAccessor
CompressRawZlib
CryptOpenSSLRSA
DataDump
DataPage
DataURIEncode
DBDSQLite
DBI
# DBIxClass # https://github.com/Logitech/slimserver/issues/138
DigestSHA1
EncodeDetect
EV
ExporterLite
FileBOM
FileCopyRecursive
FileNext
FileReadBackwards
FileSlurp
FileWhich
HTMLParser
HTTPCookies
HTTPDaemon
HTTPMessage
ImageScale
IOAIO
IOInterface
IOSocketSSL
IOString
JSONXS
JSONXSVersionOneAndTwo
# LogLog4perl # Internal error: Root Logger not initialized.
LWP
LWPProtocolHttps
MP3CutGapless
NetHTTP
NetHTTPSNB
PathClass
ProcBackground
# SQLAbstract # DBI Exception: DBD::SQLite::db prepare_cached failed: no such function: ARRAY
SQLAbstractLimit
SubName
TemplateToolkit
TextUnidecode
TieCacheLRU
TieCacheLRUExpires
TieRegexpHash
TimeDate
URI
URIFind
UUIDTiny
XMLParser
XMLSimple
YAMLLibYAML
]
# ++ (lib.optional stdenv.isDarwin perlPackages.MacFSEvents)
++ (lib.optional stdenv.isLinux perlPackages.LinuxInotify2);
prePatch = ''
# remove vendored binaries
rm -rf Bin
# remove most vendored modules, keeping necessary ones
mkdir -p CPAN_used/Class/C3/ CPAN_used/SQL
rm -r CPAN/SQL/Abstract/Limit.pm
cp -rv CPAN/Class/C3/Componentised.pm CPAN_used/Class/C3/
cp -rv CPAN/DBIx CPAN_used/
cp -rv CPAN/Log CPAN_used/
cp -rv CPAN/SQL/* CPAN_used/SQL/
rm -r CPAN
mv CPAN_used CPAN
${lib.optionalString (!enableUnfreeFirmware) ''
# remove unfree firmware
rm -rf Firmware
''}
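# stub Makefile.PL, presumably so buildPerlPackage's standard Perl phases have something to run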
touch Makefile.PL
'';
@@ -38,18 +132,26 @@ perlPackages.buildPerlPackage rec {
cp -r . $out
wrapProgram $out/slimserver.pl \
--prefix LD_LIBRARY_PATH : "${lib.makeLibraryPath [ zlib stdenv.cc.cc.lib ]}" \
--prefix PATH : "${lib.makeBinPath [ lame flac faad2 sox monkeysAudio wavpack ]}"
--prefix PATH : "${lib.makeBinPath ([ lame flac faad2 sox wavpack ] ++ (lib.optional stdenv.isLinux monkeysAudio))}"
mkdir $out/bin
ln -s $out/slimserver.pl $out/bin/slimserver
'';
outputs = [ "out" ];
passthru.tests = {
inherit (nixosTests) slimserver;
};
meta = with lib; {
homepage = "https://github.com/Logitech/slimserver";
description = "Server for Logitech Squeezebox players. This server is also called Logitech Media Server";
# the firmware is not under a free license!
# the firmware is not under a free license, but it is not included in the default package
# https://github.com/Logitech/slimserver/blob/public/8.3/License.txt
license = licenses.unfree;
license = if enableUnfreeFirmware then licenses.unfree else licenses.gpl2Only;
mainProgram = "slimserver";
maintainers = with maintainers; [ adamcstephens jecaro ];
platforms = platforms.unix;
broken = stdenv.isDarwin;
};
}
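# A minimal usage sketch (illustrative, not part of this file): the optional unfree
# firmware can be pulled back in with the standard override mechanism, e.g.
#   slimserver.override { enableUnfreeFirmware = true; }
# which also switches the advertised license back to `licenses.unfree`.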

View File

@@ -67,7 +67,6 @@ with pkgs;
# libcxx does not build for some reason on aarch64-linux
(filterAttrs (n: _: n != "llvmPackages_7"))
] ++ lib.optionals (stdenv.hostPlatform.isDarwin && stdenv.hostPlatform.isAarch64) [
(filterAttrs (n: _: n != "llvmPackages_5"))
(filterAttrs (n: _: n != "llvmPackages_6"))
(filterAttrs (n: _: n != "llvmPackages_7"))
(filterAttrs (n: _: n != "llvmPackages_8"))

View File

@@ -18,20 +18,19 @@
, rustc
, rustPlatform
, makeWrapper
, writeScript
, fuseSupport ? false
}:
let
version = "1.3.3";
in
stdenv.mkDerivation {
stdenv.mkDerivation (finalAttrs: {
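# finalAttrs refers to the derivation's final attribute set, so the references to
# finalAttrs.version below keep tracking the version even across overrideAttrs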
pname = "bcachefs-tools";
inherit version;
version = "1.3.3";
src = fetchFromGitHub {
owner = "koverstreet";
repo = "bcachefs-tools";
rev = "v${version}";
rev = "v${finalAttrs.version}";
hash = "sha256-73vgwgBqyRLQ/Tts7bl6DhZMOs8ndIOiCke5tN89Wps=";
};
@@ -71,7 +70,7 @@ stdenv.mkDerivation {
makeFlags = [
"PREFIX=${placeholder "out"}"
"VERSION=${version}"
"VERSION=${finalAttrs.version}"
"INITRAMFS_DIR=${placeholder "out"}/etc/initramfs-tools"
];
@@ -79,9 +78,24 @@ stdenv.mkDerivation {
rm tests/test_fuse.py
'';
passthru.tests = {
smoke-test = nixosTests.bcachefs;
inherit (nixosTests.installer) bcachefsSimple bcachefsEncrypted bcachefsMulti;
passthru = {
tests = {
smoke-test = nixosTests.bcachefs;
inherit (nixosTests.installer) bcachefsSimple bcachefsEncrypted bcachefsMulti;
};
updateScript = writeScript "update-bcachefs-tools-and-cargo-lock.sh" ''
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p curl jq common-updater-scripts
res="$(curl ''${GITHUB_TOKEN:+-u ":$GITHUB_TOKEN"} \
-sL "https://api.github.com/repos/${finalAttrs.src.owner}/${finalAttrs.src.repo}/tags?per_page=1")"
version="$(echo $res | jq '.[0].name | split("v") | .[1]' --raw-output)"
update-source-version ${finalAttrs.pname} "$version" --ignore-same-hash
curl "https://raw.githubusercontent.com/${finalAttrs.src.owner}/${finalAttrs.src.repo}/v$version/rust-src/Cargo.lock" > \
"$(git rev-parse --show-toplevel)/pkgs/tools/filesystems/bcachefs-tools/Cargo.lock"
'';
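# Illustrative invocation via the usual nixpkgs updater entry point (shown here as an example):
#   nix-shell maintainers/scripts/update.nix --argstr package bcachefs-tools
# besides bumping the version, the script refreshes the vendored Cargo.lock in-tree.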
};
enableParallelBuilding = true;
@@ -93,4 +107,4 @@ stdenv.mkDerivation {
maintainers = with maintainers; [ davidak Madouura ];
platforms = platforms.linux;
};
}
})

View File

@@ -152,6 +152,7 @@ mapAliases ({
clasp = clingo; # added 2022-12-22
claws-mail-gtk3 = claws-mail; # Added 2021-07-10
cntk = throw "'cntk' has been removed from nixpkgs, as it was broken and unmaintained"; # Added 2023-10-09
code-server = throw "'code-server' has been removed from nixpkgs, as it depended on EOL Node.js and was unmaintained."; # Added 2023-10-30
codimd = hedgedoc; # Added 2020-11-29
inherit (libsForQt5.mauiPackages) communicator; # added 2022-05-17
compton = throw "'compton' has been renamed to/replaced by 'picom'"; # Converted to throw 2023-09-10
@@ -161,7 +162,6 @@ mapAliases ({
cvs_fast_export = cvs-fast-export; # Added 2021-06-10
# these are for convenience, not for backward compat and shouldn't expire
clang5Stdenv = lowPrio llvmPackages_5.stdenv;
clang6Stdenv = lowPrio llvmPackages_6.stdenv;
clang7Stdenv = lowPrio llvmPackages_7.stdenv;
clang8Stdenv = lowPrio llvmPackages_8.stdenv;
@@ -428,6 +428,8 @@ mapAliases ({
keysmith = libsForQt5.kdeGear.keysmith; # Added 2021-07-14
kfctl = throw "kfctl is broken and has been archived by upstream" ; # Added 2023-08-21
kgx = gnome-console; # Added 2022-02-19
kibana7 = throw "Kibana 7.x has been removed from nixpkgs as it depends on an end-of-life Node.js version and received no maintenance in time."; # Added 2023-10-30
kibana = kibana7;
kicad-with-packages3d = throw "'kicad-with-packages3d' has been renamed to/replaced by 'kicad'"; # Converted to throw 2023-09-10
kio-admin = libsForQt5.kdeGear.kio-admin; # Added 2023-03-18
kodiGBM = kodi-gbm;
@@ -623,10 +625,14 @@ mapAliases ({
nixopsUnstable = nixops_unstable; # Added 2022-03-03
nixosTest = testers.nixosTest; # Added 2022-05-05
nmap-unfree = nmap; # Added 2021-04-06
nodejs_14 = throw "nodejs_14 has been removed as it is EOL."; # Added 2023-10-30
nodejs-slim_14 = throw "nodejs-slim_14 has been removed as it is EOL."; # Added 2023-10-30
nodejs-14_x = nodejs_14; # Added 2022-11-06
nodejs-slim-14_x = nodejs-slim_14; # Added 2022-11-06
nodejs_16 = throw "nodejs_16 has been removed as it is EOL."; # Added 2023-10-30
nodejs-16_x = nodejs_16; # Added 2022-11-06
nodejs-16_x-openssl_1_1 = throw "nodejs-16_x-openssl_1_1 has been removed."; # Added 2023-02-04
nodejs-slim_16 = throw "nodejs-slim_16 has been removed as it is EOL."; # Added 2023-10-30
nodejs-slim-16_x = nodejs-slim_16; # Added 2022-11-06
nodejs-18_x = nodejs_18; # Added 2022-11-06
nodejs-slim-18_x = nodejs-slim_18; # Added 2022-11-06

View File

@@ -9830,9 +9830,6 @@ with pkgs;
kluctl = callPackage ../applications/networking/cluster/kluctl { };
kibana7 = callPackage ../development/tools/misc/kibana/7.x.nix { };
kibana = kibana7;
kibi = callPackage ../applications/editors/kibi { };
kio-fuse = libsForQt5.callPackage ../tools/filesystems/kio-fuse { };
@@ -10311,15 +10308,6 @@ with pkgs;
nodejs-slim = nodejs-slim_18;
corepack = hiPrio corepack_18;
nodejs_14 = callPackage ../development/web/nodejs/v14.nix { openssl = openssl_1_1; };
nodejs-slim_14 = callPackage ../development/web/nodejs/v14.nix {
openssl = openssl_1_1;
enableNpm = false;
};
nodejs_16 = callPackage ../development/web/nodejs/v16.nix { };
nodejs-slim_16 = callPackage ../development/web/nodejs/v16.nix { enableNpm = false; };
nodejs_18 = callPackage ../development/web/nodejs/v18.nix { };
nodejs-slim_18 = callPackage ../development/web/nodejs/v18.nix { enableNpm = false; };
corepack_18 = hiPrio (callPackage ../development/web/nodejs/corepack.nix { nodejs = nodejs_18; });
@@ -15646,7 +15634,6 @@ with pkgs;
};
clang = llvmPackages.clang;
clang_5 = llvmPackages_5.clang;
clang_6 = llvmPackages_6.clang;
clang_7 = llvmPackages_7.clang;
clang_8 = llvmPackages_8.clang;
@@ -15663,10 +15650,6 @@ with pkgs;
llvmPackages = llvmPackages_14;
};
clang-tools_5 = callPackage ../development/tools/clang-tools {
llvmPackages = llvmPackages_5;
};
clang-tools_6 = callPackage ../development/tools/clang-tools {
llvmPackages = llvmPackages_6;
};
@@ -16599,7 +16582,6 @@ with pkgs;
};
lld = llvmPackages.lld;
lld_5 = llvmPackages_5.lld;
lld_6 = llvmPackages_6.lld;
lld_7 = llvmPackages_7.lld;
lld_8 = llvmPackages_8.lld;
@@ -16613,7 +16595,6 @@ with pkgs;
lld_16 = llvmPackages_16.lld;
lldb = lldb_14;
lldb_5 = llvmPackages_5.lldb;
lldb_6 = llvmPackages_6.lldb;
lldb_7 = llvmPackages_7.lldb;
lldb_8 = llvmPackages_8.lldb;
@@ -16627,7 +16608,6 @@ with pkgs;
lldb_16 = llvmPackages_16.lldb;
llvm = llvmPackages.llvm;
llvm_5 = llvmPackages_5.llvm;
llvm_6 = llvmPackages_6.llvm;
llvm_7 = llvmPackages_7.llvm;
llvm_8 = llvmPackages_8.llvm;
@@ -16660,13 +16640,6 @@ with pkgs;
stdenv.targetPlatform));
in pkgs.${"llvmPackages_${minSupported}"};
llvmPackages_5 = recurseIntoAttrs (callPackage ../development/compilers/llvm/5 {
inherit (stdenvAdapters) overrideCC;
buildLlvmTools = buildPackages.llvmPackages_5.tools;
targetLlvm = targetPackages.llvmPackages_5.llvm or llvmPackages_5.llvm;
targetLlvmLibraries = targetPackages.llvmPackages_5.libraries or llvmPackages_5.libraries;
});
llvmPackages_6 = recurseIntoAttrs (callPackage ../development/compilers/llvm/6 {
inherit (stdenvAdapters) overrideCC;
buildLlvmTools = buildPackages.llvmPackages_6.tools;
@@ -17760,7 +17733,7 @@ with pkgs;
### End of CuboCore
maude = callPackage ../development/interpreters/maude {
stdenv = if stdenv.cc.isClang then llvmPackages_5.stdenv else stdenv;
stdenv = if stdenv.cc.isClang then llvmPackages_7.stdenv else stdenv;
};
me_cleaner = callPackage ../tools/misc/me_cleaner { };
@@ -26565,6 +26538,8 @@ with pkgs;
jicofo = callPackage ../servers/jicofo { };
jitsi-excalidraw = callPackage ../servers/jitsi-excalidraw { };
jitsi-meet = callPackage ../servers/web-apps/jitsi-meet { };
jitsi-meet-prosody = callPackage ../misc/jitsi-meet-prosody { };
@@ -36355,13 +36330,6 @@ with pkgs;
inherit (nodePackages) node-gyp;
};
code-server = callPackage ../servers/code-server {
nodejs = nodejs_16;
inherit (darwin.apple_sdk.frameworks) AppKit Cocoa CoreServices Security;
inherit (darwin) cctools;
inherit (nodePackages) node-gyp;
};
vue = callPackage ../applications/misc/vue { };
vuze = callPackage ../applications/networking/p2p/vuze {
@@ -37691,6 +37659,9 @@ with pkgs;
deliantra-server = callPackage ../games/deliantra/server.nix {
stdenv = gcc10StdenvCompat;
# perl538 defines 'struct object' in sv.h. many conflicts result
perl = perl536;
perlPackages = perl536Packages;
};
deliantra-arch = callPackage ../games/deliantra/arch.nix {
stdenv = gcc10StdenvCompat;
@@ -40026,7 +39997,7 @@ with pkgs;
root5 = lowPrio (callPackage ../applications/science/misc/root/5.nix {
inherit (darwin.apple_sdk.frameworks) Cocoa OpenGL;
stdenv = if stdenv.cc.isClang then llvmPackages_5.stdenv else stdenv;
stdenv = if stdenv.cc.isClang then llvmPackages_7.stdenv else stdenv;
});
rinetd = callPackage ../servers/rinetd { };

View File

@@ -1169,6 +1169,20 @@ with self; {
};
};
AsyncUtil = buildPerlPackage {
pname = "Async-Util";
version = "0.01";
src = fetchurl {
url = "mirror://cpan/authors/id/W/WH/WHITNEY/Async-Util-0.01.tar.gz";
hash = "sha256-jzKxHKvFD2Xjh79W8mWBV6IsNah5Nmbhtfis/hMQkQY=";
};
buildInputs = [ AnyEvent ListMoreUtils ];
meta = {
description = "Utilities for doing common async operations";
license = with lib.licenses; [ artistic1 gpl1Plus ];
};
};
ArchiveCpio = buildPerlPackage {
pname = "Archive-Cpio";
version = "0.10";
@@ -1253,6 +1267,18 @@ with self; {
};
};
AudioCuefileParser = buildPerlPackage {
pname = "Audio-Cuefile-Parser";
version = "0.02";
src = fetchurl {
url = "mirror://cpan/authors/id/M/MA/MATTK/Audio-Cuefile-Parser-0.02.tar.gz";
hash = "sha256-ulbQcMhz2WxoatmoH99P6JuETkPrSd/gAL+c70PFtmk=";
};
meta = {
license = with lib.licenses; [ artistic1 gpl1Plus ];
};
};
AudioFLACHeader = buildPerlPackage {
pname = "Audio-FLAC-Header";
version = "2.4";
@@ -3602,6 +3628,19 @@ with self; {
};
};
ClassMember = buildPerlPackage {
pname = "Class-Member";
version = "1.6";
src = fetchurl {
url = "mirror://cpan/authors/id/O/OP/OPI/Class-Member-1.6.tar.gz";
hash = "sha256-p1KK8in6OhIF3NJakd59dKxvp9lSgbmTtV6Lb0+HuZE=";
};
meta = {
description = "A set of modules to make the module developement easier";
license = with lib.licenses; [ artistic1 gpl1Plus ];
};
};
ClassMethodMaker = buildPerlPackage {
pname = "Class-MethodMaker";
version = "2.24";
@@ -12712,6 +12751,19 @@ with self; {
};
};
IOInterface = buildPerlModule {
pname = "IO-Interface";
version = "1.09";
src = fetchurl {
url = "mirror://cpan/authors/id/L/LD/LDS/IO-Interface-1.09.tar.gz";
hash = "sha256-5j6BxS6x4OYOwtmD9VUtJJPhFxeZJclnV/I8S9n6cTo=";
};
meta = {
description = "Access and modify network interface card configuration";
license = with lib.licenses; [ artistic1 gpl1Plus ];
};
};
IOInteractive = buildPerlPackage {
pname = "IO-Interactive";
version = "1.025";
@@ -17933,6 +17985,20 @@ with self; {
};
};
MP3CutGapless = buildPerlPackage {
pname = "MP3-Cut-Gapless";
version = "0.03";
src = fetchurl {
url = "mirror://cpan/authors/id/A/AG/AGRUNDMA/MP3-Cut-Gapless-0.03.tar.gz";
hash = "sha256-PoS3OdHx4902FvhR3GV14WXTKEZ/AySGB5UOWVH+pPM=";
};
propagatedBuildInputs = [ AudioCuefileParser ];
meta = {
description = "Split an MP3 file without gaps (based on pcutmp3)";
license = with lib.licenses; [ artistic1 ];
};
};
MP3Info = buildPerlPackage {
pname = "MP3-Info";
version = "1.26";

View File

@@ -143,10 +143,12 @@ let
jobs.tests.cc-wrapper.llvmPackages.clang.x86_64-linux
jobs.tests.cc-wrapper.llvmPackages.libcxx.x86_64-linux
jobs.tests.cc-wrapper.llvmPackages_5.clang.x86_64-linux
jobs.tests.cc-wrapper.llvmPackages_5.libcxx.x86_64-linux
jobs.tests.cc-wrapper.llvmPackages_6.clang.x86_64-linux
jobs.tests.cc-wrapper.llvmPackages_6.libcxx.x86_64-linux
jobs.tests.cc-wrapper.llvmPackages_7.clang.x86_64-linux
jobs.tests.cc-wrapper.llvmPackages_7.libcxx.x86_64-linux
jobs.tests.cc-wrapper.llvmPackages_7.clang.x86_64-linux
jobs.tests.cc-wrapper.llvmPackages_7.libcxx.x86_64-linux
jobs.tests.cc-multilib-gcc.x86_64-linux
jobs.tests.cc-multilib-clang.x86_64-linux
jobs.tests.stdenv-inputs.x86_64-linux