Merge master into haskell-updates

github-actions[bot] 2022-11-22 00:15:18 +00:00 committed by GitHub
commit b9993d09a1
252 changed files with 6993 additions and 4808 deletions

View file

@ -34,6 +34,10 @@ jobs:
pairs:
- from: master
into: haskell-updates
- from: release-22.11
into: staging-next-22.11
- from: staging-next-22.11
into: staging-22.11
- from: release-22.05
into: staging-next-22.05
- from: staging-next-22.05

View file

@ -1 +1 @@
22.11
23.05

View file

@ -100,10 +100,10 @@ stdenv.mkDerivation {
name = "hello";
src = fetchgit {
url = "https://...";
sparseCheckout = ''
directory/to/be/included
another/directory
'';
sparseCheckout = [
"directory/to/be/included"
"another/directory"
];
sha256 = "0000000000000000000000000000000000000000000000000000";
};
}

View file

@ -35,6 +35,70 @@ passthru.tests.version = testers.testVersion {
};
```
## `testBuildFailure` {#tester-testBuildFailure}
Make sure that a build does not succeed. This is useful for testing testers.
This returns a derivation with an override on the builder, with the following effects:
- Fail the build when the original builder succeeds
- Move `$out` to `$out/result`, if it exists (assuming `out` is the default output)
- Save the build log to `$out/testBuildFailure.log` (same)
Example:
```nix
runCommand "example" {
failed = testers.testBuildFailure (runCommand "fail" {} ''
echo ok-ish >$out
echo failing though
exit 3
'');
} ''
grep -F 'ok-ish' $failed/result
grep -F 'failing though' $failed/testBuildFailure.log
[[ 3 = $(cat $failed/testBuildFailure.exit) ]]
touch $out
'';
```
While `testBuildFailure` is designed to keep changes to the original builder's
environment to a minimum, some small changes are inevitable.
- The file `$TMPDIR/testBuildFailure.log` is present. It should not be deleted.
- `stdout` and `stderr` are a pipe instead of a tty. This could be improved.
- One or two extra processes are present in the sandbox during the original
builder's execution.
- The derivation and output hashes are different, but not unusual.
- The derivation includes a dependency on `buildPackages.bash` and
`expect-failure.sh`, which is built to include a transitive dependency on
`buildPackages.coreutils` and possibly more. These are not added to `PATH`
or any other environment variable, so they should be hard to observe.
## `testEqualContents` {#tester-equalContents}
Check that two paths have the same contents.
Example:
```nix
testers.testEqualContents {
assertion = "sed -e performs replacement";
expected = writeText "expected" ''
foo baz baz
'';
actual = runCommand "actual" {
# not really necessary for a package that's in stdenv
nativeBuildInputs = [ gnused ];
base = writeText "base" ''
foo bar baz
'';
} ''
sed -e 's/bar/baz/g' $base >$out
'';
}
```
## `testEqualDerivation` {#tester-testEqualDerivation}
Checks that two packages produce the exact same build instructions.
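A sketch of a typical invocation, assuming the tester takes an assertion message followed by the two derivations to compare:
```nix
testers.testEqualDerivation
  "The hello package must stay the same when enabling checks."
  hello
  (hello.overrideAttrs (o: { doCheck = true; }))
```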

View file

@ -196,7 +196,7 @@ buildNpmPackage rec {
* `makeCacheWritable`: Whether to make the cache writable prior to installing dependencies. Don't set this unless npm tries to write to the cache directory, as it can slow down the build.
* `npmBuildScript`: The script to run to build the project. Defaults to `"build"`.
* `npmFlags`: Flags to pass to all npm commands.
* `npmInstallFlags`: Flags to pass to `npm ci`.
* `npmInstallFlags`: Flags to pass to `npm ci` and `npm prune`.
* `npmBuildFlags`: Flags to pass to `npm run ${npmBuildScript}`.
* `npmPackFlags`: Flags to pass to `npm pack`.
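For illustration, a minimal sketch that combines several of these attributes; the package name, source, and hash are placeholders:
```nix
buildNpmPackage rec {
  pname = "example-app";  # hypothetical package
  version = "1.0.0";
  src = ./.;              # placeholder source
  # Placeholder; replace with the real hash reported by the first (failing) build.
  npmDepsHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  npmInstallFlags = [ "--legacy-peer-deps" ];  # forwarded to `npm ci` and `npm prune`
  npmBuildScript = "build";                    # the default build script
}
```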

View file

@ -15,7 +15,7 @@ For other versions such as daily builds (beta and nightly),
use either `rustup` from nixpkgs (which will manage the rust installation in your home directory),
or use a community maintained [Rust overlay](#using-community-rust-overlays).
## Compiling Rust applications with Cargo {#compiling-rust-applications-with-cargo}
## `buildRustPackage`: Compiling Rust applications with Cargo {#compiling-rust-applications-with-cargo}
Rust applications are packaged by using the `buildRustPackage` helper from `rustPlatform`:
@ -608,7 +608,7 @@ buildPythonPackage rec {
}
```
## Compiling Rust crates using Nix instead of Cargo {#compiling-rust-crates-using-nix-instead-of-cargo}
## `buildRustCrate`: Compiling Rust crates using Nix instead of Cargo {#compiling-rust-crates-using-nix-instead-of-cargo}
### Simple operation {#simple-operation}

View file

@ -3,7 +3,7 @@
let
inherit (builtins) head tail length;
inherit (lib.trivial) id;
inherit (lib.trivial) flip id mergeAttrs pipe;
inherit (lib.strings) concatStringsSep concatMapStringsSep escapeNixIdentifier sanitizeDerivationName;
inherit (lib.lists) foldr foldl' concatMap concatLists elemAt all partition groupBy take foldl;
in
@ -77,6 +77,25 @@ rec {
let errorMsg = "cannot find attribute `" + concatStringsSep "." attrPath + "'";
in attrByPath attrPath (abort errorMsg);
/* Map each attribute in the given set and merge them into a new attribute set.
Type:
concatMapAttrs ::
(String -> a -> AttrSet)
-> AttrSet
-> AttrSet
Example:
concatMapAttrs
(name: value: {
${name} = value;
${name + value} = value;
})
{ x = "a"; y = "b"; }
=> { x = "a"; xa = "a"; y = "b"; yb = "b"; }
*/
concatMapAttrs = f: flip pipe [ (mapAttrs f) attrValues (foldl' mergeAttrs { }) ];
/* Update or set specific paths of an attribute set.
@ -606,7 +625,7 @@ rec {
getMan = getOutput "man";
/* Pick the outputs of packages to place in buildInputs */
chooseDevOutputs = drvs: builtins.map getDev drvs;
chooseDevOutputs = builtins.map getDev;
/* Make various Nix tools consider the contents of the resulting
attribute set when looking for what to build, find, etc.

View file

@ -78,7 +78,7 @@ let
inherit (self.attrsets) attrByPath hasAttrByPath setAttrByPath
getAttrFromPath attrVals attrValues getAttrs catAttrs filterAttrs
filterAttrsRecursive foldAttrs collect nameValuePair mapAttrs
mapAttrs' mapAttrsToList mapAttrsRecursive mapAttrsRecursiveCond
mapAttrs' mapAttrsToList concatMapAttrs mapAttrsRecursive mapAttrsRecursiveCond
genAttrs isDerivation toDerivation optionalAttrs
zipAttrsWithNames zipAttrsWith zipAttrs recursiveUpdateUntil
recursiveUpdate matchAttrs overrideExisting showAttrPath getOutput getBin

View file

@ -557,7 +557,7 @@ rec {
else if platform.isRiscV then riscv-multiplatform
else if platform.parsed.cpu == lib.systems.parse.cpuTypes.mipsel then fuloong2f_n32
else if platform.parsed.cpu == lib.systems.parse.cpuTypes.mipsel then (import ./examples.nix { inherit lib; }).mipsel-linux-gnu
else if platform.parsed.cpu == lib.systems.parse.cpuTypes.powerpc64le then powernv

View file

@ -478,6 +478,23 @@ runTests {
# ATTRSETS
testConcatMapAttrs = {
expr = concatMapAttrs
(name: value: {
${name} = value;
${name + value} = value;
})
{
foo = "bar";
foobar = "baz";
};
expected = {
foo = "bar";
foobar = "baz";
foobarbaz = "baz";
};
};
# code from the example
testRecursiveUpdateUntil = {
expr = recursiveUpdateUntil (path: l: r: path == ["foo"]) {

View file

@ -195,7 +195,7 @@ rec {
On each release the first letter is bumped and a new animal is chosen
starting with that new letter.
*/
codeName = "Raccoon";
codeName = "Stoat";
/* Returns the current nixpkgs version suffix as string. */
versionSuffix =

View file

@ -1285,6 +1285,15 @@
fingerprint = "DD52 6BC7 767D BA28 16C0 95E5 6840 89CE 67EB B691";
}];
};
ataraxiasjel = {
email = "nix@ataraxiadev.com";
github = "AtaraxiaSjel";
githubId = 5314145;
name = "Dmitriy";
keys = [{
fingerprint = "922D A6E7 58A0 FE4C FAB4 E4B2 FD26 6B81 0DF4 8DF2";
}];
};
atemu = {
name = "Atemu";
email = "atemu.main+nixpkgs@gmail.com";
@ -4820,6 +4829,12 @@
githubId = 868283;
name = "Fatih Altinok";
};
fstamour = {
email = "fr.st-amour@gmail.com";
github = "fstamour";
githubId = 2881922;
name = "Francis St-Amour";
};
ftrvxmtrx = {
email = "ftrvxmtrx@gmail.com";
github = "ftrvxmtrx";
@ -4949,6 +4964,13 @@
githubId = 37017396;
name = "gbtb";
};
gdamjan = {
email = "gdamjan@gmail.com";
matrix = "@gdamjan:spodeli.org";
github = "gdamjan";
githubId = 81654;
name = "Damjan Georgievski";
};
gdinh = {
email = "nix@contact.dinh.ai";
github = "gdinh";
@ -9419,12 +9441,6 @@
githubId = 2072185;
name = "Marc Scholten";
};
mpsyco = {
email = "fr.st-amour@gmail.com";
github = "fstamour";
githubId = 2881922;
name = "Francis St-Amour";
};
mtrsk = {
email = "marcos.schonfinkel@protonmail.com";
github = "mtrsk";
@ -14271,6 +14287,12 @@
githubId = 32751441;
name = "urlordjames";
};
ursi = {
email = "masondeanm@aol.com";
github = "ursi";
githubId = 17836748;
name = "Mason Mackaman";
};
uskudnik = {
email = "urban.skudnik@gmail.com";
github = "uskudnik";
@ -15893,4 +15915,10 @@
github = "wuyoli";
githubId = 104238274;
};
jordanisaacs = {
name = "Jordan Isaacs";
email = "nix@jdisaacs.com";
github = "jordanisaacs";
githubId = 19742638;
};
}

View file

@ -278,6 +278,16 @@
<link linkend="opt-services.prometheus.sachet.enable">services.prometheus.sachet</link>.
</para>
</listitem>
<listitem>
<para>
<link xlink:href="https://evcc.io">EVCC</link> is an EV charge
controller with PV integration. It supports a multitude of
chargers, meters, vehicle APIs and more and ties that together
with a well-tested backend and a lightweight web frontend.
Available as
<link linkend="opt-services.evcc.enable">services.evcc</link>.
</para>
</listitem>
<listitem>
<para>
<link xlink:href="https://github.com/leetronics/infnoise">infnoise</link>,
@ -580,6 +590,15 @@
future Git update without notice.
</para>
</listitem>
<listitem>
<para>
The <literal>fetchgit</literal> fetcher supports sparse
checkouts via the <literal>sparseCheckout</literal> option.
This used to accept a multi-line string with
directories/patterns to check out, but now requires a list of
strings.
</para>
</listitem>
<listitem>
<para>
<literal>openssh</literal> was updated to version 9.1,
@ -1392,6 +1411,26 @@ services.github-runner.serviceOverrides.SupplementaryGroups = [
if you intend to add packages to <literal>/bin</literal>.
</para>
</listitem>
<listitem>
<para>
The <literal>proxmox.qemuConf.bios</literal> option was added,
it corresponds to <literal>Hardware-&gt;BIOS</literal> field
in Proxmox web interface. Use
<literal>&quot;ovmf&quot;</literal> value to build UEFI image,
default value remains <literal>&quot;bios&quot;</literal>. New
option <literal>proxmox.partitionTableType</literal> defaults
to either <literal>&quot;legacy&quot;</literal> or
<literal>&quot;efi&quot;</literal>, depending on the
<literal>bios</literal> value. Setting
<literal>partitionTableType</literal> to
<literal>&quot;hybrid&quot;</literal> results in an image,
which supports both methods
(<literal>&quot;bios&quot;</literal> and
<literal>&quot;ovmf&quot;</literal>), thereby remaining
bootable after change to Proxmox
<literal>Hardware-&gt;BIOS</literal> field.
</para>
</listitem>
<listitem>
<para>
memtest86+ was updated from 5.00-coreboot-002 to 6.00-beta2.

View file

@ -0,0 +1,51 @@
<section xmlns="http://docbook.org/ns/docbook" xmlns:xlink="http://www.w3.org/1999/xlink" xml:id="sec-release-23.05">
<title>Release 23.05 (“Stoat”, 2023.05/??)</title>
<para>
Support is planned until the end of December 2023, handing over to
23.11.
</para>
<section xml:id="sec-release-23.05-highlights">
<title>Highlights</title>
<para>
In addition to numerous new and upgraded packages, this release
has the following highlights:
</para>
<itemizedlist spacing="compact">
<listitem>
<para>
Create the first release note entry in this section!
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="sec-release-23.05-new-services">
<title>New Services</title>
<itemizedlist spacing="compact">
<listitem>
<para>
Create the first release note entry in this section!
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="sec-release-23.05-incompatibilities">
<title>Backward Incompatibilities</title>
<itemizedlist spacing="compact">
<listitem>
<para>
Create the first release note entry in this section!
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="sec-release-23.05-notable-changes">
<title>Other Notable Changes</title>
<itemizedlist spacing="compact">
<listitem>
<para>
Create the first release note entry in this section!
</para>
</listitem>
</itemizedlist>
</section>
</section>

View file

@ -8,6 +8,7 @@
This section lists the release notes for each stable version of NixOS and
current unstable revision.
</para>
<xi:include href="../from_md/release-notes/rl-2305.section.xml" />
<xi:include href="../from_md/release-notes/rl-2211.section.xml" />
<xi:include href="../from_md/release-notes/rl-2205.section.xml" />
<xi:include href="../from_md/release-notes/rl-2111.section.xml" />

View file

@ -98,6 +98,8 @@ In addition to numerous new and upgraded packages, this release has the followin
- [Sachet](https://github.com/messagebird/sachet/), an SMS alerting tool for the Prometheus Alertmanager. Available as [services.prometheus.sachet](#opt-services.prometheus.sachet.enable).
- [EVCC](https://evcc.io) is an EV charge controller with PV integration. It supports a multitude of chargers, meters, vehicle APIs and more and ties that together with a well-tested backend and a lightweight web frontend. Available as [services.evcc](#opt-services.evcc.enable).
- [infnoise](https://github.com/leetronics/infnoise), a hardware True Random Number Generator dongle.
Available as [services.infnoise](options.html#opt-services.infnoise.enable).
@ -191,6 +193,8 @@ Available as [services.patroni](options.html#opt-services.patroni.enable).
- The `fetchgit` fetcher now uses [cone mode](https://www.git-scm.com/docs/git-sparse-checkout/2.37.0#_internalscone_mode_handling) by default for sparse checkouts. [Non-cone mode](https://www.git-scm.com/docs/git-sparse-checkout/2.37.0#_internalsnon_cone_problems) can be enabled by passing `nonConeMode = true`, but note that non-cone mode is deprecated and this option may be removed alongside a future Git update without notice.
- The `fetchgit` fetcher supports sparse checkouts via the `sparseCheckout` option. This used to accept a multi-line string with directories/patterns to check out, but now requires a list of strings.
- `openssh` was updated to version 9.1, disabling the generation of DSA keys when using `ssh-keygen -A` as they are insecure. Also, `SetEnv` directives in `ssh_config` and `sshd_config` are now first-match-wins
- `bsp-layout` no longer uses the command `cycle` to switch to other window layouts, as it got replaced by the commands `previous` and `next`.
@ -451,6 +455,8 @@ Available as [services.patroni](options.html#opt-services.patroni.enable).
- `dockerTools.buildImage` deprecates the misunderstood `contents` parameter, in favor of `copyToRoot`.
Use `copyToRoot = buildEnv { ... };` or similar if you intend to add packages to `/bin`.
- The `proxmox.qemuConf.bios` option was added; it corresponds to the `Hardware->BIOS` field in the Proxmox web interface. Use the `"ovmf"` value to build a UEFI image; the default value remains `"bios"`. The new option `proxmox.partitionTableType` defaults to either `"legacy"` or `"efi"`, depending on the `bios` value. Setting `partitionTableType` to `"hybrid"` results in an image that supports both methods (`"bios"` and `"ovmf"`), thereby remaining bootable after a change to the Proxmox `Hardware->BIOS` field.
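For example, a sketch of an image configuration that targets UEFI (the `"hybrid"` value is shown only for illustration):
```nix
{
  # Build a UEFI-bootable image; with "ovmf" the partition table already defaults to "efi".
  proxmox.qemuConf.bios = "ovmf";
  # Optionally keep the image BIOS-bootable as well.
  proxmox.partitionTableType = "hybrid";
}
```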
- memtest86+ was updated from 5.00-coreboot-002 to 6.00-beta2. It is now the upstream version from https://www.memtest.org/, as coreboot's fork is no longer available.
- Option descriptions, examples, and defaults written in DocBook are now deprecated. Using CommonMark is preferred and will become the default in a future release.

View file

@ -0,0 +1,29 @@
# Release 23.05 (“Stoat”, 2023.05/??) {#sec-release-23.05}
Support is planned until the end of December 2023, handing over to 23.11.
## Highlights {#sec-release-23.05-highlights}
In addition to numerous new and upgraded packages, this release has the following highlights:
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
- Create the first release note entry in this section!
## New Services {#sec-release-23.05-new-services}
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
- Create the first release note entry in this section!
## Backward Incompatibilities {#sec-release-23.05-incompatibilities}
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
- Create the first release note entry in this section!
## Other Notable Changes {#sec-release-23.05-notable-changes}
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
- Create the first release note entry in this section!

View file

@ -491,6 +491,7 @@
./services/hardware/vdr.nix
./services/home-automation/home-assistant.nix
./services/home-automation/zigbee2mqtt.nix
./services/home-automation/evcc.nix
./services/logging/SystemdJournal2Gelf.nix
./services/logging/awstats.nix
./services/logging/filebeat.nix

View file

@ -361,8 +361,10 @@ in {
fi
echo 'include "${redisConfStore}"' > "${redisConfRun}"
${optionalString (conf.requirePassFile != null) ''
{echo -n "requirepass "
cat ${escapeShellArg conf.requirePassFile}} >> "${redisConfRun}"
{
echo -n "requirepass "
cat ${escapeShellArg conf.requirePassFile}
} >> "${redisConfRun}"
''}
'');
Type = "notify";

View file

@ -46,6 +46,11 @@ let
SUBSYSTEM=="input", KERNEL=="mice", TAG+="systemd"
'';
nixosInitrdRules = ''
# Mark dm devices as db_persist so that they are kept active after switching root
SUBSYSTEM=="block", KERNEL=="dm-[0-9]*", ACTION=="add|change", OPTIONS+="db_persist"
'';
# Perform substitutions in all udev rules files.
udevRulesFor = { name, udevPackages, udevPath, udev, systemd, binPackages, initrdBin ? null }: pkgs.runCommand name
{ preferLocalBuild = true;
@ -364,8 +369,10 @@ in
EOF
'';
boot.initrd.services.udev.rules = nixosInitrdRules;
boot.initrd.systemd.additionalUpstreamUnits = [
# TODO: "initrd-udevadm-cleanup-db.service" is commented out because of https://github.com/systemd/systemd/issues/12953
"initrd-udevadm-cleanup-db.service"
"systemd-udevd-control.socket"
"systemd-udevd-kernel.socket"
"systemd-udevd.service"

View file

@ -0,0 +1,92 @@
{ lib
, pkgs
, config
, ...
}:
with lib;
let
cfg = config.services.evcc;
format = pkgs.formats.yaml {};
configFile = format.generate "evcc.yml" cfg.settings;
package = pkgs.evcc;
in
{
meta.maintainers = with lib.maintainers; [ hexa ];
options.services.evcc = with types; {
enable = mkEnableOption (lib.mdDoc "EVCC, the extensible EV Charge Controller with PV integration");
extraArgs = mkOption {
type = listOf str;
default = [];
description = lib.mdDoc ''
Extra arguments to pass to the evcc executable.
'';
};
settings = mkOption {
type = format.type;
description = lib.mdDoc ''
evcc configuration as a Nix attribute set.
Check for possible options in the sample [evcc.dist.yaml](https://github.com/andig/evcc/blob/${package.version}/evcc.dist.yaml).
'';
};
};
config = mkIf cfg.enable {
systemd.services.evcc = {
after = [
"network-online.target"
"mosquitto.target"
];
wantedBy = [
"multi-user.target"
];
serviceConfig = {
ExecStart = "${package}/bin/evcc --config ${configFile} ${escapeShellArgs cfg.extraArgs}";
CapabilityBoundingSet = [ "" ];
DeviceAllow = [
"char-ttyUSB"
];
DevicePolicy = "closed";
DynamicUser = true;
LockPersonality = true;
MemoryDenyWriteExecute = true;
RestrictAddressFamilies = [
"AF_INET"
"AF_INET6"
"AF_UNIX"
];
RestrictNamespaces = true;
RestrictRealtime = true;
PrivateTmp = true;
PrivateUsers = true;
ProcSubset = "pid";
ProtectClock = true;
ProtectControlGroups= true;
ProtectHome = true;
ProtectHostname = true;
ProtectKernelLogs = true;
ProtectKernelModules = true;
ProtectKernelTunables = true;
ProtectProc = "invisible";
SystemCallArchitectures = "native";
SystemCallFilter = [
"@system-service"
"~@privileged"
];
UMask = "0077";
User = "evcc";
};
};
};
meta.buildDocsInSandbox = false;
}

View file

@ -7,8 +7,8 @@ let
registrationFile = "${dataDir}/telegram-registration.yaml";
cfg = config.services.mautrix-telegram;
settingsFormat = pkgs.formats.json {};
settingsFileUnsubstituted = settingsFormat.generate "mautrix-telegram-config-unsubstituted.json" cfg.settings;
settingsFile = "${dataDir}/config.json";
settingsFile =
settingsFormat.generate "mautrix-telegram-config.json" cfg.settings;
in {
options = {
@ -97,12 +97,23 @@ in {
default = null;
description = lib.mdDoc ''
File containing environment variables to be passed to the mautrix-telegram service,
in which secret tokens can be specified securely by defining values for
in which secret tokens can be specified securely by defining values for e.g.
`MAUTRIX_TELEGRAM_APPSERVICE_AS_TOKEN`,
`MAUTRIX_TELEGRAM_APPSERVICE_HS_TOKEN`,
`MAUTRIX_TELEGRAM_TELEGRAM_API_ID`,
`MAUTRIX_TELEGRAM_TELEGRAM_API_HASH` and optionally
`MAUTRIX_TELEGRAM_TELEGRAM_BOT_TOKEN`.
These environment variables can also be used to set other options by
replacing hierarchy levels by `.`, converting the name to uppercase
and prepending `MAUTRIX_TELEGRAM_`.
For example, the first value above maps to
{option}`settings.appservice.as_token`.
The environment variable values can be prefixed with `json::` to have
them be parsed as JSON. For example, `login_shared_secret_map` can be
set as follows:
`MAUTRIX_TELEGRAM_BRIDGE_LOGIN_SHARED_SECRET_MAP=json::{"example.com":"secret"}`.
'';
};
@ -141,16 +152,6 @@ in {
environment.HOME = dataDir;
preStart = ''
# Not all secrets can be passed as environment variable (yet)
# https://github.com/tulir/mautrix-telegram/issues/584
[ -f ${settingsFile} ] && rm -f ${settingsFile}
old_umask=$(umask)
umask 0177
${pkgs.envsubst}/bin/envsubst \
-o ${settingsFile} \
-i ${settingsFileUnsubstituted}
umask $old_umask
# generate the appservice's registration file if absent
if [ ! -f '${registrationFile}' ]; then
${pkgs.mautrix-telegram}/bin/mautrix-telegram \
@ -186,8 +187,6 @@ in {
--config='${settingsFile}'
'';
};
restartTriggers = [ settingsFileUnsubstituted ];
};
};

View file

@ -22,9 +22,9 @@ let
# we can only check for values consistently after converting them to their corresponding environment variable name.
configEnv =
let
configEnv = listToAttrs (concatLists (mapAttrsToList (name: value:
if value != null then [ (nameValuePair (nameToEnvVar name) (if isBool value then boolToString value else toString value)) ] else []
) cfg.config));
configEnv = concatMapAttrs (name: value: optionalAttrs (value != null) {
${nameToEnvVar name} = if isBool value then boolToString value else toString value;
}) cfg.config;
in { DATA_FOLDER = "/var/lib/bitwarden_rs"; } // optionalAttrs (!(configEnv ? WEB_VAULT_ENABLED) || configEnv.WEB_VAULT_ENABLED == "true") {
WEB_VAULT_FOLDER = "${cfg.webVaultPackage}/share/vaultwarden/vault";
} // configEnv;

View file

@ -1,8 +1,6 @@
# D-Bus configuration and system bus daemon.
{ config, lib, options, pkgs, ... }:
with lib;
{ config, lib, pkgs, ... }:
let
@ -16,11 +14,11 @@ let
serviceDirectories = cfg.packages;
};
inherit (lib) mkOption types;
in
{
###### interface
options = {
services.dbus = {
@ -65,31 +63,13 @@ in
'';
default = "disabled";
};
socketActivated = mkOption {
type = types.nullOr types.bool;
default = null;
visible = false;
description = lib.mdDoc ''
Removed option, do not use.
'';
};
};
};
###### implementation
config = mkIf cfg.enable {
warnings = optional (cfg.socketActivated != null) (
let
files = showFiles options.services.dbus.socketActivated.files;
in
"The option 'services.dbus.socketActivated' in ${files} no longer has"
+ " any effect and can be safely removed: the user D-Bus session is"
+ " now always socket activated."
);
environment.systemPackages = [ pkgs.dbus.daemon pkgs.dbus ];
config = lib.mkIf cfg.enable {
environment.systemPackages = [
pkgs.dbus
];
environment.etc."dbus-1".source = configDir;
@ -102,10 +82,12 @@ in
users.groups.messagebus.gid = config.ids.gids.messagebus;
systemd.packages = [ pkgs.dbus.daemon ];
systemd.packages = [
pkgs.dbus
];
security.wrappers.dbus-daemon-launch-helper = {
source = "${pkgs.dbus.daemon}/libexec/dbus-daemon-launch-helper";
source = "${pkgs.dbus}/libexec/dbus-daemon-launch-helper";
owner = "root";
group = "messagebus";
setuid = true;
@ -114,26 +96,36 @@ in
};
services.dbus.packages = [
pkgs.dbus.out
pkgs.dbus
config.system.path
];
systemd.services.dbus = {
# Don't restart dbus-daemon. Bad things tend to happen if we do.
reloadIfChanged = true;
restartTriggers = [ configDir ];
environment = { LD_LIBRARY_PATH = config.system.nssModules.path; };
};
systemd.user = {
services.dbus = {
# Don't restart dbus-daemon. Bad things tend to happen if we do.
reloadIfChanged = true;
restartTriggers = [ configDir ];
restartTriggers = [
configDir
];
environment = {
LD_LIBRARY_PATH = config.system.nssModules.path;
};
sockets.dbus.wantedBy = [ "sockets.target" ];
};
environment.pathsToLink = [ "/etc/dbus-1" "/share/dbus-1" ];
systemd.user.services.dbus = {
# Don't restart dbus-daemon. Bad things tend to happen if we do.
reloadIfChanged = true;
restartTriggers = [
configDir
];
};
systemd.user.sockets.dbus.wantedBy = [
"sockets.target"
];
environment.pathsToLink = [
"/etc/dbus-1"
"/share/dbus-1"
];
};
}

View file

@ -688,7 +688,7 @@ in {
inherit (cfg) group;
};
})
(lib.attrsets.setAttrByPath [ cfg.user "packages" ] [ cfg.package mastodonEnv ])
(lib.attrsets.setAttrByPath [ cfg.user "packages" ] [ cfg.package mastodonEnv pkgs.imagemagick ])
];
users.groups.${cfg.group}.members = lib.optional cfg.configureNginx config.services.nginx.user;

View file

@ -6,10 +6,10 @@ let
in
{
# See here for a reference of all the options:
# https://github.com/outline/outline/blob/v0.65.2/.env.sample
# https://github.com/outline/outline/blob/v0.65.2/app.json
# https://github.com/outline/outline/blob/v0.65.2/server/env.ts
# https://github.com/outline/outline/blob/v0.65.2/shared/types.ts
# https://github.com/outline/outline/blob/v0.67.0/.env.sample
# https://github.com/outline/outline/blob/v0.67.0/app.json
# https://github.com/outline/outline/blob/v0.67.0/server/env.ts
# https://github.com/outline/outline/blob/v0.67.0/shared/types.ts
# The order is kept the same here to make updating easier.
options.services.outline = {
enable = lib.mkEnableOption (lib.mdDoc "outline");
@ -123,7 +123,7 @@ in
description = lib.mdDoc ''
To support uploading of images for avatars and document attachments an
s3-compatible storage must be provided. AWS S3 is recommended for
redundency however if you want to keep all file storage local an
redundancy however if you want to keep all file storage local an
alternative such as [minio](https://github.com/minio/minio)
can be used.
@ -435,6 +435,16 @@ in
'';
};
sentryTunnel = lib.mkOption {
type = lib.types.nullOr lib.types.str;
default = null;
description = lib.mdDoc ''
Optionally add a
[Sentry proxy tunnel](https://docs.sentry.io/platforms/javascript/troubleshooting/#using-the-tunnel-option)
for bypassing ad blockers in the UI.
'';
};
logo = lib.mkOption {
type = lib.types.nullOr lib.types.str;
default = null;
@ -621,6 +631,7 @@ in
DEBUG = cfg.debugOutput;
GOOGLE_ANALYTICS_ID = lib.optionalString (cfg.googleAnalyticsId != null) cfg.googleAnalyticsId;
SENTRY_DSN = lib.optionalString (cfg.sentryDsn != null) cfg.sentryDsn;
SENTRY_TUNNEL = lib.optionalString (cfg.sentryTunnel != null) cfg.sentryTunnel;
TEAM_LOGO = lib.optionalString (cfg.logo != null) cfg.logo;
DEFAULT_LANGUAGE = cfg.defaultLanguage;

View file

@ -3,13 +3,14 @@
let
inInitrd = lib.any (fs: fs == "ext2" || fs == "ext3" || fs == "ext4") config.boot.initrd.supportedFilesystems;
inSystem = lib.any (fs: fs == "ext2" || fs == "ext3" || fs == "ext4") config.boot.supportedFilesystems;
in
{
config = {
system.fsPackages = lib.mkIf (config.boot.initrd.systemd.enable -> inInitrd) [ pkgs.e2fsprogs ];
system.fsPackages = lib.mkIf (config.boot.initrd.systemd.enable -> (inInitrd || inSystem)) [ pkgs.e2fsprogs ];
# As of kernel 4.3, there is no separate ext3 driver (they're also handled by ext4.ko)
boot.initrd.availableKernelModules = lib.mkIf (config.boot.initrd.systemd.enable -> inInitrd) [ "ext2" "ext4" ];

View file

@ -53,6 +53,13 @@ with lib;
Guest memory in MB
'';
};
bios = mkOption {
type = types.enum [ "seabios" "ovmf" ];
default = "seabios";
description = ''
Select BIOS implementation (seabios = Legacy BIOS, ovmf = UEFI).
'';
};
# optional configs
name = mkOption {
@ -99,6 +106,17 @@ with lib;
Additional options appended to qemu-server.conf
'';
};
partitionTableType = mkOption {
type = types.enum [ "efi" "hybrid" "legacy" "legacy+gpt" ];
description = ''
Partition table type to use. See make-disk-image.nix partitionTableType for details.
Defaults to 'legacy' for 'proxmox.qemuConf.bios="seabios"' (default); other bios values default to 'efi'.
Use 'hybrid' to build grub-based hybrid bios+efi images.
'';
default = if config.proxmox.qemuConf.bios == "seabios" then "legacy" else "efi";
defaultText = lib.literalExpression ''if config.proxmox.qemuConf.bios == "seabios" then "legacy" else "efi"'';
example = "hybrid";
};
filenameSuffix = mkOption {
type = types.str;
default = config.proxmox.qemuConf.name;
@ -122,9 +140,33 @@ with lib;
${lib.concatStrings (lib.mapAttrsToList cfgLine properties)}
#qmdump#map:virtio0:drive-virtio0:local-lvm:raw:
'';
inherit (cfg) partitionTableType;
supportEfi = partitionTableType == "efi" || partitionTableType == "hybrid";
supportBios = partitionTableType == "legacy" || partitionTableType == "hybrid" || partitionTableType == "legacy+gpt";
hasBootPartition = partitionTableType == "efi" || partitionTableType == "hybrid";
hasNoFsPartition = partitionTableType == "hybrid" || partitionTableType == "legacy+gpt";
in {
assertions = [
{
assertion = config.boot.loader.systemd-boot.enable -> config.proxmox.qemuConf.bios == "ovmf";
message = "systemd-boot requires 'ovmf' bios";
}
{
assertion = partitionTableType == "efi" -> config.proxmox.qemuConf.bios == "ovmf";
message = "'efi' disk partitioning requires 'ovmf' bios";
}
{
assertion = partitionTableType == "legacy" -> config.proxmox.qemuConf.bios == "seabios";
message = "'legacy' disk partitioning requires 'seabios' bios";
}
{
assertion = partitionTableType == "legacy+gpt" -> config.proxmox.qemuConf.bios == "seabios";
message = "'legacy+gpt' disk partitioning requires 'seabios' bios";
}
];
system.build.VMA = import ../../lib/make-disk-image.nix {
name = "proxmox-${cfg.filenameSuffix}";
inherit partitionTableType;
postVM = let
# Build qemu with PVE's patch that adds support for the VMA format
vma = (pkgs.qemu_kvm.override {
@ -181,7 +223,18 @@ with lib;
boot = {
growPartition = true;
kernelParams = [ "console=ttyS0" ];
loader.grub.device = lib.mkDefault "/dev/vda";
loader.grub = {
device = lib.mkDefault (if (hasNoFsPartition || supportBios) then
# Even if there is a separate no-fs partition ("/dev/disk/by-partlabel/no-fs" i.e. "/dev/vda2"),
# which will be used by the bootloader, do not set it as loader.grub.device.
# GRUB installation fails, unless the whole disk is selected.
"/dev/vda"
else
"nodev");
efiSupport = lib.mkDefault supportEfi;
efiInstallAsRemovable = lib.mkDefault supportEfi;
};
loader.timeout = 0;
initrd.availableKernelModules = [ "uas" "virtio_blk" "virtio_pci" ];
};
@ -191,6 +244,10 @@ with lib;
autoResize = true;
fsType = "ext4";
};
fileSystems."/boot" = lib.mkIf hasBootPartition {
device = "/dev/disk/by-label/ESP";
fsType = "vfat";
};
services.qemuGuest.enable = lib.mkDefault true;
};

View file

@ -198,6 +198,7 @@ in {
etebase-server = handleTest ./etebase-server.nix {};
etesync-dav = handleTest ./etesync-dav.nix {};
extra-python-packages = handleTest ./extra-python-packages.nix {};
evcc = handleTest ./evcc.nix {};
fancontrol = handleTest ./fancontrol.nix {};
fcitx = handleTest ./fcitx {};
fenics = handleTest ./fenics.nix {};

96
nixos/tests/evcc.nix Normal file
View file

@ -0,0 +1,96 @@
import ./make-test-python.nix ({ pkgs, lib, ...} :
{
name = "evcc";
meta.maintainers = with lib.maintainers; [ hexa ];
nodes = {
machine = { config, ... }: {
services.evcc = {
enable = true;
settings = {
network = {
schema = "http";
host = "localhost";
port = 7070;
};
log = "info";
site = {
title = "NixOS Test";
meters = {
grid = "grid";
pv = "pv";
};
};
meters = [ {
type = "custom";
name = "grid";
power = {
source = "script";
cmd = "/bin/sh -c 'echo -4500'";
};
} {
type = "custom";
name = "pv";
power = {
source = "script";
cmd = "/bin/sh -c 'echo 7500'";
};
} ];
chargers = [ {
name = "dummy-charger";
type = "custom";
status = {
source = "script";
cmd = "/bin/sh -c 'echo charger status F'";
};
enabled = {
source = "script";
cmd = "/bin/sh -c 'echo charger enabled state false'";
};
enable = {
source = "script";
cmd = "/bin/sh -c 'echo set charger enabled state true'";
};
maxcurrent = {
source = "script";
cmd = "/bin/sh -c 'echo set charger max current 7200'";
};
} ];
loadpoints = [ {
title = "Dummy";
charger = "dummy-charger";
} ];
};
};
};
};
testScript = ''
start_all()
machine.wait_for_unit("evcc.service")
machine.wait_for_open_port(7070)
with subtest("Check package version propagates into frontend"):
machine.fail(
"curl --fail http://localhost:7070 | grep '0.0.1-alpha'"
)
machine.succeed(
"curl --fail http://localhost:7070 | grep '${pkgs.evcc.version}'"
)
with subtest("Check journal for errors"):
_, output = machine.execute("journalctl -o cat -u evcc.service")
assert "ERROR" not in output
with subtest("Check systemd hardening"):
_, output = machine.execute("systemd-analyze security evcc.service | grep -v ''")
machine.log(output)
'';
})

View file

@ -26,8 +26,9 @@ let
powerManagement.resumeCommands = "systemctl --no-block restart backdoor.service";
fileSystems = {
"/".device = "/dev/vda2";
fileSystems."/" = {
device = "/dev/vda2";
fsType = "ext3";
};
swapDevices = mkOverride 0 [ { device = "/dev/vda1"; } ];
boot.resumeDevice = mkIf systemdStage1 "/dev/vda1";

View file

@ -23,6 +23,8 @@ import ./make-test-python.nix ({ lib, pkgs, ... }: {
cryptroot2.device = "/dev/vdd";
};
virtualisation.bootDevice = "/dev/mapper/cryptroot";
# test mounting device unlocked in initrd after switching root
virtualisation.fileSystems."/cryptroot2".device = "/dev/mapper/cryptroot2";
};
};
@ -31,6 +33,8 @@ import ./make-test-python.nix ({ lib, pkgs, ... }: {
machine.wait_for_unit("multi-user.target")
machine.succeed("echo -n supersecret | cryptsetup luksFormat -q --iter-time=1 /dev/vdc -")
machine.succeed("echo -n supersecret | cryptsetup luksFormat -q --iter-time=1 /dev/vdd -")
machine.succeed("echo -n supersecret | cryptsetup luksOpen -q /dev/vdd cryptroot2")
machine.succeed("mkfs.ext4 /dev/mapper/cryptroot2")
# Boot from the encrypted disk
machine.succeed("bootctl set-default nixos-generation-1-specialisation-boot-luks.conf")
@ -44,5 +48,6 @@ import ./make-test-python.nix ({ lib, pkgs, ... }: {
machine.wait_for_unit("multi-user.target")
assert "/dev/mapper/cryptroot on / type ext4" in machine.succeed("mount")
assert "/dev/mapper/cryptroot2 on /cryptroot2 type ext4" in machine.succeed("mount")
'';
})

View file

@ -16,7 +16,7 @@ buildNpmPackage rec {
./package-lock.json.patch
];
npmDepsHash = "sha256-UF3pZ+SlrgDLqntciXRNbWfpPMtQw1DXl41x9r37QN4=";
npmDepsHash = "sha256-SGLcFjPnmhFoeXtP4gfGr4Qa1dTaXwSnzkweEvYW/1k=";
nativeBuildInputs = [
copyDesktopItems

View file

@ -59,8 +59,9 @@ python3.pkgs.buildPythonApplication rec {
rapidfuzz
];
# the file should be executable but it isn't so our wrapper doesn't run
preFixup = ''
makeWrapperArgs+=("''${qtWrapperArgs[@]}")
chmod 555 $out/bin/puddletag
'';
doCheck = false; # there are no tests

View file

@ -2,6 +2,7 @@
, python3
, fetchFromGitHub
, fetchpatch
, installShellFiles
, libcdio-paranoia
, cdrdao
, libsndfile
@ -35,6 +36,8 @@ in python3.pkgs.buildPythonApplication rec {
];
nativeBuildInputs = with python3.pkgs; [
installShellFiles
setuptools-scm
docutils
setuptoolsCheckHook
@ -65,6 +68,11 @@ in python3.pkgs.buildPythonApplication rec {
export SETUPTOOLS_SCM_PRETEND_VERSION="${version}"
'';
outputs = [ "out" "man" ];
postBuild = ''
make -C man
'';
preCheck = ''
# disable tests that require internet access
# https://github.com/JoeLametta/whipper/issues/291
@ -73,6 +81,10 @@ in python3.pkgs.buildPythonApplication rec {
export HOME=$TMPDIR
'';
postInstall = ''
installManPage man/*.1
'';
passthru.tests.version = testers.testVersion {
package = whipper;
command = "HOME=$TMPDIR whipper --version";

View file

@ -16,11 +16,11 @@ let
in stdenv.mkDerivation rec {
pname = "nano";
version = "6.4";
version = "7.0";
src = fetchurl {
url = "mirror://gnu/nano/${pname}-${version}.tar.xz";
sha256 = "QZmujKeKd5beVt4aQbgh3EeRLAMH6YFrVswxffNGYcA=";
sha256 = "jdbqw4srh4bYJoHw4a/YT2t1IQ0XORtkQ8Q35FFVIUk=";
};
nativeBuildInputs = [ texinfo ] ++ optional enableNls gettext;

View file

@ -1,22 +1,40 @@
{ mkDerivation, lib, fetchFromGitHub, pkg-config, which, qtbase, qtsvg, qttools, qtwebkit }:
{ mkDerivation
, lib
, fetchFromGitHub
, pkg-config
, which
, libuchardet
, qtbase
, qtsvg
, qttools
, qtwebengine
, qtwebsockets
}:
mkDerivation rec {
pname = "notepadqq";
version = "1.4.8";
# shipping a beta build as there's no proper release which supports qtwebengine
version = "2.0.0-beta";
src = fetchFromGitHub {
owner = "notepadqq";
repo = "notepadqq";
rev = "v${version}";
sha256 = "0lbv4s7ng31dkznzbkmp2cvkqglmfj6lv4mbg3r410fif2nrva7k";
sha256 = "sha256-XA9Ay9kJApY+bDeOf0iPv+BWYFuTmIuqsLEPgRTCZCE=";
};
nativeBuildInputs = [
pkg-config which qttools
pkg-config
which
qttools
];
buildInputs = [
qtbase qtsvg qtwebkit
libuchardet
qtbase
qtsvg
qtwebengine
qtwebsockets
];
preConfigure = ''

View file

@ -14,18 +14,18 @@ let
# ocaml-interface
# tree-sitter-ocaml-interface
# tree-sitter-ocaml_interface
builtGrammars = generatedGrammars // lib.listToAttrs
(lib.concatLists (lib.mapAttrsToList
(k: v:
let
replaced = lib.replaceStrings [ "_" ] [ "-" ] k;
in
map (lib.flip lib.nameValuePair v)
([ "tree-sitter-${k}" ] ++ lib.optionals (k != replaced) [
replaced
"tree-sitter-${replaced}"
]))
generatedDerivations));
builtGrammars = generatedGrammars // lib.concatMapAttrs
(k: v:
let
replaced = lib.replaceStrings [ "_" ] [ "-" ] k;
in
{
"tree-sitter-${k}" = v;
} // lib.optionalAttrs (k != replaced) {
${replaced} = v;
"tree-sitter-${replaced}" = v;
})
generatedDerivations;
allGrammars = lib.attrValues generatedDerivations;
@ -62,6 +62,10 @@ let
in
{
postPatch = ''
rm -r parser
'';
passthru = {
inherit builtGrammars allGrammars withPlugins withAllGrammars;

View file

@ -1,10 +1,28 @@
{ lib, fetchFromGitHub, gitUpdater
, meson, ninja, pkg-config, wrapGAppsHook
, desktop-file-utils, gsettings-desktop-schemas, libnotify, libhandy, webkitgtk
, python3Packages, gettext
, appstream-glib, gdk-pixbuf, glib, gobject-introspection, gspell, gtk3, gtksourceview4, gnome
, steam, xdg-utils, pciutils, cabextract
, freetype, p7zip, gamemode, mangohud
{ lib
, fetchFromGitHub
, fetchFromGitLab
, gitUpdater
, python3Packages
, blueprint-compiler
, meson
, ninja
, pkg-config
, wrapGAppsHook4
, appstream-glib
, desktop-file-utils
, librsvg
, gtk4
, gtksourceview5
, libadwaita
, steam
, cabextract
, p7zip
, xdpyinfo
, imagemagick
, procps
, gamescope
, mangohud
, vmtouch
, wine
, bottlesExtraLibraries ? pkgs: [ ] # extra packages to add to steam.run multiPkgs
, bottlesExtraPkgs ? pkgs: [ ] # extra packages to add to steam.run targetPkgs
@ -21,75 +39,77 @@ let
in
python3Packages.buildPythonApplication rec {
pname = "bottles";
version = "2022.5.28-trento-3";
version = "2022.10.14.1";
src = fetchFromGitHub {
owner = "bottlesdevs";
repo = pname;
rev = version;
sha256 = "sha256-KIDLRqDLFTsVAczRpTchnUtKJfVHqbYzf8MhIR5UdYY=";
sha256 = "sha256-FO91GSGlc2f3TSLrlmRDPi5p933/Y16tdEpX4RcKhL0=";
};
patches = [ ./vulkan_icd.patch ];
postPatch = ''
chmod +x build-aux/meson/postinstall.py
patchShebangs build-aux/meson/postinstall.py
substituteInPlace src/backend/wine/winecommand.py \
substituteInPlace bottles/backend/wine/winecommand.py \
--replace \
'self.__get_runner()' \
'(lambda r: (f"${steam-run}/bin/steam-run {r}", r)[r == "wine" or r == "wine64"])(self.__get_runner())'
'';
"command = f\"{runner} {command}\"" \
"command = f\"{''' if runner == 'wine' or runner == 'wine64' else '${steam-run}/bin/steam-run '}{runner} {command}\"" \
--replace \
"command = f\"{_picked['entry_point']} {command}\"" \
"command = f\"${steam-run}/bin/steam-run {_picked['entry_point']} {command}\""
'';
nativeBuildInputs = [
blueprint-compiler
meson
ninja
pkg-config
wrapGAppsHook
gettext
wrapGAppsHook4
gtk4 # gtk4-update-icon-cache
appstream-glib
desktop-file-utils
];
buildInputs = [
gdk-pixbuf
glib
gobject-introspection
gsettings-desktop-schemas
gspell
gtk3
gtksourceview4
libhandy
libnotify
webkitgtk
gnome.adwaita-icon-theme
librsvg
gtk4
gtksourceview5
libadwaita
];
propagatedBuildInputs = with python3Packages; [
pyyaml
pytoml
requests
pycairo
pygobject3
lxml
dbus-python
gst-python
liblarch
patool
markdown
fvs
pefile
urllib3
chardet
certifi
idna
pillow
orjson
icoextract
] ++ [
steam-run
xdg-utils
pciutils
cabextract
wine
freetype
p7zip
gamemode # programs.gamemode.enable
xdpyinfo
imagemagick
procps
gamescope
mangohud
vmtouch
wine
];
format = "other";
strictDeps = false; # broken with gobject-introspection setup hook, see https://github.com/NixOS/nixpkgs/issues/56943
dontWrapGApps = true; # prevent double wrapping
preFixup = ''

View file

@ -0,0 +1,13 @@
diff --git a/bottles/backend/utils/vulkan.py b/bottles/backend/utils/vulkan.py
index 6673493..07f70d1 100644
--- a/bottles/backend/utils/vulkan.py
+++ b/bottles/backend/utils/vulkan.py
@@ -29,6 +29,8 @@ class VulkanUtils:
"/etc/vulkan",
"/usr/local/share/vulkan",
"/usr/local/etc/vulkan"
+ "/run/opengl-driver/share/vulkan/",
+ "/run/opengl-driver-32/share/vulkan/",
]
if "FLATPAK_ID" in os.environ:
__vk_icd_dirs += [

View file

@ -0,0 +1,36 @@
{ lib, python3Packages, python3, xvfb-run }:
python3Packages.buildPythonApplication rec {
pname = "keepmenu";
version = "1.2.2";
src = python3Packages.fetchPypi {
inherit pname version;
sha256 = "SeVNtONH1bn2hb2pBOVM3Oafrb+jARgfvRe7vUu6Gto=";
};
preConfigure = ''
export HOME=$TMPDIR
mkdir -p $HOME/.config/keepmenu
cp config.ini.example $HOME/.config/keepmenu/config.ini
'';
propagatedBuildInputs = with python3Packages; [
pykeepass
pynput
];
checkInputs = [ xvfb-run ];
checkPhase = ''
xvfb-run python setup.py test
'';
pythonImportsCheck = [ "keepmenu" ];
meta = with lib; {
homepage = "https://github.com/firecat53/keepmenu";
description = "Dmenu/Rofi frontend for Keepass databases";
license = licenses.gpl3Only;
maintainers = with maintainers; [ elliot ];
};
}

View file

@ -9,10 +9,10 @@ python3Packages.buildPythonApplication rec {
owner = "ryanoasis";
repo = "nerd-fonts";
rev = "v${version}";
sparseCheckout = ''
font-patcher
/src/glyphs
'';
sparseCheckout = [
"font-patcher"
"/src/glyphs"
];
sha256 = "sha256-boZUd1PM8puc9BTgOwCJpkfk6VMdXLsIyp+fQmW/ZqI=";
};

View file

@ -1,17 +1,12 @@
{ lib
, buildPythonPackage
, python3
, fetchFromGitHub
, setuptools
, pyside2
, johnnycanencrypt
, pythonOlder
, wrapQtAppsHook
}:
buildPythonPackage rec {
python3.pkgs.buildPythonApplication rec {
pname = "tumpa";
version = "0.1.2";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "kushaldas";
@ -20,7 +15,7 @@ buildPythonPackage rec {
sha256 = "17nhdildapgic5l05f3q1wf5jvz3qqdjv543c8gij1x9rdm8hgxi";
};
propagatedBuildInputs = [
propagatedBuildInputs = with python3.pkgs; [
setuptools
johnnycanencrypt
pyside2
@ -42,5 +37,6 @@ buildPythonPackage rec {
homepage = "https://github.com/kushaldas/tumpa";
license = licenses.gpl3Plus;
maintainers = with maintainers; [ _0x4A6F ];
broken = true;
};
}

View file

@ -0,0 +1,52 @@
{
lib,
stdenv,
fetchFromGitHub,
zig,
wayland,
pkg-config,
scdoc,
wayland-protocols,
libxkbcommon,
pam,
}:
stdenv.mkDerivation rec {
pname = "waylock";
version = "0.4.2";
src = fetchFromGitHub {
owner = "ifreund";
repo = pname;
rev = "v${version}";
sha256 = "sha256-yWjWcnGa4a+Dpc82H65yr8H7v88g/tDq0FSguubhbEI=";
fetchSubmodules = true;
};
nativeBuildInputs = [zig wayland scdoc pkg-config];
buildInputs = [
wayland-protocols
libxkbcommon
pam
];
dontConfigure = true;
preBuild = ''
export HOME=$TMPDIR
'';
installPhase = ''
runHook preInstall
zig build -Drelease-safe -Dman-pages --prefix $out install
runHook postInstall
'';
meta = with lib; {
homepage = "https://github.com/ifreund/waylock";
description = "A small screenlocker for Wayland compositors";
license = licenses.isc;
platforms = platforms.linux;
maintainers = with maintainers; [jordanisaacs];
};
}

View file

@ -10,13 +10,13 @@
buildGoModule rec {
pname = "werf";
version = "1.2.188";
version = "1.2.190";
src = fetchFromGitHub {
owner = "werf";
repo = "werf";
rev = "v${version}";
hash = "sha256-C8y86q+uf+8EZ9kBAZehld7PpGByJLjhYQOrc3YKH1A=";
hash = "sha256-xjZVBLdDLLlfnXX87lwgIeQ6ySI9cNoE5nrRJVBS/l0=";
};
vendorHash = "sha256-GjcmpHyjhjCWE5gQR/oTHfhHYg5WRu8uhgAuWhdxlYk=";

View file

@ -3,7 +3,7 @@
}:
let
stableVersion = "2.2.34";
stableVersion = "2.2.35.1";
previewVersion = stableVersion;
addVersion = args:
let version = if args.stable then stableVersion else previewVersion;
@ -12,23 +12,18 @@ let
extraArgs = rec {
mkOverride = attrname: version: sha256:
self: super: {
${attrname} = super.${attrname}.overridePythonAttrs (oldAttrs: {
"${attrname}" = super."${attrname}".overridePythonAttrs (oldAttrs: {
inherit version;
src = oldAttrs.src.override {
inherit version sha256;
};
});
};
commonOverrides = [
(self: super: {
jsonschema = super.jsonschema_3;
})
];
};
mkGui = args: libsForQt5.callPackage (import ./gui.nix (addVersion args // extraArgs)) { };
mkServer = args: callPackage (import ./server.nix (addVersion args // extraArgs)) { };
guiSrcHash = "sha256-1YsVMrUYI46lJZbPjf3jnOFDr9Hp54m8DVMz9y4dvVc=";
serverSrcHash = "sha256-h4d9s+QvqN/EFV97rPRhQiyC06wkZ9C2af9gx1Z/x/8=";
guiSrcHash = "sha256-iVvADwIp01HeZoDayvH1dilYRHRkRBTBR3Fh395JBq0=";
serverSrcHash = "sha256-41dbiSjvmsDNYr9/rRkeQVOnPSVND34xx1SNknCgHfc=";
in {
guiStable = mkGui {

View file

@ -3,25 +3,15 @@
, version
, sha256Hash
, mkOverride
, commonOverrides
}:
{ lib
, python3
, fetchFromGitHub
, wrapQtAppsHook
, packageOverrides ? self: super: {}
}:
let
defaultOverrides = commonOverrides ++ [
];
python = python3.override {
packageOverrides = lib.foldr lib.composeExtensions (self: super: { }) ([ packageOverrides ] ++ defaultOverrides);
};
in python.pkgs.buildPythonPackage rec {
python3.pkgs.buildPythonPackage rec {
pname = "gns3-gui";
inherit version;
@ -36,7 +26,7 @@ in python.pkgs.buildPythonPackage rec {
wrapQtAppsHook
];
propagatedBuildInputs = with python.pkgs; [
propagatedBuildInputs = with python3.pkgs; [
distro
jsonschema
psutil
@ -55,10 +45,8 @@ in python.pkgs.buildPythonPackage rec {
postPatch = ''
substituteInPlace requirements.txt \
--replace "sentry-sdk==" "sentry-sdk>=" \
--replace "psutil==" "psutil>=" \
--replace "distro==" "distro>=" \
--replace "setuptools==" "setuptools>="
--replace "jsonschema>=4.17.0,<4.18" "jsonschema"
'';
meta = with lib; {

View file

@ -3,24 +3,14 @@
, version
, sha256Hash
, mkOverride
, commonOverrides
}:
{ lib
, python3
, fetchFromGitHub
, packageOverrides ? self: super: {}
}:
let
defaultOverrides = commonOverrides ++ [
];
python = python3.override {
packageOverrides = lib.foldr lib.composeExtensions (self: super: { }) ([ packageOverrides ] ++ defaultOverrides);
};
in python.pkgs.buildPythonApplication {
python3.pkgs.buildPythonApplication {
pname = "gns3-server";
inherit version;
@ -33,23 +23,17 @@ in python.pkgs.buildPythonApplication {
postPatch = ''
substituteInPlace requirements.txt \
--replace "aiohttp==" "aiohttp>=" \
--replace "aiofiles==" "aiofiles>=" \
--replace "Jinja2==" "Jinja2>=" \
--replace "sentry-sdk==" "sentry-sdk>=" \
--replace "async-timeout==" "async-timeout>=" \
--replace "psutil==" "psutil>=" \
--replace "distro==" "distro>=" \
--replace "py-cpuinfo==" "py-cpuinfo>=" \
--replace "setuptools==" "setuptools>="
--replace "jsonschema>=4.17.0,<4.18" "jsonschema"
'';
propagatedBuildInputs = with python.pkgs; [
propagatedBuildInputs = with python3.pkgs; [
aiofiles
aiohttp
aiohttp-cors
async_generator
distro
importlib-resources
jinja2
jsonschema
multidict

View file

@ -43,11 +43,11 @@ in
stdenv.mkDerivation rec {
pname = "mullvad-vpn";
version = "2022.4";
version = "2022.5";
src = fetchurl {
url = "https://github.com/mullvad/mullvadvpn-app/releases/download/${version}/MullvadVPN-${version}_amd64.deb";
sha256 = "sha256-OwTtWzlZjHNFSN5/UjFJbcrPCv9+ucWYEL2idYjeozU=";
sha256 = "sha256-G3B4kb+ugukYtCVH3HHI43u3n9G0dX6WyYUA3X/sZ+o=";
};
nativeBuildInputs = [
@ -75,7 +75,6 @@ stdenv.mkDerivation rec {
mv opt/Mullvad\ VPN/* $out/share/mullvad
ln -s $out/share/mullvad/mullvad-{gui,vpn} $out/bin/
ln -s $out/share/mullvad/resources/mullvad-daemon $out/bin/mullvad-daemon
ln -sf $out/share/mullvad/resources/mullvad-problem-report $out/bin/mullvad-problem-report
wrapProgram $out/bin/mullvad-vpn --set MULLVAD_DISABLE_UPDATE_NOTIFICATION 1
@ -92,7 +91,7 @@ stdenv.mkDerivation rec {
sourceProvenance = with sourceTypes; [ binaryNativeCode ];
license = licenses.gpl3Only;
platforms = [ "x86_64-linux" ];
maintainers = with maintainers; [ Br1ght0ne ymarkus ];
maintainers = with maintainers; [ Br1ght0ne ymarkus ataraxiasjel ];
};
}

View file

@ -13,7 +13,7 @@ buildGoModule {
sourceRoot = "source/wireguard/libwg";
vendorSha256 = "qvymWCdJ+GY90W/Fpdp+r1+mTq6O4LyN2Yw/PjKdFm0=";
vendorSha256 = "QNde5BqkSuqp3VJQOhn7aG6XknRDZQ62PE3WGhEJ5LU=";
# XXX: hack to make the ar archive go to the correct place
# This is necessary because passing `-o ...` to `ldflags` does not work

View file

@ -15,16 +15,16 @@
}:
rustPlatform.buildRustPackage rec {
pname = "mullvad";
version = "2022.4";
version = "2022.5";
src = fetchFromGitHub {
owner = "mullvad";
repo = "mullvadvpn-app";
rev = version;
hash = "sha256-s0Cmeh10lQUB5BK4i1qxkDy/ylx/3c6V66dxH+kcnLs=";
hash = "sha256-LiaELeEBIn/GZibKf25W3DHe+IkpaTY8UC7ca/7lp8k=";
};
cargoHash = "sha256-HPURL+CFUVLWRq8nzLiZxDhckgH76b6JBUObLGtoEEw=";
cargoHash = "sha256-KpBhdZce8Ug3ws7f1qg+5LtOMQw2Mf/uJsBg/TZSYyk=";
nativeBuildInputs = [
pkg-config

View file

@ -4,14 +4,14 @@
stdenv.mkDerivation rec {
pname = "xterm";
version = "374";
version = "375";
src = fetchurl {
urls = [
"ftp://ftp.invisible-island.net/xterm/${pname}-${version}.tgz"
"https://invisible-mirror.net/archives/xterm/${pname}-${version}.tgz"
];
sha256 = "sha256-EdTWJmcNTW17aft0Z+nsIxgX5a0iUC+RZ3aP2IrBvfU=";
sha256 = "sha256-MCxZor+B55xqcBUl13gWGiGNEjnyFWjYnivdMcAVIX8=";
};
strictDeps = true;

View file

@ -0,0 +1,57 @@
{ lib
, stdenv
, buildGoModule
, fetchFromGitHub
, Carbon
, Cocoa
, Kernel
, UserNotifications
, xorg
, libglvnd
, pkg-config
, withGui ? true
}:
buildGoModule rec {
pname = "go2tv" + lib.optionalString (!withGui) "-lite";
version = "1.13.0";
src = fetchFromGitHub {
owner = "alexballas";
repo = "go2tv";
rev = "v${version}";
sha256 = "sha256-ZHKfBKOX3/kVR6Nc+jSmLgfmpihc6QMb6NvTFlsBr5E=";
};
vendorSha256 = "sha256-msXfXFWXyZeT6zrRPZkBV7PEyPqYkx+JlpTWUwgFavI=";
nativeBuildInputs = [ pkg-config ];
buildInputs = [
xorg.libX11
xorg.libXcursor
xorg.libXrandr
xorg.libXinerama
xorg.libXi
xorg.libXext
xorg.libXxf86vm
libglvnd
] ++ lib.optionals stdenv.isDarwin [ Carbon Cocoa Kernel UserNotifications ];
ldflags = [
"-s" "-w"
"-linkmode=external"
];
# conditionally build with GUI or not (go2tv or go2tv-lite sub-packages)
subPackages = [ "cmd/${pname}" ];
doCheck = false;
meta = with lib; {
description = "Cast media files to UPnP/DLNA Media Renderers and Smart TVs";
homepage = "https://github.com/alexballas/go2tv";
license = licenses.mit;
maintainers = with maintainers; [ gdamjan ];
};
}

View file

@ -1,6 +1,7 @@
{ lib
, buildPythonApplication
, fetchPypi
, gobject-introspection
, jellyfin-apiclient-python
, jinja2
, mpv
@ -9,6 +10,7 @@
, python-mpv-jsonipc
, pywebview
, tkinter
, wrapGAppsHook
}:
buildPythonApplication rec {
@ -20,6 +22,11 @@ buildPythonApplication rec {
sha256 = "sha256-JiSC6WjrLsWk3/m/EHq7KNXaJ6rqT2fG9TT1jPvYlK0=";
};
nativeBuildInputs = [
wrapGAppsHook
gobject-introspection
];
propagatedBuildInputs = [
jellyfin-apiclient-python
mpv
@ -52,6 +59,12 @@ buildPythonApplication rec {
--replace "notify_updates: bool = True" "notify_updates: bool = False"
'';
# needed for pystray to access appindicator using GI
preFixup = ''
makeWrapperArgs+=("''${gappsWrapperArgs[@]}")
'';
dontWrapGApps = true;
# no tests
doCheck = false;
pythonImportsCheck = [ "jellyfin_mpv_shim" ];

View file

@ -1,4 +1,5 @@
{ lib, buildPythonApplication, fetchFromGitHub, mpv, requests, python-mpv-jsonipc, pystray, tkinter }:
{ lib, buildPythonApplication, fetchFromGitHub, mpv, requests, python-mpv-jsonipc, pystray, tkinter
, wrapGAppsHook, gobject-introspection }:
buildPythonApplication rec {
pname = "plex-mpv-shim";
@ -11,8 +12,19 @@ buildPythonApplication rec {
sha256 = "0hgv9g17dkrh3zbsx27n80yvkgix9j2x0rgg6d3qsf7hp5j3xw4r";
};
nativeBuildInputs = [
wrapGAppsHook
gobject-introspection
];
propagatedBuildInputs = [ mpv requests python-mpv-jsonipc pystray tkinter ];
# needed for pystray to access appindicator using GI
preFixup = ''
makeWrapperArgs+=("''${gappsWrapperArgs[@]}")
'';
dontWrapGApps = true;
# does not contain tests
doCheck = false;

View file

@ -4,7 +4,6 @@
}:
stdenv.mkDerivation rec {
pname = "i3ipc-glib";
version = "1.0.1";
@ -15,10 +14,10 @@ stdenv.mkDerivation rec {
sha256 = "01fzvrbnzcwx0vxw29igfpza9zwzp2s7msmzb92v01z0rz0y5m0p";
};
nativeBuildInputs = [ autoreconfHook which pkg-config ];
buildInputs = [ libxcb json-glib gtk-doc xorgproto gobject-introspection ];
strictDeps = true;
nativeBuildInputs = [ autoreconfHook which pkg-config gtk-doc gobject-introspection ];
buildInputs = [ libxcb json-glib xorgproto ];
preAutoreconf = ''
gtkdocize

View file

@ -9,7 +9,7 @@
, postPhpize ? ""
, makeFlags ? [ ]
, src ? fetchurl {
url = "http://pecl.php.net/get/${pname}-${version}.tgz";
url = "https://pecl.php.net/get/${pname}-${version}.tgz";
inherit (args) sha256;
}
, ...

View file

@ -547,10 +547,14 @@ rec {
pure = writeText "${baseName}-config.json" (builtins.toJSON {
inherit created config;
architecture = defaultArch;
preferLocalBuild = true;
os = "linux";
});
impure = runCommand "${baseName}-config.json"
{ nativeBuildInputs = [ jq ]; }
{
nativeBuildInputs = [ jq ];
preferLocalBuild = true;
}
''
jq ".created = \"$(TZ=utc date --iso-8601="seconds")\"" ${pure} > $out
'';
@ -925,6 +929,7 @@ rec {
{
inherit fromImage maxLayers created;
imageName = lib.toLower name;
preferLocalBuild = true;
passthru.imageTag =
if tag != null
then tag
@ -1015,6 +1020,7 @@ rec {
result = runCommand "stream-${baseName}"
{
inherit (conf) imageName;
preferLocalBuild = true;
passthru = passthru // {
inherit (conf) imageTag;

View file

@ -15,7 +15,7 @@ in
{ url, rev ? "HEAD", md5 ? "", sha256 ? "", hash ? "", leaveDotGit ? deepClone
, fetchSubmodules ? true, deepClone ? false
, branchName ? null
, sparseCheckout ? ""
, sparseCheckout ? []
, nonConeMode ? false
, name ? urlToName url rev
, # Shell code executed after the file has been fetched
@ -55,13 +55,16 @@ in
*/
assert deepClone -> leaveDotGit;
assert nonConeMode -> (sparseCheckout != "");
assert nonConeMode -> !(sparseCheckout == "" || sparseCheckout == []);
if md5 != "" then
throw "fetchgit does not support md5 anymore, please use sha256"
else if hash != "" && sha256 != "" then
throw "Only one of sha256 or hash can be set"
else
# Added 2022-11-12
lib.warnIf (builtins.isString sparseCheckout)
"Please provide directories/patterns for sparse checkout as a list of strings. Support for passing a (multi-line) string is deprecated and will be removed in the next release."
stdenvNoCC.mkDerivation {
inherit name;
builder = ./builder.sh;
@ -79,7 +82,12 @@ stdenvNoCC.mkDerivation {
else
lib.fakeSha256;
inherit url rev leaveDotGit fetchLFS fetchSubmodules deepClone branchName sparseCheckout nonConeMode postFetch;
# git-sparse-checkout(1) says:
# > When the --stdin option is provided, the directories or patterns are read
# > from standard in as a newline-delimited list instead of from the arguments.
sparseCheckout = if builtins.isString sparseCheckout then sparseCheckout else builtins.concatStringsSep "\n" sparseCheckout;
inherit url rev leaveDotGit fetchLFS fetchSubmodules deepClone branchName nonConeMode postFetch;
postHook = if netrcPhase == null then null else ''
${netrcPhase}

View file

@ -12,10 +12,10 @@
name = "nix-source";
url = "https://github.com/NixOS/nix";
rev = "9d9dbe6ed05854e03811c361a3380e09183f4f4a";
sparseCheckout = ''
src
tests
'';
sparseCheckout = [
"src"
"tests"
];
sha256 = "sha256-g1PHGTWgAcd/+sXHo1o6AjVWCvC6HiocOfMbMh873LQ=";
};
@ -23,10 +23,10 @@
name = "nix-source";
url = "https://github.com/NixOS/nix";
rev = "9d9dbe6ed05854e03811c361a3380e09183f4f4a";
sparseCheckout = ''
src
tests
'';
sparseCheckout = [
"src"
"tests"
];
nonConeMode = true;
sha256 = "sha256-FknO6C/PSnMPfhUqObD4vsW4PhkwdmPa9blNzcNvJQ4=";
};

View file

@ -3,7 +3,7 @@
{ owner, repo, rev, name ? "source"
, fetchSubmodules ? false, leaveDotGit ? null
, deepClone ? false, private ? false, forceFetchGit ? false
, sparseCheckout ? ""
, sparseCheckout ? []
, githubBase ? "github.com", varPrefix ? null
, meta ? { }
, ... # For hash agility
@ -24,7 +24,7 @@ let
};
passthruAttrs = removeAttrs args [ "owner" "repo" "rev" "fetchSubmodules" "forceFetchGit" "private" "githubBase" "varPrefix" ];
varBase = "NIX${if varPrefix == null then "" else "_${varPrefix}"}_GITHUB_PRIVATE_";
useFetchGit = fetchSubmodules || (leaveDotGit == true) || deepClone || forceFetchGit || (sparseCheckout != "");
useFetchGit = fetchSubmodules || (leaveDotGit == true) || deepClone || forceFetchGit || !(sparseCheckout == "" || sparseCheckout == []);
# We prefer fetchzip in cases we don't need submodules as the hash
# is more stable in that case.
fetcher = if useFetchGit then fetchgit else fetchzip;

View file

@ -17,7 +17,7 @@
, npmBuildScript ? "build"
# Flags to pass to all npm commands.
, npmFlags ? [ ]
# Flags to pass to `npm ci`.
# Flags to pass to `npm ci` and `npm prune`.
, npmInstallFlags ? [ ]
# Flags to pass to `npm rebuild`.
, npmRebuildFlags ? [ ]

View file

@ -7,10 +7,11 @@
substitutions = {
nodeSrc = srcOnly nodejs;
# Specify the stdenv's `diff` and `jq` by abspath to ensure that the user's build
# Specify `diff`, `jq`, and `prefetch-npm-deps` by abspath to ensure that the user's build
# inputs do not cause us to find the wrong binaries.
diff = "${buildPackages.diffutils}/bin/diff";
jq = "${buildPackages.jq}/bin/jq";
prefetchNpmDeps = "${buildPackages.prefetch-npm-deps}/bin/prefetch-npm-deps";
nodeVersion = nodejs.version;
nodeVersionMajor = lib.versions.major nodejs.version;

View file

@ -20,6 +20,7 @@ npmBuildHook() {
echo
echo "Here are a few things you can try, depending on the error:"
echo "1. Make sure your build script ($npmBuildScript) exists"
echo " If there is none, set `dontNpmBuild = true`."
echo '2. If the error being thrown is something similar to "error:0308010C:digital envelope routines::unsupported", add `NODE_OPTIONS = "--openssl-legacy-provider"` to your derivation'
echo " See https://github.com/webpack/webpack/issues/14532 for more information."
echo

View file

@ -5,9 +5,18 @@ npmConfigHook() {
echo "Configuring npm"
export HOME=$TMPDIR
export HOME="$TMPDIR"
export npm_config_nodedir="@nodeSrc@"
if [ -z "${npmDeps-}" ]; then
echo
echo "ERROR: no dependencies were specified"
echo 'Hint: set `npmDeps` if using these hooks individually. If this is happening with `buildNpmPackage`, please open an issue.'
echo
exit 1
fi
local -r cacheLockfile="$npmDeps/package-lock.json"
local -r srcLockfile="$PWD/package-lock.json"
@ -47,15 +56,17 @@ npmConfigHook() {
exit 1
fi
@prefetchNpmDeps@ --fixup-lockfile "$srcLockfile"
local cachePath
if [ -z "${makeCacheWritable-}" ]; then
cachePath=$npmDeps
cachePath="$npmDeps"
else
echo "Making cache writable"
cp -r "$npmDeps" "$TMPDIR/cache"
chmod -R 700 "$TMPDIR/cache"
cachePath=$TMPDIR/cache
cachePath="$TMPDIR/cache"
fi
npm config set cache "$cachePath"
@ -71,7 +82,7 @@ npmConfigHook() {
echo "Here are a few things you can try, depending on the error:"
echo '1. Set `makeCacheWritable = true`'
echo " Note that this won't help if npm is complaining about not being able to write to the logs directory -- look above that for the actual error."
echo '2. Set `npmInstallFlags = [ "--legacy-peer-deps" ]`'
echo '2. Set `npmFlags = [ "--legacy-peer-deps" ]`'
echo
exit 1
@ -96,6 +107,8 @@ npmConfigHook() {
rm node_modules/.meow
fi
patchShebangs node_modules
echo "Finished npmConfigHook"
}

View file

@ -27,7 +27,7 @@ npmInstallHook() {
local -r nodeModulesPath="$packageOut/node_modules"
if [ ! -d "$nodeModulesPath" ]; then
npm prune --omit dev
npm prune --omit dev $npmInstallFlags "${npmInstallFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}"
find node_modules -maxdepth 1 -type d -empty -delete
cp -r node_modules "$nodeModulesPath"

View file

@ -1,4 +1,4 @@
{ lib, stdenvNoCC, rustPlatform, Security, testers, fetchurl, prefetch-npm-deps, fetchNpmDeps }:
{ lib, stdenvNoCC, rustPlatform, makeWrapper, Security, gnutar, gzip, testers, fetchurl, prefetch-npm-deps, fetchNpmDeps }:
{
prefetch-npm-deps = rustPlatform.buildRustPackage {
@ -16,8 +16,13 @@
cargoLock.lockFile = ./Cargo.lock;
nativeBuildInputs = [ makeWrapper ];
buildInputs = lib.optional stdenvNoCC.isDarwin Security;
postInstall = ''
wrapProgram "$out/bin/prefetch-npm-deps" --prefix PATH : ${lib.makeBinPath [ gnutar gzip ]}
'';
passthru.tests =
let
makeTestSrc = { name, src }: stdenvNoCC.mkDerivation {
@ -46,7 +51,7 @@
hash = "sha256-uQmc+S+V1co1Rfc4d82PpeXjmd1UqdsG492ADQFcZGA=";
};
hash = "sha256-fk7L9vn8EHJsGJNMAjYZg9h0PT6dAwiahdiEeXVrMB8=";
hash = "sha256-wca1QvxUw3OrLStfYN9Co6oVBR1LbfcNUKlDqvObps4=";
};
lockfileV2 = makeTest {
@ -57,7 +62,7 @@
hash = "sha256-qS29tq5QPnGxV+PU40VgMAtdwVLtLyyhG2z9GMeYtC4=";
};
hash = "sha256-s8SpZY/1tKZVd3vt7sA9vsqHvEaNORQBMrSyhWpj048=";
hash = "sha256-tuEfyePwlOy2/mOPdXbqJskO6IowvAP4DWg8xSZwbJw=";
};
hashPrecedence = makeTest {
@ -68,7 +73,7 @@
hash = "sha256-1+0AQw9EmbHiMPA/H8OP8XenhrkhLRYBRhmd1cNPFjk=";
};
hash = "sha256-KRxwrEij3bpZ5hbQhX67KYpnY2cRS7u2EVZIWO1FBPM=";
hash = "sha256-oItUls7AXcCECuyA+crQO6B0kv4toIr8pBubNwB7kAM=";
};
hostedGitDeps = makeTest {
@ -79,7 +84,30 @@
hash = "sha256-X9mCwPqV5yP0S2GonNvpYnLSLJMd/SUIked+hMRxDpA=";
};
hash = "sha256-oIM05TGHstX1D4k2K4TJ+SHB7H/tNKzxzssqf0GJwvY=";
hash = "sha256-5Mg7KDJLMM5e/7BCHGinGAnBRft2ySQzvKW06p3u/0o=";
};
linkDependencies = makeTest {
name = "link-dependencies";
src = fetchurl {
url = "https://raw.githubusercontent.com/evcc-io/evcc/0.106.3/package-lock.json";
hash = "sha256-6ZTBMyuyPP/63gpQugggHhKVup6OB4hZ2rmSvPJ0yEs=";
};
hash = "sha256-VzQhArHoznYSXUT7l9HkJV4yoSOmoP8eYTLel1QwmB4=";
};
# This package contains both hosted Git shorthand, and a bundled dependency that happens to override an existing one.
etherpadLite1818 = makeTest {
name = "etherpad-lite-1.8.18";
src = fetchurl {
url = "https://raw.githubusercontent.com/ether/etherpad-lite/1.8.18/src/package-lock.json";
hash = "sha256-1fGNxYJi1I4cXK/jinNG+Y6tPEOhP3QAqWOBEQttS9E=";
};
hash = "sha256-8xF8F74nHwL9KPN2QLsxnfvsk0rNCKOZniYJQCD5u/I=";
};
};

View file

@ -109,7 +109,7 @@ impl Cache {
let mut file = File::options().append(true).create(true).open(index_path)?;
write!(file, "\n{:x}\t{data}", Sha1::new().chain(&data).finalize())?;
write!(file, "{:x}\t{data}", Sha1::new().chain(&data).finalize())?;
Ok(())
}

View file

@ -1,19 +1,22 @@
#![warn(clippy::pedantic)]
use crate::cacache::Cache;
use anyhow::anyhow;
use anyhow::{anyhow, Context};
use rayon::prelude::*;
use serde::Deserialize;
use serde_json::{Map, Value};
use std::{
collections::HashMap,
env, fs,
collections::{HashMap, HashSet},
env, fmt, fs, io,
path::Path,
process::{self, Command},
process::{self, Command, Stdio},
};
use tempfile::tempdir;
use url::Url;
mod cacache;
#[cfg(test)]
mod tests;
#[derive(Deserialize)]
struct PackageLock {
@ -25,38 +28,93 @@ struct PackageLock {
#[derive(Deserialize)]
struct OldPackage {
version: String,
resolved: Option<String>,
version: UrlOrString,
#[serde(default)]
bundled: bool,
resolved: Option<UrlOrString>,
integrity: Option<String>,
dependencies: Option<HashMap<String, OldPackage>>,
}
#[derive(Deserialize)]
#[derive(Debug, Deserialize, PartialEq, Eq)]
struct Package {
resolved: Option<Url>,
resolved: Option<UrlOrString>,
integrity: Option<String>,
}
#[derive(Debug, Deserialize, PartialEq, Eq)]
#[serde(untagged)]
enum UrlOrString {
Url(Url),
String(String),
}
impl fmt::Display for UrlOrString {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
UrlOrString::Url(url) => url.fmt(f),
UrlOrString::String(string) => string.fmt(f),
}
}
}
#[allow(clippy::case_sensitive_file_extension_comparisons)]
fn to_new_packages(
old_packages: HashMap<String, OldPackage>,
initial_url: &Url,
) -> anyhow::Result<HashMap<String, Package>> {
let mut new = HashMap::new();
for (name, package) in old_packages {
for (name, mut package) in old_packages {
// In some cases, a bundled dependency happens to have the same version as a non-bundled one, causing
// the bundled one without a URL to override the entry for the non-bundled instance, which prevents the
// dependency from being downloaded.
if package.bundled {
continue;
}
if let UrlOrString::Url(v) = &package.version {
for (scheme, host) in [
("github", "github.com"),
("bitbucket", "bitbucket.org"),
("gitlab", "gitlab.com"),
] {
if v.scheme() == scheme {
package.version = {
let mut new_url = initial_url.clone();
new_url.set_host(Some(host))?;
if v.path().ends_with(".git") {
new_url.set_path(v.path());
} else {
new_url.set_path(&format!("{}.git", v.path()));
}
new_url.set_fragment(v.fragment());
UrlOrString::Url(new_url)
};
break;
}
}
}
new.insert(
format!("{name}-{}", package.version),
Package {
resolved: if let Ok(url) = Url::parse(&package.version) {
Some(url)
resolved: if matches!(package.version, UrlOrString::Url(_)) {
Some(package.version)
} else {
package.resolved.as_deref().map(Url::parse).transpose()?
package.resolved
},
integrity: package.integrity,
},
);
if let Some(dependencies) = package.dependencies {
new.extend(to_new_packages(dependencies)?);
new.extend(to_new_packages(dependencies, initial_url)?);
}
}
@ -184,6 +242,59 @@ fn get_ideal_hash(integrity: &str) -> anyhow::Result<&str> {
}
}
fn get_initial_url() -> anyhow::Result<Url> {
Url::parse("git+ssh://git@a.b").context("initial url should be valid")
}
/// `fixup_lockfile` removes the `integrity` field from Git dependencies.
///
/// Git dependencies from specific providers can be retrieved from those providers' automatic tarball features.
/// When these dependencies are specified with a commit identifier, npm generates a tarball, and inserts the integrity hash of that
/// tarball into the lockfile.
///
/// Thus, we remove this hash, to replace it with our own deterministic copies of dependencies from hosted Git providers.
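///
/// For example, a lockfile entry whose `resolved` field is a `git+ssh://` URL (say,
/// `git+ssh://git@github.com/foo/bar.git`, an illustrative name) has its `integrity`
/// field dropped, while entries resolved from registry tarballs keep theirs.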
fn fixup_lockfile(mut lock: Map<String, Value>) -> anyhow::Result<Option<Map<String, Value>>> {
if lock
.get("lockfileVersion")
.ok_or_else(|| anyhow!("couldn't get lockfile version"))?
.as_i64()
.ok_or_else(|| anyhow!("lockfile version isn't an int"))?
< 2
{
return Ok(None);
}
let mut fixed = false;
for package in lock
.get_mut("packages")
.ok_or_else(|| anyhow!("couldn't get packages"))?
.as_object_mut()
.ok_or_else(|| anyhow!("packages isn't a map"))?
.values_mut()
{
if let Some(Value::String(resolved)) = package.get("resolved") {
if resolved.starts_with("git+ssh://") && package.get("integrity").is_some() {
fixed = true;
package
.as_object_mut()
.ok_or_else(|| anyhow!("package isn't a map"))?
.remove("integrity");
}
}
}
if fixed {
lock.remove("dependencies");
Ok(Some(lock))
} else {
Ok(None)
}
}
#[allow(clippy::too_many_lines)]
fn main() -> anyhow::Result<()> {
let args = env::args().collect::<Vec<_>>();
@ -195,6 +306,18 @@ fn main() -> anyhow::Result<()> {
process::exit(1);
}
if args[1] == "--fixup-lockfile" {
let lock = serde_json::from_str(&fs::read_to_string(&args[2])?)?;
if let Some(fixed) = fixup_lockfile(lock)? {
println!("Fixing lockfile");
fs::write(&args[2], serde_json::to_string(&fixed)?)?;
}
return Ok(());
}
let lock_content = fs::read_to_string(&args[1])?;
let lock: PackageLock = serde_json::from_str(&lock_content)?;
@ -213,7 +336,13 @@ fn main() -> anyhow::Result<()> {
eprintln!("lockfile version: {}", lock.version);
let packages = match lock.version {
1 => lock.dependencies.map(to_new_packages).transpose()?,
1 => {
let initial_url = get_initial_url()?;
lock.dependencies
.map(|p| to_new_packages(p, &initial_url))
.transpose()?
}
2 | 3 => lock.packages,
_ => panic!(
"We don't support lockfile version {}, please file an issue.",
@ -225,31 +354,90 @@ fn main() -> anyhow::Result<()> {
return Ok(());
}
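// Deduplicate by resolved URL and drop entries without a usable URL (such as the
// lockfile's root "" entry), so each dependency is fetched only once.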
let packages = {
let mut seen = HashSet::new();
let mut new_packages = HashMap::new();
for (dep, package) in packages.unwrap().drain() {
if let (false, Some(UrlOrString::Url(resolved))) = (dep.is_empty(), &package.resolved) {
if !seen.contains(resolved) {
seen.insert(resolved.clone());
new_packages.insert(dep, package);
}
}
}
new_packages
};
let cache = Cache::new(out.join("_cacache"));
packages
.unwrap()
.into_par_iter()
.try_for_each(|(dep, package)| {
if dep.is_empty() || package.resolved.is_none() {
return Ok::<_, anyhow::Error>(());
}
.try_for_each(|(dep, mut package)| {
eprintln!("{dep}");
let mut resolved = package.resolved.unwrap();
let mut resolved = match package.resolved {
Some(UrlOrString::Url(url)) => url,
_ => unreachable!(),
};
let mut hosted = false;
if let Some(hosted_git_url) = get_hosted_git_url(&resolved) {
resolved = hosted_git_url;
package.integrity = None;
hosted = true;
}
let mut data = Vec::new();
agent
.get(resolved.as_str())
.call()?
.into_reader()
.read_to_end(&mut data)?;
let mut body = agent.get(resolved.as_str()).call()?.into_reader();
if hosted {
let workdir = tempdir()?;
let tar_path = workdir.path().join("package");
fs::create_dir(&tar_path)?;
let mut cmd = Command::new("tar")
.args(["--extract", "--gzip", "--strip-components=1", "-C"])
.arg(&tar_path)
.stdin(Stdio::piped())
.spawn()?;
io::copy(&mut body, &mut cmd.stdin.take().unwrap())?;
let exit = cmd.wait()?;
if !exit.success() {
return Err(anyhow!(
"failed to extract tarball for {dep}: tar exited with status code {}",
exit.code().unwrap()
));
}
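// Re-pack the extracted tree with sorted names, zeroed timestamps and ownership,
// and `gzip -n`, so the resulting tarball (and therefore its hash) is reproducible.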
data = Command::new("tar")
.args([
"--sort=name",
"--mtime=0",
"--owner=0",
"--group=0",
"--numeric-owner",
"--format=gnu",
"-I",
"gzip -n -9",
"--create",
"-C",
])
.arg(workdir.path())
.arg("package")
.output()?
.stdout;
} else {
body.read_to_end(&mut data)?;
}
cache
.put(
@ -263,7 +451,7 @@ fn main() -> anyhow::Result<()> {
)
.map_err(|e| anyhow!("couldn't insert cache entry for {dep}: {e:?}"))?;
Ok(())
Ok::<_, anyhow::Error>(())
})?;
fs::write(out.join("package-lock.json"), lock_content)?;
@ -277,58 +465,3 @@ fn main() -> anyhow::Result<()> {
Ok(())
}
#[cfg(test)]
mod tests {
use super::{get_hosted_git_url, get_ideal_hash};
use url::Url;
#[test]
fn hosted_git_urls() {
for (input, expected) in [
(
"git+ssh://git@github.com/castlabs/electron-releases.git#fc5f78d046e8d7cdeb66345a2633c383ab41f525",
Some("https://codeload.github.com/castlabs/electron-releases/tar.gz/fc5f78d046e8d7cdeb66345a2633c383ab41f525"),
),
(
"https://user@github.com/foo/bar#fix/bug",
Some("https://codeload.github.com/foo/bar/tar.gz/fix/bug")
),
(
"https://github.com/eligrey/classList.js/archive/1.2.20180112.tar.gz",
None
),
(
"git+ssh://bitbucket.org/foo/bar#branch",
Some("https://bitbucket.org/foo/bar/get/branch.tar.gz")
),
(
"ssh://git@gitlab.com/foo/bar.git#fix/bug",
Some("https://gitlab.com/foo/bar/repository/archive.tar.gz?ref=fix/bug")
),
(
"git+ssh://git.sr.ht/~foo/bar#branch",
Some("https://git.sr.ht/~foo/bar/archive/branch.tar.gz")
),
] {
assert_eq!(
get_hosted_git_url(&Url::parse(input).unwrap()),
expected.map(|u| Url::parse(u).unwrap())
);
}
}
#[test]
fn ideal_hashes() {
for (input, expected) in [
("sha512-foo sha1-bar", Some("sha512-foo")),
("sha1-bar md5-foo", Some("sha1-bar")),
("sha1-bar", Some("sha1-bar")),
("sha512-foo", Some("sha512-foo")),
("foo-bar sha1-bar", Some("sha1-bar")),
("foo-bar baz-foo", None),
] {
assert_eq!(get_ideal_hash(input).ok(), expected);
}
}
}

View file

@ -0,0 +1,141 @@
use super::{
fixup_lockfile, get_hosted_git_url, get_ideal_hash, get_initial_url, to_new_packages,
OldPackage, Package, UrlOrString,
};
use serde_json::json;
use std::collections::HashMap;
use url::Url;
#[test]
fn hosted_git_urls() {
for (input, expected) in [
(
"git+ssh://git@github.com/castlabs/electron-releases.git#fc5f78d046e8d7cdeb66345a2633c383ab41f525",
Some("https://codeload.github.com/castlabs/electron-releases/tar.gz/fc5f78d046e8d7cdeb66345a2633c383ab41f525"),
),
(
"https://user@github.com/foo/bar#fix/bug",
Some("https://codeload.github.com/foo/bar/tar.gz/fix/bug")
),
(
"https://github.com/eligrey/classList.js/archive/1.2.20180112.tar.gz",
None
),
(
"git+ssh://bitbucket.org/foo/bar#branch",
Some("https://bitbucket.org/foo/bar/get/branch.tar.gz")
),
(
"ssh://git@gitlab.com/foo/bar.git#fix/bug",
Some("https://gitlab.com/foo/bar/repository/archive.tar.gz?ref=fix/bug")
),
(
"git+ssh://git.sr.ht/~foo/bar#branch",
Some("https://git.sr.ht/~foo/bar/archive/branch.tar.gz")
),
] {
assert_eq!(
get_hosted_git_url(&Url::parse(input).unwrap()),
expected.map(|u| Url::parse(u).unwrap())
);
}
}
#[test]
fn ideal_hashes() {
for (input, expected) in [
("sha512-foo sha1-bar", Some("sha512-foo")),
("sha1-bar md5-foo", Some("sha1-bar")),
("sha1-bar", Some("sha1-bar")),
("sha512-foo", Some("sha512-foo")),
("foo-bar sha1-bar", Some("sha1-bar")),
("foo-bar baz-foo", None),
] {
assert_eq!(get_ideal_hash(input).ok(), expected);
}
}
#[test]
fn git_shorthand_v1() -> anyhow::Result<()> {
let old = {
let mut o = HashMap::new();
o.insert(
String::from("sqlite3"),
OldPackage {
version: UrlOrString::Url(
Url::parse(
"github:mapbox/node-sqlite3#593c9d498be2510d286349134537e3bf89401c4a",
)
.unwrap(),
),
bundled: false,
resolved: None,
integrity: None,
dependencies: None,
},
);
o
};
let initial_url = get_initial_url()?;
let new = to_new_packages(old, &initial_url)?;
assert_eq!(new.len(), 1, "new packages map should contain 1 value");
assert_eq!(new.into_values().next().unwrap(), Package {
resolved: Some(UrlOrString::Url(Url::parse("git+ssh://git@github.com/mapbox/node-sqlite3.git#593c9d498be2510d286349134537e3bf89401c4a").unwrap())),
integrity: None
});
Ok(())
}
#[test]
fn lockfile_fixup() -> anyhow::Result<()> {
let input = json!({
"lockfileVersion": 2,
"name": "foo",
"packages": {
"": {
},
"foo": {
"resolved": "https://github.com/NixOS/nixpkgs",
"integrity": "aaa"
},
"bar": {
"resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git",
"integrity": "bbb"
}
}
});
let expected = json!({
"lockfileVersion": 2,
"name": "foo",
"packages": {
"": {
},
"foo": {
"resolved": "https://github.com/NixOS/nixpkgs",
"integrity": "aaa"
},
"bar": {
"resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git",
}
}
});
assert_eq!(
fixup_lockfile(input.as_object().unwrap().clone())?,
Some(expected.as_object().unwrap().clone())
);
assert_eq!(
fixup_lockfile(json!({"lockfileVersion": 1}).as_object().unwrap().clone())?,
None
);
Ok(())
}

View file

@ -5,6 +5,7 @@ path: runCommand "closure-paths"
{
exportReferencesGraph.graph = path;
__structuredAttrs = true;
preferLocalBuild = true;
PATH = "${coreutils}/bin:${python3}/bin";
builder = builtins.toFile "builder"
''

View file

@ -1,8 +1,60 @@
{ pkgs, lib, callPackage, runCommand, stdenv }:
{ pkgs, buildPackages, lib, callPackage, runCommand, stdenv, substituteAll, }:
# Documentation is in doc/builders/testers.chapter.md
{
# See https://nixos.org/manual/nixpkgs/unstable/#tester-testBuildFailure
# or doc/builders/testers.chapter.md
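# expect-failure.sh (below) re-runs the original builder with its realBuilder/args
# (or the stdenv default builder), flips the exit status, and saves the build log
# into the default output.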
testBuildFailure = drv: drv.overrideAttrs (orig: {
builder = buildPackages.bash;
args = [
(substituteAll { coreutils = buildPackages.coreutils; src = ./expect-failure.sh; })
orig.realBuilder or stdenv.shell
] ++ orig.args or ["-e" (orig.builder or ../../stdenv/generic/default-builder.sh)];
});
# See https://nixos.org/manual/nixpkgs/unstable/#tester-testEqualDerivation
# or doc/builders/testers.chapter.md
testEqualDerivation = callPackage ./test-equal-derivation.nix { };
# See https://nixos.org/manual/nixpkgs/unstable/#tester-testEqualContents
# or doc/builders/testers.chapter.md
testEqualContents = {
assertion,
actual,
expected,
}: runCommand "equal-contents-${lib.strings.toLower assertion}" {
inherit assertion actual expected;
} ''
echo "Checking:"
echo "$assertion"
if ! diff -U5 -r "$actual" "$expected" --color=always
then
echo
echo 'Contents must be equal, but were not!'
echo
echo "+: expected, at $expected"
echo "-: unexpected, at $actual"
exit 1
else
find "$expected" -type f -executable > expected-executables | sort
find "$actual" -type f -executable > actual-executables | sort
if ! diff -U0 actual-executables expected-executables --color=always
then
echo
echo "Contents must be equal, but some files' executable bits don't match"
echo
echo "+: make this file executable in the actual contents"
echo "-: make this file non-executable in the actual contents"
exit 1
else
echo "expected $expected and actual $actual match."
echo 'OK'
touch $out
fi
fi
'';
# See https://nixos.org/manual/nixpkgs/unstable/#tester-testVersion
# or doc/builders/testers.chapter.md
testVersion =
{ package,
command ? "${package.meta.mainProgram or package.pname or package.name} --version",

View file

@ -0,0 +1,62 @@
# Run a builder, flip exit code, save log and fix outputs
#
# Sub-goals:
# - Delegate to another original builder passed via args
# - Save the build log to output for further checks
# - Make the derivation succeed if the original builder fails
# - Make the derivation fail if the original builder returns exit code 0
#
# Requirements:
# This runs before, without and after stdenv. Do not modify the environment;
# especially not before invoking the original builder. For example, use
# "@" substitutions instead of PATH.
# Do not export any variables.
# Stricter bash
set -eu
# ------------------------
# Run the original builder
echo "testBuildFailure: Expecting non-zero exit from builder and args: ${*@Q}"
("$@" 2>&1) | @coreutils@/bin/tee $TMPDIR/testBuildFailure.log \
| while read ln; do
echo "original builder: $ln"
done
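# PIPESTATUS[0] is the exit status of the original builder subshell, not of tee
# or the log-echoing loop.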
r=${PIPESTATUS[0]}
if [[ $r = 0 ]]; then
echo "testBuildFailure: The builder did not fail, but a failure was expected!"
exit 1
fi
echo "testBuildFailure: Original builder produced exit code: $r"
# -----------------------------------------
# Write the build log to the default output
outs=( $outputs )
defOut=${outs[0]}
defOutPath=${!defOut}
if [[ ! -d $defOutPath ]]; then
if [[ -e $defOutPath ]]; then
@coreutils@/bin/mv $defOutPath $TMPDIR/out-node
@coreutils@/bin/mkdir $defOutPath
@coreutils@/bin/mv $TMPDIR/out-node $defOutPath/result
fi
fi
@coreutils@/bin/mkdir -p $defOutPath
@coreutils@/bin/mv $TMPDIR/testBuildFailure.log $defOutPath/testBuildFailure.log
echo $r >$defOutPath/testBuildFailure.exit
# ------------------------------------------------------
# Put empty directories in place for any missing outputs
for outputName in ${outputs:-out}; do
outputPath="${!outputName}"
if [[ ! -e "${outputPath}" ]]; then
@coreutils@/bin/mkdir "${outputPath}";
fi
done

View file

@ -1,4 +1,4 @@
{ testers, lib, pkgs, ... }:
{ testers, lib, pkgs, hello, runCommand, ... }:
let
pkgs-with-overlay = pkgs.extend(final: prev: {
proof-of-overlay-hello = prev.hello;
@ -24,4 +24,167 @@ lib.recurseIntoAttrs {
machine.succeed("hello | figlet >/dev/console")
'';
});
testBuildFailure = lib.recurseIntoAttrs {
happy = runCommand "testBuildFailure-happy" {
failed = testers.testBuildFailure (runCommand "fail" {} ''
echo ok-ish >$out
echo failing though
echo also stderr 1>&2
exit 3
'');
} ''
grep -F 'failing though' $failed/testBuildFailure.log
grep -F 'also stderr' $failed/testBuildFailure.log
grep -F 'ok-ish' $failed/result
[[ 3 = $(cat $failed/testBuildFailure.exit) ]]
touch $out
'';
helloDoesNotFail = runCommand "testBuildFailure-helloDoesNotFail" {
failed = testers.testBuildFailure (testers.testBuildFailure hello);
# Add hello itself as a prerequisite, so we don't try to run this test if
# there's an actual failure in hello.
inherit hello;
} ''
echo "Checking $failed/testBuildFailure.log"
grep -F 'testBuildFailure: The builder did not fail, but a failure was expected' $failed/testBuildFailure.log
[[ 1 = $(cat $failed/testBuildFailure.exit) ]]
touch $out
'';
multiOutput = runCommand "testBuildFailure-multiOutput" {
failed = testers.testBuildFailure (runCommand "fail" {
# dev will be the default output
outputs = ["dev" "doc" "out"];
} ''
echo i am failing
exit 1
'');
} ''
grep -F 'i am failing' $failed/testBuildFailure.log >/dev/null
[[ 1 = $(cat $failed/testBuildFailure.exit) ]]
# Checking our note that dev is the default output
echo $failed/_ | grep -- '-dev/_' >/dev/null
echo 'All good.'
touch $out
'';
};
testEqualContents = lib.recurseIntoAttrs {
happy = testers.testEqualContents {
assertion = "The same directory contents at different paths are recognized as equal";
expected = runCommand "expected" {} ''
mkdir -p $out/c
echo a >$out/a
echo b >$out/b
echo d >$out/c/d
'';
actual = runCommand "actual" {} ''
mkdir -p $out/c
echo a >$out/a
echo b >$out/b
echo d >$out/c/d
'';
};
unequalExe =
runCommand "testEqualContents-unequalExe" {
log = testers.testBuildFailure (testers.testEqualContents {
assertion = "The same directory contents at different paths are recognized as equal";
expected = runCommand "expected" {} ''
mkdir -p $out/c
echo a >$out/a
chmod a+x $out/a
echo b >$out/b
echo d >$out/c/d
'';
actual = runCommand "actual" {} ''
mkdir -p $out/c
echo a >$out/a
echo b >$out/b
chmod a+x $out/b
echo d >$out/c/d
'';
});
} ''
(
set -x
grep -F -- "executable bits don't match" $log/testBuildFailure.log
grep -E -- '\+.*-actual/a' $log/testBuildFailure.log
grep -E -- '-.*-actual/b' $log/testBuildFailure.log
grep -F -- "--- actual-executables" $log/testBuildFailure.log
grep -F -- "+++ expected-executables" $log/testBuildFailure.log
) || {
echo "Test failed: could not find pattern in build log $log"
exit 1
}
echo 'All good.'
touch $out
'';
fileDiff =
runCommand "testEqualContents-fileDiff" {
log = testers.testBuildFailure (testers.testEqualContents {
assertion = "The same directory contents at different paths are recognized as equal";
expected = runCommand "expected" {} ''
mkdir -p $out/c
echo a >$out/a
echo b >$out/b
echo d >$out/c/d
'';
actual = runCommand "actual" {} ''
mkdir -p $out/c
echo a >$out/a
echo B >$out/b
echo d >$out/c/d
'';
});
} ''
(
set -x
grep -F -- "Contents must be equal but were not" $log/testBuildFailure.log
grep -E -- '\+\+\+ .*-actual/b' $log/testBuildFailure.log
grep -E -- '--- .*-actual/b' $log/testBuildFailure.log
grep -F -- "-B" $log/testBuildFailure.log
grep -F -- "+b" $log/testBuildFailure.log
) || {
echo "Test failed: could not find pattern in build log $log"
exit 1
}
echo 'All good.'
touch $out
'';
fileMissing =
runCommand "testEqualContents-fileMissing" {
log = testers.testBuildFailure (testers.testEqualContents {
assertion = "The same directory contents at different paths are recognized as equal";
expected = runCommand "expected" {} ''
mkdir -p $out/c
echo a >$out/a
echo b >$out/b
echo d >$out/c/d
'';
actual = runCommand "actual" {} ''
mkdir -p $out/c
echo a >$out/a
echo d >$out/c/d
'';
});
} ''
(
set -x
grep -F -- "Contents must be equal but were not" $log/testBuildFailure.log
grep -E -- 'Only in .*-expected: b' $log/testBuildFailure.log
) || {
echo "Test failed: could not find pattern in build log $log"
exit 1
}
echo 'All good.'
touch $out
'';
};
}

View file

@ -1,22 +1,27 @@
{ lib, fetchFromGitHub }:
let
version = "1.008";
fetchFromGitHub rec {
pname = "b612";
in fetchFromGitHub {
name = "${pname}-font-${version}";
version = "1.008";
owner = "polarsys";
repo = "b612";
rev = version;
postFetch = ''
tar xf $downloadedFile --strip=1
mkdir -p $out/share/fonts/truetype/${pname}
cp fonts/ttf/*.ttf $out/share/fonts/truetype/${pname}
mkdir -p $out/share/fonts/truetype
mv $out/fonts/ttf/*.ttf $out/share/fonts/truetype
shopt -s extglob dotglob
rm -rf $out/!(share)
shopt -u extglob dotglob
'';
sha256 = "0r3lana1q9w3siv8czb3p9rrb5d9svp628yfbvvmnj7qvjrmfsiq";
hash = "sha256-aJ3XzWQauPsWwEDAHT2rD9a8RvLv1kqU3krFXprmypk=";
meta = with lib; {
homepage = "http://b612-font.com/";
homepage = "https://b612-font.com/";
description = "Highly legible font family for use on aircraft cockpit screens";
longDescription = ''
B612 is the result of a research project initiated by Airbus. The font

View file

@ -1,21 +1,25 @@
{ lib, fetchFromGitHub }:
let
fetchFromGitHub rec {
pname = "montserrat";
version = "7.222";
in fetchFromGitHub {
name = "${pname}-${version}";
owner = "JulietaUla";
repo = pname;
rev = "v${version}";
sha256 = "sha256-MeNnc1e5X5f0JyaLY6fX22rytHkvL++eM2ygsdlGMv0=";
postFetch = ''
tar xf $downloadedFile --strip 1
install -Dm 444 fonts/otf/*.otf -t $out/share/fonts/otf
install -Dm 444 fonts/ttf/*.ttf -t $out/share/fonts/ttf
install -Dm 444 fonts/webfonts/*.woff -t $out/share/fonts/woff
install -Dm 444 fonts/webfonts/*.woff2 -t $out/share/fonts/woff2
mkdir -p $out/share/fonts/{otf,ttf,woff,woff2}
mv $out/fonts/otf/*.otf $out/share/fonts/otf
mv $out/fonts/ttf/*.ttf $out/share/fonts/ttf
mv $out/fonts/webfonts/*.woff $out/share/fonts/woff
mv $out/fonts/webfonts/*.woff2 $out/share/fonts/woff2
shopt -s extglob dotglob
rm -rf $out/!(share)
shopt -u extglob dotglob
'';
meta = with lib; {

View file

@ -71,7 +71,7 @@ let
owner = "googlefonts";
repo = "noto-cjk";
inherit rev sha256;
sparseCheckout = "${typeface}/Variable/OTC";
sparseCheckout = [ "${typeface}/Variable/OTC" ];
};
installPhase = ''

View file

@ -18,9 +18,9 @@ stdenvNoCC.mkDerivation rec {
owner = "gvolpe";
repo = pname;
rev = version;
sparseCheckout = ''
BeautyLine-V3
'';
sparseCheckout = [
"BeautyLine-V3"
];
sha256 = "sha256-IkkypAj250+OXbf19TampCnqYsSbJVIjeYlxJoyhpzk=";
};

View file

@ -12,7 +12,7 @@ stdenvNoCC.mkDerivation rec {
repo = "mod-dh";
rev = "e846a5bd24d59ed15ba70b3a9d5363a38ca51d09";
sha256 = "sha256-RFOpN+tIMfakb7AZN0ock9eq2mytvL0DWedvQV67+ks=";
sparseCheckout = "console";
sparseCheckout = [ "console" ];
};
phases = [ "unpackPhase" "installPhase" ];

View file

@ -1,207 +0,0 @@
{ lib, stdenv, fetchpatch, fetchurl, fetchzip
# build tools
, gfortran, m4, makeWrapper, patchelf, perl, which, python3
, cmake
# libjulia dependencies
, libunwind, readline, utf8proc, zlib
# standard library dependencies
, curl, fftwSinglePrec, fftw, gmp, libgit2, mpfr, openlibm, openspecfun, pcre2
# linear algebra
, blas, lapack, arpack
# Darwin frameworks
, CoreServices, ApplicationServices
}:
let
majorVersion = "1";
minorVersion = "0";
maintenanceVersion = "4";
src_sha256 = "1dfx68wbrrzpbh74rla7i2g3r5z6wa1pxq3ahyfm5m27vfyjbkhg";
libuvVersion = "ed3700c849289ed01fe04273a7bf865340b2bd7e";
libuvSha256 = "137w666zsjw1p0ma3lf94d75hr1q45sgkfmbizkyji2qm57cnxjs";
dsfmtVersion = "2.2.3";
dsfmt = fetchurl {
url = "http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/SFMT/dSFMT-src-${dsfmtVersion}.tar.gz";
sha256 = "03kaqbjbi6viz0n33dk5jlf6ayxqlsq4804n7kwkndiga9s4hd42";
};
libuv = fetchurl {
url = "https://api.github.com/repos/JuliaLang/libuv/tarball/${libuvVersion}";
sha256 = libuvSha256;
};
rmathVersion = "0.1";
rmath-julia = fetchurl {
url = "https://api.github.com/repos/JuliaLang/Rmath-julia/tarball/v${rmathVersion}";
sha256 = "1qyps217175qhid46l8f5i1v8i82slgp23ia63x2hzxwfmx8617p";
};
virtualenvVersion = "15.0.0";
virtualenv = fetchurl {
url = "mirror://pypi/v/virtualenv/virtualenv-${virtualenvVersion}.tar.gz";
sha256 = "06fw4liazpx5vf3am45q2pdiwrv0id7ckv7n6zmpml29x6vkzmkh";
};
libwhichVersion = "81e9723c0273d78493dc8c8ed570f68d9ce7e89e";
libwhich = fetchurl {
url = "https://api.github.com/repos/vtjnash/libwhich/tarball/${libwhichVersion}";
sha256 = "1p7zg31kpmpbmh1znrk1xrbd074agx13b9q4dcw8n2zrwwdlbz3b";
};
llvmVersion = "6.0.0";
llvm = fetchurl {
url = "http://releases.llvm.org/6.0.0/llvm-${llvmVersion}.src.tar.xz";
sha256 = "0224xvfg6h40y5lrbnb9qaq3grmdc5rg00xq03s1wxjfbf8krx8z";
};
suitesparseVersion = "4.4.5";
suitesparse = fetchurl {
url = "http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-${suitesparseVersion}.tar.gz";
sha256 = "1jcbxb8jx5wlcixzf6n5dca2rcfx6mlcms1k2rl5gp67ay3bix43";
};
version = "${majorVersion}.${minorVersion}.${maintenanceVersion}";
in
stdenv.mkDerivation rec {
pname = "julia";
inherit version;
src = fetchzip {
url = "https://github.com/JuliaLang/${pname}/releases/download/v${version}/${pname}-${version}.tar.gz";
sha256 = src_sha256;
};
nativeBuildInputs = [ cmake curl gfortran m4 makeWrapper patchelf perl python3 which ];
# cmake is only used to build the bundled deps
dontUseCmakeConfigure = true;
# We assert that compatible blas and lapack are used.
buildInputs = assert (blas.isILP64 == lapack.isILP64); [
arpack fftw fftwSinglePrec gmp libgit2 libunwind mpfr
pcre2.dev blas lapack openlibm openspecfun readline utf8proc
zlib
]
++ lib.optionals stdenv.isDarwin [CoreServices ApplicationServices]
;
patches = [
./patches/1.0/use-system-utf8proc-julia-1.0.patch
];
postPatch = ''
patchShebangs . contrib
for i in backtrace cmdlineargs; do
mv test/$i.jl{,.off}
touch test/$i.jl
done
rm stdlib/Sockets/test/runtests.jl && touch stdlib/Sockets/test/runtests.jl
rm stdlib/Distributed/test/runtests.jl && touch stdlib/Distributed/test/runtests.jl
sed -e 's/Invalid Content-Type:/invalid Content-Type:/g' -i ./stdlib/LibGit2/test/libgit2.jl
sed -e 's/Failed to resolve /failed to resolve /g' -i ./stdlib/LibGit2/test/libgit2.jl
'';
prePatch = ''
mkdir deps/srccache
cp "${dsfmt}" "./deps/srccache/dsfmt-${dsfmtVersion}.tar.gz"
cp "${rmath-julia}" "./deps/srccache/Rmath-julia-${rmathVersion}.tar.gz"
cp "${libuv}" "./deps/srccache/libuv-${libuvVersion}.tar.gz"
cp "${virtualenv}" "./deps/srccache/virtualenv-${virtualenvVersion}.tar.gz"
cp "${libwhich}" "./deps/srccache/libwhich-${libwhichVersion}.tar.gz"
cp "${llvm}" "./deps/srccache/llvm-${llvmVersion}.src.tar.xz"
cp "${suitesparse}" "./deps/srccache/SuiteSparse-${suitesparseVersion}.tar.gz"
'';
makeFlags =
let
arch = lib.head (lib.splitString "-" stdenv.system);
march = {
x86_64 = stdenv.hostPlatform.gcc.arch or "x86-64";
i686 = "pentium4";
aarch64 = "armv8-a";
}.${arch}
or (throw "unsupported architecture: ${arch}");
# Julia requires Pentium 4 (SSE2) or better
cpuTarget = { x86_64 = "x86-64"; i686 = "pentium4"; aarch64 = "generic"; }.${arch}
or (throw "unsupported architecture: ${arch}");
in [
"ARCH=${arch}"
"MARCH=${march}"
"JULIA_CPU_TARGET=${cpuTarget}"
"PREFIX=$(out)"
"prefix=$(out)"
"SHELL=${stdenv.shell}"
(lib.optionalString (!stdenv.isDarwin) "USE_SYSTEM_BLAS=1")
"USE_BLAS64=${if blas.isILP64 then "1" else "0"}"
"USE_SYSTEM_LAPACK=1"
"USE_SYSTEM_ARPACK=1"
"USE_SYSTEM_FFTW=1"
"USE_SYSTEM_GMP=1"
"USE_SYSTEM_LIBGIT2=1"
"USE_SYSTEM_LIBUNWIND=1"
# We will probably never do that
#"USE_SYSTEM_LLVM=1"
"LLVM_VER=6.0.0"
"USE_SYSTEM_MPFR=1"
"USE_SYSTEM_OPENLIBM=1"
"USE_SYSTEM_OPENSPECFUN=1"
"USE_SYSTEM_PATCHELF=1"
"USE_SYSTEM_PCRE=1"
"PCRE_CONFIG=${pcre2.dev}/bin/pcre2-config"
"PCRE_INCL_PATH=${pcre2.dev}/include/pcre2.h"
"USE_SYSTEM_READLINE=1"
"USE_SYSTEM_UTF8PROC=1"
"USE_SYSTEM_ZLIB=1"
];
LD_LIBRARY_PATH = assert (blas.isILP64 == lapack.isILP64); (lib.makeLibraryPath [
arpack fftw fftwSinglePrec gmp libgit2 mpfr blas lapack openlibm
openspecfun pcre2
]);
doCheck = !stdenv.isDarwin;
checkTarget = "testall";
# Julia's tests require read/write access to $HOME
preCheck = ''
export HOME="$NIX_BUILD_TOP"
'';
preBuild = ''
sed -e '/^install:/s@[^ ]*/doc/[^ ]*@@' -i Makefile
sed -e '/[$](DESTDIR)[$](docdir)/d' -i Makefile
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}
'';
enableParallelBuilding = true;
postInstall = ''
# Symlink shared libraries from LD_LIBRARY_PATH into lib/julia,
# as using a wrapper with LD_LIBRARY_PATH causes segmentation
# faults when program returns an error:
# $ julia -e 'throw(Error())'
find $(echo $LD_LIBRARY_PATH | sed 's|:| |g') -maxdepth 1 -name '*.${if stdenv.isDarwin then "dylib" else "so"}*' | while read lib; do
if [[ ! -e $out/lib/julia/$(basename $lib) ]]; then
ln -sv $lib $out/lib/julia/$(basename $lib)
fi
done
'';
passthru = {
inherit majorVersion minorVersion maintenanceVersion;
site = "share/julia/site/v${majorVersion}.${minorVersion}";
};
meta = {
description = "High-level performance-oriented dynamical language for technical computing";
homepage = "https://julialang.org/";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ raskin rob garrison ];
platforms = [ "i686-linux" "x86_64-linux" "x86_64-darwin" "aarch64-linux" ];
};
}

View file

@ -1,147 +0,0 @@
{ lib, stdenv, fetchzip
# build tools
, gfortran, m4, makeWrapper, patchelf, perl, which, python3, cmake
# libjulia dependencies
, libunwind, readline, utf8proc, zlib
# standard library dependencies
, curl, fftwSinglePrec, fftw, libgit2, mpfr, openlibm, openspecfun, pcre2
# linear algebra
, blas, lapack, arpack
# Darwin frameworks
, CoreServices, ApplicationServices
}:
assert (!blas.isILP64) && (!lapack.isILP64);
with lib;
let
majorVersion = "1";
minorVersion = "5";
maintenanceVersion = "4";
src_sha256 = "1ba1v7hakgj95xvhyff0zcp0574qv6vailjl48wl1f8w5k54lsw2";
version = "${majorVersion}.${minorVersion}.${maintenanceVersion}";
in
stdenv.mkDerivation rec {
pname = "julia";
inherit version;
src = fetchzip {
url = "https://github.com/JuliaLang/julia/releases/download/v${version}/julia-${version}-full.tar.gz";
sha256 = src_sha256;
};
patches = [
./patches/1.5/use-system-utf8proc-julia-1.3.patch
];
postPatch = ''
patchShebangs . contrib
'';
dontUseCmakeConfigure = true;
buildInputs = [
arpack fftw fftwSinglePrec libgit2 libunwind mpfr
pcre2.dev blas lapack openlibm openspecfun readline utf8proc
zlib
] ++ lib.optionals stdenv.isDarwin [CoreServices ApplicationServices];
nativeBuildInputs = [ curl gfortran m4 makeWrapper patchelf perl python3 which cmake ];
makeFlags =
let
arch = head (splitString "-" stdenv.system);
march = {
x86_64 = stdenv.hostPlatform.gcc.arch or "x86-64";
i686 = "pentium4";
aarch64 = "armv8-a";
}.${arch}
or (throw "unsupported architecture: ${arch}");
# Julia requires Pentium 4 (SSE2) or better
cpuTarget = { x86_64 = "x86-64"; i686 = "pentium4"; aarch64 = "generic"; }.${arch}
or (throw "unsupported architecture: ${arch}");
# Julia applies a lot of patches to its dependencies, so for now do not use the system LLVM
# https://github.com/JuliaLang/julia/tree/master/deps/patches
in [
"ARCH=${arch}"
"MARCH=${march}"
"JULIA_CPU_TARGET=${cpuTarget}"
"PREFIX=$(out)"
"prefix=$(out)"
"SHELL=${stdenv.shell}"
(lib.optionalString (!stdenv.isDarwin) "USE_SYSTEM_BLAS=1")
"USE_BLAS64=${if blas.isILP64 then "1" else "0"}"
"USE_SYSTEM_LAPACK=1"
"USE_SYSTEM_ARPACK=1"
"USE_SYSTEM_FFTW=1"
"USE_SYSTEM_GMP=0"
"USE_SYSTEM_LIBGIT2=1"
"USE_SYSTEM_LIBUNWIND=1"
"USE_SYSTEM_MPFR=1"
"USE_SYSTEM_OPENLIBM=1"
"USE_SYSTEM_OPENSPECFUN=1"
"USE_SYSTEM_PATCHELF=1"
"USE_SYSTEM_PCRE=1"
"PCRE_CONFIG=${pcre2.dev}/bin/pcre2-config"
"PCRE_INCL_PATH=${pcre2.dev}/include/pcre2.h"
"USE_SYSTEM_READLINE=1"
"USE_SYSTEM_UTF8PROC=1"
"USE_SYSTEM_ZLIB=1"
"USE_BINARYBUILDER=0"
];
LD_LIBRARY_PATH = makeLibraryPath [
arpack fftw fftwSinglePrec libgit2 mpfr blas openlibm
openspecfun pcre2 lapack
];
preBuild = ''
sed -e '/^install:/s@[^ ]*/doc/[^ ]*@@' -i Makefile
sed -e '/[$](DESTDIR)[$](docdir)/d' -i Makefile
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}
'';
enableParallelBuilding = true;
# Julia's tests require read/write access to $HOME
preCheck = ''
export HOME="$NIX_BUILD_TOP"
'';
doCheck = true;
checkTarget = "test";
postInstall = ''
# Symlink shared libraries from LD_LIBRARY_PATH into lib/julia,
# as using a wrapper with LD_LIBRARY_PATH causes segmentation
# faults when program returns an error:
# $ julia -e 'throw(Error())'
find $(echo $LD_LIBRARY_PATH | sed 's|:| |g') -maxdepth 1 -name '*.${if stdenv.isDarwin then "dylib" else "so"}*' | while read lib; do
if [[ ! -e $out/lib/julia/$(basename $lib) ]]; then
ln -sv $lib $out/lib/julia/$(basename $lib)
fi
done
'';
passthru = {
inherit majorVersion minorVersion maintenanceVersion;
site = "share/julia/site/v${majorVersion}.${minorVersion}";
};
meta = {
description = "High-level performance-oriented dynamical language for technical computing";
homepage = "https://julialang.org/";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ raskin rob garrison ];
platforms = [ "i686-linux" "x86_64-linux" "x86_64-darwin" "aarch64-linux" ];
# Unfortunately, this derivation does not pass Julia's test suite. See
# https://github.com/NixOS/nixpkgs/pull/121114.
broken = true;
};
}

View file

@ -2,16 +2,16 @@
stdenv.mkDerivation rec {
pname = "julia-bin";
version = "1.8.2";
version = "1.8.3";
src = {
x86_64-linux = fetchurl {
url = "https://julialang-s3.julialang.org/bin/linux/x64/${lib.versions.majorMinor version}/julia-${version}-linux-x86_64.tar.gz";
sha256 = "sha256-ZxzzpFC2OnF+Hu3X9pCH44VvAVsuFGy1SSjxmjwF55Y=";
sha256 = "sha256-M8Owk1b/qiXTMxw2RrHy1LCZROj5P8uZSVeAG4u/WKk=";
};
aarch64-linux = fetchurl {
url = "https://julialang-s3.julialang.org/bin/linux/aarch64/${lib.versions.majorMinor version}/julia-${version}-linux-aarch64.tar.gz";
sha256 = "sha256-+RwnZCj/swrMIJ4Os+cLHJEmDoh+EdS2b1VFCEtTBUc=";
sha256 = "sha256-2/+xNKQTtxLUqOHujmZepV7bCGVxmhutmXkSPWQzrMk=";
};
}.${stdenv.hostPlatform.system} or (throw "Unsupported system: ${stdenv.hostPlatform.system}");

View file

@ -28,11 +28,11 @@
stdenv.mkDerivation rec {
pname = "julia";
version = "1.8.2";
version = "1.8.3";
src = fetchurl {
url = "https://github.com/JuliaLang/julia/releases/download/v${version}/julia-${version}-full.tar.gz";
sha256 = "sha256-5Xz8Lm2JF1Ckf3zwNVmk6PchK/VJAPJqnxL9bQCdTKk=";
hash = "sha256-UraJWp1K0v422yYe6MTIzJISuDehL5MAL6r1N6IVH1A=";
};
patches =

View file

@ -1,29 +0,0 @@
From 54a66b5728ec98f44a1768f064509be4fd3f2ef6 Mon Sep 17 00:00:00 2001
From: Thomas Tuegel <ttuegel@gmail.com>
Date: Sat, 10 Oct 2015 13:09:48 -0500
Subject: [PATCH 1/3] use system utf8proc
---
src/flisp/Makefile | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/flisp/Makefile b/src/flisp/Makefile
index bec8624..5437b5c 100644
--- a/src/flisp/Makefile
+++ b/src/flisp/Makefile
@@ -24,9 +24,9 @@ DOBJS = $(SRCS:%.c=$(BUILDDIR)/%.dbg.obj)
LLTDIR := ../support
LLT_release := $(BUILDDIR)/$(LLTDIR)/libsupport.a
LLT_debug := $(BUILDDIR)/$(LLTDIR)/libsupport-debug.a
-LIBFILES_release := $(LLT_release) $(LIBUV) $(LIBUTF8PROC)
-LIBFILES_debug := $(LLT_debug) $(LIBUV) $(LIBUTF8PROC)
-LIBS :=
+LIBFILES_release := $(LLT_release) $(LIBUV)
+LIBFILES_debug := $(LLT_debug) $(LIBUV)
+LIBS := $(LIBUTF8PROC)
ifneq ($(OS),WINNT)
LIBS += -lpthread
endif
--
2.5.2

View file

@ -1,24 +0,0 @@
---
src/flisp/Makefile | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/flisp/Makefile b/src/flisp/Makefile
index d97075e..6bebca7 100644
--- a/src/flisp/Makefile
+++ b/src/flisp/Makefile
@@ -32,9 +32,9 @@ OBJS := $(SRCS:%.c=$(BUILDDIR)/%.o)
DOBJS := $(SRCS:%.c=$(BUILDDIR)/%.dbg.obj)
LLT_release := $(LLT_BUILDDIR)/libsupport.a
LLT_debug := $(LLT_BUILDDIR)/libsupport-debug.a
-LIBFILES_release := $(LLT_release) $(LIBUV) $(LIBUTF8PROC)
-LIBFILES_debug := $(LLT_debug) $(LIBUV) $(LIBUTF8PROC)
-LIBS :=
+LIBFILES_release := $(LLT_release) $(LIBUV)
+LIBFILES_debug := $(LLT_debug) $(LIBUV)
+LIBS := $(LIBUTF8PROC)
ifneq ($(OS),WINNT)
LIBS += -lpthread
endif
--

View file

@ -43,7 +43,6 @@ let
mkdir "$rsrc"
ln -s "${cc.lib}/lib/clang/${release_version}/include" "$rsrc"
echo "-resource-dir=$rsrc" >> $out/nix-support/cc-cflags
echo "-B $rsrc/lib" >> $out/nix-support/cc-cflags
'';
mkExtraBuildCommands = cc: mkExtraBuildCommands0 cc + ''
ln -s "${targetLlvmLibraries.compiler-rt.out}/lib" "$rsrc/lib"

View file

@ -6,15 +6,15 @@
, libusb-compat-0_1
}:
stdenv.mkDerivation rec {
stdenv.mkDerivation (finalAttrs: {
pname = "teensy-loader-cli";
version = "2.1+unstable=2021-04-10";
version = "2.2";
src = fetchFromGitHub {
owner = "PaulStoffregen";
repo = "teensy_loader_cli";
rev = "9dbbfa3b367b6c37e91e8a42dae3c6edfceccc4d";
sha256 = "lQ1XtaWPr6nvE8NArD1980QVOH6NggO3FlfsntUjY7s=";
rev = finalAttrs.version;
sha256 = "sha256-C9Qhd6LhAES7X0sh4rofjAM+gxwuosahVQHeR76LyIo=";
};
nativeBuildInputs = [
@ -30,8 +30,8 @@ stdenv.mkDerivation rec {
runHook preInstall
install -Dm555 teensy_loader_cli $out/bin/teensy-loader-cli
install -Dm444 -t $out/share/doc/${pname} *.md *.txt
go-md2man -in README.md -out ${pname}.1
install -Dm444 -t $out/share/doc/teensy-loader-cli *.md *.txt
go-md2man -in README.md -out teensy-loader-cli.1
installManPage *.1
runHook postInstall
@ -43,4 +43,4 @@ stdenv.mkDerivation rec {
license = licenses.gpl3Only;
platforms = platforms.unix;
};
}
})

View file

@ -38,7 +38,7 @@ builder rec {
buildPackages.stdenv.cc
]
++ lib.optional (stdenv.hostPlatform != stdenv.buildPlatform)
pkgsBuildBuild.guile;
pkgsBuildBuild.guile_2_2;
nativeBuildInputs = [
makeWrapper
pkg-config

View file

@ -38,7 +38,7 @@ builder rec {
depsBuildBuild = [
buildPackages.stdenv.cc
] ++ lib.optional (stdenv.hostPlatform != stdenv.buildPlatform)
pkgsBuildBuild.guile;
pkgsBuildBuild.guile_3_0;
nativeBuildInputs = [
makeWrapper
pkg-config

View file

@ -7,7 +7,6 @@
, mailcap, mimetypesSupport ? true
, ncurses
, openssl
, openssl_1_1
, readline
, sqlite
, tcl ? null, tk ? null, tix ? null, libX11 ? null, xorgproto ? null, x11Support ? false
@ -77,10 +76,6 @@ assert lib.assertMsg (reproducibleBuild -> (!rebuildBytecode))
with lib;
let
# cpython does support/build with openssl 3.0, but some libraries using the ssl module seem to have issues with it
# null check for Minimal
openssl' = if openssl != null then openssl_1_1 else null;
buildPackages = pkgsBuildHost;
inherit (passthru) pythonForBuild;
@ -121,7 +116,7 @@ let
];
buildInputs = filter (p: p != null) ([
zlib bzip2 expat xz libffi gdbm sqlite readline ncurses openssl' ]
zlib bzip2 expat xz libffi gdbm sqlite readline ncurses openssl ]
++ optionals x11Support [ tcl tk libX11 xorgproto ]
++ optionals (bluezSupport && stdenv.isLinux) [ bluez ]
++ optionals stdenv.isDarwin [ configd ])
@ -327,8 +322,8 @@ in with passthru; stdenv.mkDerivation {
"--with-threads"
] ++ optionals (sqlite != null && isPy3k) [
"--enable-loadable-sqlite-extensions"
] ++ optionals (openssl' != null) [
"--with-openssl=${openssl'.dev}"
] ++ optionals (openssl != null) [
"--with-openssl=${openssl.dev}"
] ++ optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
"ac_cv_buggy_getaddrinfo=no"
# Assume little-endian IEEE 754 floating point when cross compiling
@ -493,7 +488,7 @@ in with passthru; stdenv.mkDerivation {
# Enforce that we don't have references to the OpenSSL -dev package, which we
# explicitly specify in our configure flags above.
disallowedReferences =
lib.optionals (openssl' != null && !static) [ openssl'.dev ]
lib.optionals (openssl != null && !static) [ openssl.dev ]
++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
# Ensure we don't have references to build-time packages.
# These typically end up in shebangs.

View file

@ -3,7 +3,21 @@ echo "Sourcing python-catch-conflicts-hook.sh"
pythonOutputDistPhase() {
echo "Executing pythonOutputDistPhase"
mv "dist" "$dist"
if [[ -d dist ]]; then
mv "dist" "$dist"
else
cat >&2 <<EOF
The build contains no ./dist directory.
If this project is not setuptools-based, pass
format = "other";
(or another appropriate value, as described here) to buildPythonApplication/buildPythonPackage:
https://nixos.org/manual/nixpkgs/stable/#buildpythonpackage-function
EOF
false
fi
echo "Finished executing pythonOutputDistPhase"
}

View file

@ -14,13 +14,13 @@
stdenv.mkDerivation rec {
pname = "aws-c-auth";
version = "0.6.20";
version = "0.6.21";
src = fetchFromGitHub {
owner = "awslabs";
repo = "aws-c-auth";
rev = "v${version}";
sha256 = "sha256-d6o+ClQMhoSU8wjdP0nr+g6r4ZqzXhxBJcEfTOSlS7o=";
sha256 = "sha256-RVwSSr0EgVnZt/uUAldWQdjBw6/L+CHUutWcinysvNw=";
};
nativeBuildInputs = [

View file

@ -2,13 +2,13 @@
stdenv.mkDerivation rec {
pname = "aws-c-io";
version = "0.13.9";
version = "0.13.11";
src = fetchFromGitHub {
owner = "awslabs";
repo = pname;
rev = "v${version}";
sha256 = "sha256-G7FYWrpg4TLsfK6C3JCTWkZ65iu3+gF3To5qYgUAqb0=";
sha256 = "sha256-3HNfEZ/PMHLhPp7boYyY/x5wXN9Eg+G1400q1riG2oI=";
};
nativeBuildInputs = [ cmake ];

View file

@ -7,13 +7,13 @@
stdenv.mkDerivation rec {
pname = "aws-c-sdkutils";
version = "0.1.6";
version = "0.1.7";
src = fetchFromGitHub {
owner = "awslabs";
repo = "aws-c-sdkutils";
rev = "v${version}";
sha256 = "sha256-KfQKOQrSgTy9Z+iUGnLblRArkq3xnFEnSq6HajcbysQ=";
sha256 = "sha256-qu/+xYorB+QXP5Ixj5ZFP9ZenVYV6hcmxHnH14DEgrU=";
};
nativeBuildInputs = [

View file

@ -1,4 +1,4 @@
{ lib, stdenv, fetchurl, openssl, openldap, libkrb5, db, gettext
{ lib, stdenv, fetchurl, fetchpatch, openssl, openldap, libkrb5, db, gettext
, pam, libxcrypt, fixDarwinDylibNames, autoreconfHook, enableLdap ? false
, buildPackages, pruneLibtoolFiles, nixosTests }:
@ -19,6 +19,11 @@ stdenv.mkDerivation rec {
patches = [
# Fix cross-compilation
./cyrus-sasl-ac-try-run-fix.patch
# make compatible with openssl3. can probably be dropped with any release after 2.1.28
(fetchpatch {
url = "https://github.com/cyrusimap/cyrus-sasl/compare/cb549ef71c5bb646fe583697ebdcaba93267a237...c2bd3afbca57f176d8c650670ce371444bb7fcc0.patch";
hash = "sha256-bYeIkvle1Ms7Lnoob4eLd4RbPFHtPkKRZvfHNCBJY/s=";
})
];
outputs = [ "bin" "dev" "out" "man" "devdoc" ];

View file

@ -69,6 +69,7 @@ stdenv.mkDerivation rec {
++ lib.optional (!avx2Support) "--disable-avx2"
++ lib.optional (!avx512Support) "--disable-avx512"
++ lib.optional (!stdenv.hostPlatform.isx86_64) "--disable-sse"
++ lib.optional (!stdenv.hostPlatform.isx86_64) "--disable-sse-assembly"
++ lib.optional stdenv.hostPlatform.isx86_64 "--enable-sse-assembly"
++ lib.optionals enableCuda [ "--enable-nvidia-gpu" "--with-NVIDIA-GPU-compute-capability=${nvidiaArch}" ];

View file

@ -1,4 +1,14 @@
{ config, lib, stdenv, fetchurl, gettext, meson, ninja, pkg-config, perl, python3
{ config
, lib
, stdenv
, fetchurl
, fetchpatch
, gettext
, meson
, ninja
, pkg-config
, perl
, python3
, libiconv, zlib, libffi, pcre2, libelf, gnome, libselinux, bash, gnum4, gtk-doc, docbook_xsl, docbook_xml_dtd_45, libxslt
# use util-linuxMinimal to avoid circular dependency (util-linux, systemd, glib)
, util-linuxMinimal ? null
@ -95,6 +105,14 @@ stdenv.mkDerivation (finalAttrs: {
# Disable flaky test.
# https://gitlab.gnome.org/GNOME/glib/-/issues/820
./skip-timer-test.patch
# Fix infinite loop (e.g. in gnome-keyring)
# https://github.com/NixOS/nixpkgs/pull/197754#issuecomment-1312805358
# https://gitlab.gnome.org/GNOME/glib/-/merge_requests/3039
(fetchpatch {
url = "https://gitlab.gnome.org/GNOME/glib/-/commit/2a36bb4b7e46f9ac043561c61f9a790786a5440c.patch";
sha256 = "b77Hxt6WiLxIGqgAj9ZubzPWrWmorcUOEe/dp01BcXA=";
})
];
outputs = [ "bin" "out" "dev" "devdoc" ];

View file

@ -138,14 +138,6 @@ stdenv.mkDerivation (finalAttrs: {
rm $out/lib/libregress-1.0${stdenv.targetPlatform.extensions.sharedLibrary}
'';
# add self to buildInputs to avoid needing to add gobject-introspection to buildInputs in addition to nativeBuildInputs
# builds use target-pkg-config to look for gobject-introspection instead of just looking for binaries in $PATH
# wrapper uses depsTargetTargetPropagated so ignore it
preFixup = lib.optionalString (!lib.hasSuffix "-wrapped" finalAttrs.pname) ''
mkdir -p $dev/nix-support
echo "$out" > $dev/nix-support/propagated-target-target-deps
'';
setupHook = ./setup-hook.sh;
passthru = {

View file

@ -9,50 +9,99 @@
# to build, run
# `nix build ".#pkgsCross.aarch64-multiplatform.buildPackages.gobject-introspection"`
# a comment that applies to both depsTargetTargetPropagated attributes below
# add self to buildInputs to avoid needing to add gobject-introspection to buildInputs in addition to nativeBuildInputs
# builds use target-pkg-config to look for gobject-introspection instead of just looking for binaries in $PATH
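# When the build platform cannot execute target binaries but an emulator is available,
# the wrapped g-ir-scanner/g-ir-compiler scripts below run the target tools through
# that emulator (g-ir-scanner-qemuwrapper); otherwise the unwrapped outputs are
# simply re-exposed via lndir.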
let
# ensure that `.override` works when gobject-introspection == gobject-introspection-wrapped
# ensure that `.override` works
args = builtins.removeAttrs _args [ "buildPackages" "targetPackages" "gobject-introspection-unwrapped" ];
# passing this stdenv to `targetPackages...` breaks due to splicing not working in `.override``
argsForTarget = builtins.removeAttrs args [ "stdenv" ];
overridenUnwrappedGir = gobject-introspection-unwrapped.override args;
# if we have targetPackages.gobject-introspection then propagate that
overridenTargetUnwrappedGir =
if targetPackages ? gobject-introspection-unwrapped
then targetPackages.gobject-introspection-unwrapped.override argsForTarget
else overridenUnwrappedGir;
in
(gobject-introspection-unwrapped.override args).overrideAttrs (previousAttrs: {
pname = "gobject-introspection-wrapped";
depsTargetTargetPropagated = [ gobject-introspection-unwrapped ];
postFixup = (previousAttrs.postFixup or "") + ''
mv $dev/bin/g-ir-compiler $dev/bin/.g-ir-compiler-wrapped
mv $dev/bin/g-ir-scanner $dev/bin/.g-ir-scanner-wrapped
# wrap both pkgsCrossX.buildPackages.gobject-introspection and {pkgs,pkgsSomethingExecutableOnBuildSystem).buildPackages.gobject-introspection
if (!stdenv.hostPlatform.canExecute stdenv.targetPlatform) && stdenv.targetPlatform.emulatorAvailable buildPackages
then
stdenv.mkDerivation
(builtins.removeAttrs overridenUnwrappedGir.drvAttrs [ "name" ] # so we can get a fresh name generated from the pname
// {
(
export bash="${buildPackages.bash}"
export emulator=${lib.escapeShellArg (stdenv.targetPlatform.emulator buildPackages)}
export emulatorwrapper="$dev/bin/g-ir-scanner-qemuwrapper"
export buildlddtree="${buildPackages.pax-utils}/bin/lddtree"
inherit (overridenUnwrappedGir) meta;
pname = "gobject-introspection-wrapped";
passthru = overridenUnwrappedGir.passthru // {
unwrapped = overridenUnwrappedGir;
};
phases = [ "fixupPhase" ]; # don't remove, it is valid to set phases here.
dontStrip = true;
depsTargetTargetPropagated = [ overridenTargetUnwrappedGir ];
postFixup = ''
${lib.concatMapStrings (output: ''
mkdir -p ${"$" + "${output}"}
${lib.getExe buildPackages.xorg.lndir} ${gobject-introspection-unwrapped.${output}} ${"$" + "${output}"}
'') gobject-introspection-unwrapped.outputs}
export targetgir="${lib.getDev (targetPackages.gobject-introspection-unwrapped.override argsForTarget)}"
cp $dev/bin/g-ir-compiler $dev/bin/.g-ir-compiler-wrapped
cp $dev/bin/g-ir-scanner $dev/bin/.g-ir-scanner-wrapped
substituteAll "${./wrappers/g-ir-compiler.sh}" "$dev/bin/g-ir-compiler"
substituteAll "${./wrappers/g-ir-scanner.sh}" "$dev/bin/g-ir-scanner"
substituteAll "${./wrappers/g-ir-scanner-lddwrapper.sh}" "$dev/bin/g-ir-scanner-lddwrapper"
substituteAll "${./wrappers/g-ir-scanner-qemuwrapper.sh}" "$dev/bin/g-ir-scanner-qemuwrapper"
chmod +x $dev/bin/g-ir-*
)
''
# when cross-compiling and using the wrapper then when a package looks up the g_ir_X
# variable with pkg-config they'll get the host version which can't be run
# override the variable to use the absolute path to g_ir_X in PATH which can be run
+ ''
cat >> $dev/nix-support/setup-hook <<-'EOF'
override-pkg-config-gir-variables() {
PKG_CONFIG_GOBJECT_INTROSPECTION_1_0_G_IR_SCANNER="$(type -p g-ir-scanner)"
PKG_CONFIG_GOBJECT_INTROSPECTION_1_0_G_IR_COMPILER="$(type -p g-ir-compiler)"
PKG_CONFIG_GOBJECT_INTROSPECTION_1_0_G_IR_GENERATE="$(type -p g-ir-generate)"
export PKG_CONFIG_GOBJECT_INTROSPECTION_1_0_G_IR_SCANNER
export PKG_CONFIG_GOBJECT_INTROSPECTION_1_0_G_IR_COMPILER
export PKG_CONFIG_GOBJECT_INTROSPECTION_1_0_G_IR_GENERATE
}
(
rm "$dev/bin/g-ir-compiler"
rm "$dev/bin/g-ir-scanner"
export bash="${buildPackages.bash}"
export emulator=${lib.escapeShellArg (stdenv.targetPlatform.emulator buildPackages)}
export emulatorwrapper="$dev/bin/g-ir-scanner-qemuwrapper"
export buildlddtree="${buildPackages.pax-utils}/bin/lddtree"
preConfigureHooks+=(override-pkg-config-gir-variables)
EOF
'';
})
export targetgir="${lib.getDev overridenTargetUnwrappedGir}"
substituteAll "${./wrappers/g-ir-compiler.sh}" "$dev/bin/g-ir-compiler"
substituteAll "${./wrappers/g-ir-scanner.sh}" "$dev/bin/g-ir-scanner"
substituteAll "${./wrappers/g-ir-scanner-lddwrapper.sh}" "$dev/bin/g-ir-scanner-lddwrapper"
substituteAll "${./wrappers/g-ir-scanner-qemuwrapper.sh}" "$dev/bin/g-ir-scanner-qemuwrapper"
chmod +x $dev/bin/g-ir-compiler
chmod +x $dev/bin/g-ir-scanner*
)
''
# when cross-compiling and using the wrapper then when a package looks up the g_ir_X
# variable with pkg-config they'll get the host version which can't be run
# override the variable to use the absolute path to g_ir_X in PATH which can be run
+ ''
cat >> $dev/nix-support/setup-hook <<-'EOF'
override-pkg-config-gir-variables() {
PKG_CONFIG_GOBJECT_INTROSPECTION_1_0_G_IR_SCANNER="$(type -p g-ir-scanner)"
PKG_CONFIG_GOBJECT_INTROSPECTION_1_0_G_IR_COMPILER="$(type -p g-ir-compiler)"
PKG_CONFIG_GOBJECT_INTROSPECTION_1_0_G_IR_GENERATE="$(type -p g-ir-generate)"
export PKG_CONFIG_GOBJECT_INTROSPECTION_1_0_G_IR_SCANNER
export PKG_CONFIG_GOBJECT_INTROSPECTION_1_0_G_IR_COMPILER
export PKG_CONFIG_GOBJECT_INTROSPECTION_1_0_G_IR_GENERATE
}
preConfigureHooks+=(override-pkg-config-gir-variables)
EOF
'';
})
else
stdenv.mkDerivation (builtins.removeAttrs overridenUnwrappedGir.drvAttrs [ "name" ] # so we can get a fresh name generated from the pname
// {
inherit (overridenUnwrappedGir) meta;
pname = "gobject-introspection-wrapped";
passthru = overridenUnwrappedGir.passthru // {
unwrapped = overridenUnwrappedGir;
};
phases = [ "fixupPhase" ]; # don't remove, it is valid to set phases here.
dontStrip = true;
depsTargetTargetPropagated = [ overridenTargetUnwrappedGir ];
postFixup = ''
${lib.concatMapStrings (output: ''
mkdir -p ${"$" + "${output}"}
${lib.getExe buildPackages.xorg.lndir} ${gobject-introspection-unwrapped.${output}} ${"$" + "${output}"}
'') gobject-introspection-unwrapped.outputs}
'';
})

View file

@ -70,6 +70,13 @@ stdenv.mkDerivation rec {
dbus
];
postPatch = ''
# https://gitlab.gnome.org/GNOME/gtksourceview/-/merge_requests/295
# build: drop unnecessary vapigen check
substituteInPlace meson.build \
--replace "if generate_vapi" "if false"
'';
# Broken by PCRE 2 bump in GLib.
# https://gitlab.gnome.org/GNOME/gtksourceview/-/issues/283
doCheck = false;

Some files were not shown because too many files have changed in this diff.