Merge remote-tracking branch 'upstream/master' into trustless-remote-builder-simple
commit 7d703f2106
50 changed files with 398 additions and 140 deletions
@@ -9,6 +9,7 @@ CXXFLAGS = @CXXFLAGS@
 EDITLINE_LIBS = @EDITLINE_LIBS@
 ENABLE_S3 = @ENABLE_S3@
 GTEST_LIBS = @GTEST_LIBS@
+HAVE_LIBCPUID = @HAVE_LIBCPUID@
 HAVE_SECCOMP = @HAVE_SECCOMP@
 LDFLAGS = @LDFLAGS@
 LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@

@@ -218,6 +218,14 @@ LDFLAGS="-lz $LDFLAGS"
 # Look for libbrotli{enc,dec}.
 PKG_CHECK_MODULES([LIBBROTLI], [libbrotlienc libbrotlidec], [CXXFLAGS="$LIBBROTLI_CFLAGS $CXXFLAGS"])
 
+# Look for libcpuid.
+if test "$machine_name" = "x86_64"; then
+  PKG_CHECK_MODULES([LIBCPUID], [libcpuid], [CXXFLAGS="$LIBCPUID_CFLAGS $CXXFLAGS"])
+  have_libcpuid=1
+  AC_DEFINE([HAVE_LIBCPUID], [1], [Use libcpuid])
+fi
+AC_SUBST(HAVE_LIBCPUID, [$have_libcpuid])
+
 
 # Look for libseccomp, required for Linux sandboxing.
 if test "$sys_name" = linux; then

@@ -7,7 +7,10 @@ let
 
   showCommand =
     { command, def, filename }:
-      "# Name\n\n"
+      ''
+        **Warning**: This program is **experimental** and its interface is subject to change.
+      ''
+      + "# Name\n\n"
       + "`${command}` - ${def.description}\n\n"
       + "# Synopsis\n\n"
       + showSynopsis { inherit command; args = def.args; }

@@ -232,22 +232,23 @@ terraform apply
 > in a nix-shell shebang.
 
 Finally, using the merging of multiple nix-shell shebangs the following
-Haskell script uses a specific branch of Nixpkgs/NixOS (the 18.03 stable
+Haskell script uses a specific branch of Nixpkgs/NixOS (the 20.03 stable
 branch):
 
 ```haskell
 #! /usr/bin/env nix-shell
-#! nix-shell -i runghc -p "haskellPackages.ghcWithPackages (ps: [ps.HTTP ps.tagsoup])"
-#! nix-shell -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/nixos-18.03.tar.gz
+#! nix-shell -i runghc -p "haskellPackages.ghcWithPackages (ps: [ps.download-curl ps.tagsoup])"
+#! nix-shell -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/nixos-20.03.tar.gz
 
-import Network.HTTP
+import Network.Curl.Download
 import Text.HTML.TagSoup
+import Data.Either
+import Data.ByteString.Char8 (unpack)
 
 -- Fetch nixos.org and print all hrefs.
 main = do
-  resp <- Network.HTTP.simpleHTTP (getRequest "http://nixos.org/")
-  body <- getResponseBody resp
-  let tags = filter (isTagOpenName "a") $ parseTags body
+  resp <- openURI "https://nixos.org/"
+  let tags = filter (isTagOpenName "a") $ parseTags $ unpack $ fromRight undefined resp
   let tags' = map (fromAttrib "href") tags
   mapM_ putStrLn $ filter (/= "") tags'
 ```

doc/manual/src/release-notes/rl-2.4.md (new file)
@@ -0,0 +1,8 @@
+# Release 2.4 (202X-XX-XX)
+
+- It is now an error to modify the `plugin-files` setting via a
+  command-line flag that appears after the first non-flag argument
+  to any command, including a subcommand to `nix`. For example,
+  `nix-instantiate default.nix --plugin-files ""` must now become
+  `nix-instantiate --plugin-files "" default.nix`.
+- Plugins that add new `nix` subcommands are now actually respected.

@@ -91,7 +91,8 @@
             gmock
           ]
           ++ lib.optionals stdenv.isLinux [libseccomp utillinuxMinimal]
-          ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium;
+          ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
+          ++ lib.optional stdenv.isx86_64 libcpuid;
 
         awsDeps = lib.optional (stdenv.isLinux || stdenv.isDarwin)
           (aws-sdk-cpp.override {

@@ -53,6 +53,9 @@ static int main_build_remote(int argc, char * * argv)
     unsetenv("DISPLAY");
     unsetenv("SSH_ASKPASS");
 
+    /* If we ever use the common args framework, make sure to
+       remove initPlugins below and initialize settings first.
+    */
     if (argc != 2)
         throw UsageError("called without required arguments");
 

@@ -118,10 +118,8 @@ void StorePathsCommand::run(ref<Store> store, std::vector<RealisedPath> paths)
     run(store, std::move(storePaths));
 }
 
-void StorePathCommand::run(ref<Store> store)
+void StorePathCommand::run(ref<Store> store, std::vector<StorePath> storePaths)
 {
-    auto storePaths = toStorePaths(store, Realise::Nothing, operateOn, installables);
-
     if (storePaths.size() != 1)
         throw UsageError("this command requires exactly one store path");
 

@@ -177,13 +177,13 @@ struct StorePathsCommand : public RealisedPathsCommand
 };
 
 /* A command that operates on exactly one store path. */
-struct StorePathCommand : public InstallablesCommand
+struct StorePathCommand : public StorePathsCommand
 {
-    using StoreCommand::run;
+    using StorePathsCommand::run;
 
     virtual void run(ref<Store> store, const StorePath & storePath) = 0;
 
-    void run(ref<Store> store) override;
+    void run(ref<Store> store, std::vector<StorePath> storePaths) override;
 };
 
 /* A helper class for registering commands globally. */

@@ -496,6 +496,23 @@ static std::string showAttrPaths(const std::vector<std::string> & paths)
     return s;
 }
 
+InstallableFlake::InstallableFlake(
+    SourceExprCommand * cmd,
+    ref<EvalState> state,
+    FlakeRef && flakeRef,
+    Strings && attrPaths,
+    Strings && prefixes,
+    const flake::LockFlags & lockFlags)
+    : InstallableValue(state),
+      flakeRef(flakeRef),
+      attrPaths(attrPaths),
+      prefixes(prefixes),
+      lockFlags(lockFlags)
+{
+    if (cmd && cmd->getAutoArgs(*state)->size())
+        throw UsageError("'--arg' and '--argstr' are incompatible with flakes");
+}
+
 std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation()
 {
     auto lockedFlake = getLockedFlake();

@@ -628,9 +645,12 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
         try {
             auto [flakeRef, fragment] = parseFlakeRefWithFragment(s, absPath("."));
             result.push_back(std::make_shared<InstallableFlake>(
-                getEvalState(), std::move(flakeRef),
+                this,
+                getEvalState(),
+                std::move(flakeRef),
                 fragment == "" ? getDefaultFlakeAttrPaths() : Strings{fragment},
-                getDefaultFlakeAttrPathPrefixes(), lockFlags));
+                getDefaultFlakeAttrPathPrefixes(),
+                lockFlags));
             continue;
         } catch (...) {
             ex = std::current_exception();

@@ -104,11 +104,13 @@ struct InstallableFlake : InstallableValue
     const flake::LockFlags & lockFlags;
     mutable std::shared_ptr<flake::LockedFlake> _lockedFlake;
 
-    InstallableFlake(ref<EvalState> state, FlakeRef && flakeRef,
-        Strings && attrPaths, Strings && prefixes, const flake::LockFlags & lockFlags)
-        : InstallableValue(state), flakeRef(flakeRef), attrPaths(attrPaths),
-          prefixes(prefixes), lockFlags(lockFlags)
-    { }
+    InstallableFlake(
+        SourceExprCommand * cmd,
+        ref<EvalState> state,
+        FlakeRef && flakeRef,
+        Strings && attrPaths,
+        Strings && prefixes,
+        const flake::LockFlags & lockFlags);
 
     std::string what() override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }
 

@@ -1381,10 +1381,10 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
         } else if (!i.def) {
             throwMissingArgumentError(i.pos, R"(cannot evaluate a function that has an argument without a value ('%1%')
 
-nix attempted to evaluate a function as a top level expression; in this case it must have its
-arguments supplied either by default values, or passed explicitly with --arg or --argstr.
-
-https://nixos.org/manual/nix/stable/#ss-functions)", i.name);
+Nix attempted to evaluate a function as a top level expression; in
+this case it must have its arguments supplied either by default
+values, or passed explicitly with '--arg' or '--argstr'. See
+https://nixos.org/manual/nix/stable/#ss-functions.)", i.name);
 
         }
     }

@@ -17,7 +17,7 @@ MakeError(ThrownError, AssertionError);
 MakeError(Abort, EvalError);
 MakeError(TypeError, EvalError);
 MakeError(UndefinedVarError, Error);
-MakeError(MissingArgumentError, Error);
+MakeError(MissingArgumentError, EvalError);
 MakeError(RestrictedPathError, Error);
 
 

@@ -79,4 +79,11 @@ MixCommonArgs::MixCommonArgs(const string & programName)
         hiddenCategories.insert(cat);
 }
 
+void MixCommonArgs::initialFlagsProcessed()
+{
+    initPlugins();
+    pluginsInited();
+}
+
+
 }

@@ -7,10 +7,14 @@ namespace nix {
 //static constexpr auto commonArgsCategory = "Miscellaneous common options";
 static constexpr auto loggingCategory = "Logging-related options";
 
-struct MixCommonArgs : virtual Args
+class MixCommonArgs : public virtual Args
 {
+    void initialFlagsProcessed() override;
+public:
     string programName;
     MixCommonArgs(const string & programName);
+protected:
+    virtual void pluginsInited() {}
 };
 
 struct MixDryRun : virtual Args

@@ -124,6 +124,17 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation
     , buildMode(buildMode)
 {
     this->drv = std::make_unique<BasicDerivation>(BasicDerivation(drv));
+
+    auto outputHashes = staticOutputHashes(worker.store, drv);
+    for (auto &[outputName, outputHash] : outputHashes)
+        initialOutputs.insert({
+            outputName,
+            InitialOutput{
+                .wanted = true, // Will be refined later
+                .outputHash = outputHash
+            }
+        });
+
     state = &DerivationGoal::haveDerivation;
     name = fmt(
         "building of '%s' from in-memory derivation",

@@ -258,8 +269,20 @@ void DerivationGoal::loadDerivation()
 
     assert(worker.store.isValidPath(drvPath));
 
+    auto fullDrv = new Derivation(worker.store.derivationFromPath(drvPath));
+
+    auto outputHashes = staticOutputHashes(worker.store, *fullDrv);
+    for (auto &[outputName, outputHash] : outputHashes)
+        initialOutputs.insert({
+            outputName,
+            InitialOutput{
+                .wanted = true, // Will be refined later
+                .outputHash = outputHash
+            }
+        });
+
     /* Get the derivation. */
-    drv = std::unique_ptr<BasicDerivation>(new Derivation(worker.store.derivationFromPath(drvPath)));
+    drv = std::unique_ptr<BasicDerivation>(fullDrv);
 
     haveDerivation();
 }

@@ -506,6 +529,7 @@ void DerivationGoal::inputsRealised()
             Derivation drvResolved { *std::move(attempt) };
 
             auto pathResolved = writeDerivation(worker.store, drvResolved);
+            resolvedDrv = drvResolved;
 
             auto msg = fmt("Resolved derivation: '%s' -> '%s'",
                 worker.store.printStorePath(drvPath),

@@ -1019,7 +1043,37 @@ void DerivationGoal::buildDone()
 }
 
 void DerivationGoal::resolvedFinished() {
-    done(BuildResult::Built);
+    assert(resolvedDrv);
+
+    auto resolvedHashes = staticOutputHashes(worker.store, *resolvedDrv);
+
+    // `wantedOutputs` might be empty, which means “all the outputs”
+    auto realWantedOutputs = wantedOutputs;
+    if (realWantedOutputs.empty())
+        realWantedOutputs = resolvedDrv->outputNames();
+
+    for (auto & wantedOutput : realWantedOutputs) {
+        assert(initialOutputs.count(wantedOutput) != 0);
+        assert(resolvedHashes.count(wantedOutput) != 0);
+        auto realisation = worker.store.queryRealisation(
+            DrvOutput{resolvedHashes.at(wantedOutput), wantedOutput}
+        );
+        // We've just built it, but maybe the build failed, in which case the
+        // realisation won't be there
+        if (realisation) {
+            auto newRealisation = *realisation;
+            newRealisation.id = DrvOutput{initialOutputs.at(wantedOutput).outputHash, wantedOutput};
+            worker.store.registerDrvOutput(newRealisation);
+        } else {
+            // If we don't have a realisation, then it must mean that something
+            // failed when building the resolved drv
+            assert(!result.success());
+        }
+    }
+
+    // This is potentially a bit fishy in terms of error reporting. Not sure
+    // how to do it in a cleaner way
+    amDone(nrFailed == 0 ? ecSuccess : ecFailed, ex);
 }
 
 HookReply DerivationGoal::tryBuildHook()

@@ -3790,9 +3844,8 @@ void DerivationGoal::checkPathValidity()
 {
     bool checkHash = buildMode == bmRepair;
     for (auto & i : queryPartialDerivationOutputMap()) {
-        InitialOutput info {
-            .wanted = wantOutput(i.first, wantedOutputs),
-        };
+        InitialOutput & info = initialOutputs.at(i.first);
+        info.wanted = wantOutput(i.first, wantedOutputs);
         if (i.second) {
             auto outputPath = *i.second;
             info.known = {

@@ -3804,7 +3857,15 @@ void DerivationGoal::checkPathValidity()
                     : PathStatus::Corrupt,
             };
         }
-        initialOutputs.insert_or_assign(i.first, info);
+        if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
+            if (auto real = worker.store.queryRealisation(
+                    DrvOutput{initialOutputs.at(i.first).outputHash, i.first})) {
+                info.known = {
+                    .path = real->outPath,
+                    .status = PathStatus::Valid,
+                };
+            }
+        }
     }
 }
 

@@ -37,6 +37,7 @@ struct InitialOutputStatus {
 
 struct InitialOutput {
     bool wanted;
+    Hash outputHash;
     std::optional<InitialOutputStatus> known;
 };
 

@@ -48,6 +49,9 @@ struct DerivationGoal : public Goal
     /* The path of the derivation. */
     StorePath drvPath;
 
+    /* The path of the corresponding resolved derivation */
+    std::optional<BasicDerivation> resolvedDrv;
+
     /* The specific outputs that we need to build. Empty means all of
        them. */
     StringSet wantedOutputs;

@@ -745,7 +745,7 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String
 
 }
 
-std::optional<BasicDerivation> Derivation::tryResolveUncached(Store & store) {
+std::optional<BasicDerivation> Derivation::tryResolve(Store & store) {
     BasicDerivation resolved { *this };
 
     // Input paths that we'll want to rewrite in the derivation

@@ -756,8 +756,13 @@ std::optional<BasicDerivation> Derivation::tryResolveUncached(Store & store) {
         StringSet newOutputNames;
         for (auto & outputName : input.second) {
             auto actualPathOpt = inputDrvOutputs.at(outputName);
-            if (!actualPathOpt)
+            if (!actualPathOpt) {
+                warn("output %s of input %s missing, aborting the resolving",
+                    outputName,
+                    store.printStorePath(input.first)
+                );
                 return std::nullopt;
+            }
             auto actualPath = *actualPathOpt;
             inputRewrites.emplace(
                 downstreamPlaceholder(store, input.first, outputName),

@@ -771,34 +776,4 @@ std::optional<BasicDerivation> Derivation::tryResolveUncached(Store & store) {
     return resolved;
 }
 
-std::optional<BasicDerivation> Derivation::tryResolve(Store& store)
-{
-    auto drvPath = writeDerivation(store, *this, NoRepair, false);
-    return Derivation::tryResolve(store, drvPath);
-}
-
-std::optional<BasicDerivation> Derivation::tryResolve(Store& store, const StorePath& drvPath)
-{
-    // This is quite dirty and leaky, but will disappear once #4340 is merged
-    static Sync<std::map<StorePath, std::optional<Derivation>>> resolutionsCache;
-
-    {
-        auto resolutions = resolutionsCache.lock();
-        auto resolvedDrvIter = resolutions->find(drvPath);
-        if (resolvedDrvIter != resolutions->end()) {
-            auto & [_, resolvedDrv] = *resolvedDrvIter;
-            return *resolvedDrv;
-        }
-    }
-
-    /* Try resolve drv and use that path instead. */
-    auto drv = store.readDerivation(drvPath);
-    auto attempt = drv.tryResolveUncached(store);
-    if (!attempt)
-        return std::nullopt;
-    /* Store in memo table. */
-    resolutionsCache.lock()->insert_or_assign(drvPath, *attempt);
-    return *attempt;
-}
-
 }

@@ -138,14 +138,10 @@ struct Derivation : BasicDerivation
 
        2. Input placeholders are replaced with realized input store paths. */
     std::optional<BasicDerivation> tryResolve(Store & store);
-    static std::optional<BasicDerivation> tryResolve(Store & store, const StorePath & drvPath);
 
     Derivation() = default;
     Derivation(const BasicDerivation & bd) : BasicDerivation(bd) { }
     Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { }
-
-private:
-    std::optional<BasicDerivation> tryResolveUncached(Store & store);
 };
 
 

@@ -3,6 +3,7 @@
 #include "archive.hh"
 #include "args.hh"
 #include "abstract-setting-to-json.hh"
+#include "compute-levels.hh"
 
 #include <algorithm>
 #include <map>

@@ -133,24 +134,29 @@ StringSet Settings::getDefaultSystemFeatures()
 
 StringSet Settings::getDefaultExtraPlatforms()
 {
+    StringSet extraPlatforms;
+
     if (std::string{SYSTEM} == "x86_64-linux" && !isWSL1())
-        return StringSet{"i686-linux"};
-#if __APPLE__
+        extraPlatforms.insert("i686-linux");
+
+#if __linux__
+    StringSet levels = computeLevels();
+    for (auto iter = levels.begin(); iter != levels.end(); ++iter)
+        extraPlatforms.insert(*iter + "-linux");
+#elif __APPLE__
     // Rosetta 2 emulation layer can run x86_64 binaries on aarch64
     // machines. Note that we can’t force processes from executing
    // x86_64 in aarch64 environments or vice versa since they can
     // always exec with their own binary preferences.
-    else if (pathExists("/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist")) {
+    if (pathExists("/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist")) {
         if (std::string{SYSTEM} == "x86_64-darwin")
-            return StringSet{"aarch64-darwin"};
+            extraPlatforms.insert("aarch64-darwin");
         else if (std::string{SYSTEM} == "aarch64-darwin")
-            return StringSet{"x86_64-darwin"};
-        else
-            return StringSet{};
+            extraPlatforms.insert("x86_64-darwin");
     }
 #endif
-    else
-        return StringSet{};
+
+    return extraPlatforms;
 }
 
 bool Settings::isExperimentalFeatureEnabled(const std::string & name)

@@ -237,8 +243,17 @@ void MaxBuildJobsSetting::set(const std::string & str, bool append)
 }
 
 
+void PluginFilesSetting::set(const std::string & str, bool append)
+{
+    if (pluginsLoaded)
+        throw UsageError("plugin-files set after plugins were loaded, you may need to move the flag before the subcommand");
+    BaseSetting<Paths>::set(str, append);
+}
+
+
 void initPlugins()
 {
+    assert(!settings.pluginFiles.pluginsLoaded);
     for (const auto & pluginFile : settings.pluginFiles.get()) {
         Paths pluginFiles;
         try {

@@ -264,6 +279,9 @@ void initPlugins()
        unknown settings. */
     globalConfig.reapplyUnknownSettings();
     globalConfig.warnUnknownSettings();
+
+    /* Tell the user if they try to set plugin-files after we've already loaded */
+    settings.pluginFiles.pluginsLoaded = true;
 }
 
 }

@@ -28,6 +28,23 @@ struct MaxBuildJobsSetting : public BaseSetting<unsigned int>
     void set(const std::string & str, bool append = false) override;
 };
 
+struct PluginFilesSetting : public BaseSetting<Paths>
+{
+    bool pluginsLoaded = false;
+
+    PluginFilesSetting(Config * options,
+        const Paths & def,
+        const std::string & name,
+        const std::string & description,
+        const std::set<std::string> & aliases = {})
+        : BaseSetting<Paths>(def, name, description, aliases)
+    {
+        options->addSetting(this);
+    }
+
+    void set(const std::string & str, bool append = false) override;
+};
+
 class Settings : public Config {
 
     unsigned int getDefaultCores();

@@ -819,7 +836,7 @@ public:
     Setting<uint64_t> minFreeCheckInterval{this, 5, "min-free-check-interval",
         "Number of seconds between checking free disk space."};
 
-    Setting<Paths> pluginFiles{
+    PluginFilesSetting pluginFiles{
         this, {}, "plugin-files",
         R"(
           A list of plugin files to be loaded by Nix. Each of these files will

@@ -831,6 +848,9 @@ public:
          command, and RegisterSetting to add new nix config settings. See the
          constructors for those types for more details.
 
+          Warning! These APIs are inherently unstable and may change from
+          release to release.
+
          Since these files are loaded into the same address space as Nix
          itself, they must be DSOs compatible with the instance of Nix
          running at the time (i.e. compiled against the same headers, not

@@ -883,7 +883,7 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
 
 
 std::map<std::string, std::optional<StorePath>>
-LocalStore::queryDerivationOutputMapNoResolve(const StorePath& path_)
+LocalStore::queryPartialDerivationOutputMap(const StorePath & path_)
 {
     auto path = path_;
     auto outputs = retrySQLite<std::map<std::string, std::optional<StorePath>>>([&]() {

@@ -127,7 +127,7 @@ public:
 
     StorePathSet queryValidDerivers(const StorePath & path) override;
 
-    std::map<std::string, std::optional<StorePath>> queryDerivationOutputMapNoResolve(const StorePath & path) override;
+    std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path) override;
 
     std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
 

@@ -366,7 +366,7 @@ bool Store::PathInfoCacheValue::isKnownNow()
     return std::chrono::steady_clock::now() < time_point + ttl;
 }
 
-std::map<std::string, std::optional<StorePath>> Store::queryDerivationOutputMapNoResolve(const StorePath & path)
+std::map<std::string, std::optional<StorePath>> Store::queryPartialDerivationOutputMap(const StorePath & path)
 {
     std::map<std::string, std::optional<StorePath>> outputs;
     auto drv = readInvalidDerivation(path);

@@ -376,19 +376,6 @@ std::map<std::string, std::optional<StorePath>> Store::queryDerivationOutputMapN
     return outputs;
 }
 
-std::map<std::string, std::optional<StorePath>> Store::queryPartialDerivationOutputMap(const StorePath & path)
-{
-    if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
-        auto resolvedDrv = Derivation::tryResolve(*this, path);
-        if (resolvedDrv) {
-            auto resolvedDrvPath = writeDerivation(*this, *resolvedDrv, NoRepair, true);
-            if (isValidPath(resolvedDrvPath))
-                return queryDerivationOutputMapNoResolve(resolvedDrvPath);
-        }
-    }
-    return queryDerivationOutputMapNoResolve(path);
-}
-
 OutputPathMap Store::queryDerivationOutputMap(const StorePath & path) {
     auto resp = queryPartialDerivationOutputMap(path);
     OutputPathMap result;

@@ -417,12 +417,6 @@ public:
        `std::nullopt`. */
     virtual std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(const StorePath & path);
 
-    /*
-     * Similar to `queryPartialDerivationOutputMap`, but doesn't try to resolve
-     * the derivation
-     */
-    virtual std::map<std::string, std::optional<StorePath>> queryDerivationOutputMapNoResolve(const StorePath & path);
-
     /* Query the mapping outputName=>outputPath for the given derivation.
        Assume every output has a mapping and throw an exception otherwise. */
     OutputPathMap queryDerivationOutputMap(const StorePath & path);

@@ -60,6 +60,7 @@ void Args::parseCmdline(const Strings & _cmdline)
         verbosity = lvlError;
     }
 
+    bool argsSeen = false;
     for (auto pos = cmdline.begin(); pos != cmdline.end(); ) {
 
         auto arg = *pos;

@@ -88,6 +89,10 @@ void Args::parseCmdline(const Strings & _cmdline)
                 throw UsageError("unrecognised flag '%1%'", arg);
         }
         else {
+            if (!argsSeen) {
+                argsSeen = true;
+                initialFlagsProcessed();
+            }
             pos = rewriteArgs(cmdline, pos);
             pendingArgs.push_back(*pos++);
             if (processArgs(pendingArgs, false))

@@ -96,6 +101,9 @@ void Args::parseCmdline(const Strings & _cmdline)
     }
 
     processArgs(pendingArgs, true);
+
+    if (!argsSeen)
+        initialFlagsProcessed();
 }
 
 bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)

@@ -298,8 +306,8 @@ Strings argvToStrings(int argc, char * * argv)
     return args;
 }
 
-MultiCommand::MultiCommand(const Commands & commands)
-    : commands(commands)
+MultiCommand::MultiCommand(const Commands & commands_)
+    : commands(commands_)
 {
     expectArgs({
         .label = "subcommand",

@@ -132,6 +132,10 @@ protected:
 
     std::set<std::string> hiddenCategories;
 
+    /* Called after all command line flags before the first non-flag
+       argument (if any) have been processed. */
+    virtual void initialFlagsProcessed() {}
+
 public:
 
     void addFlag(Flag && flag);

src/libutil/compute-levels.cc (new file)
@@ -0,0 +1,80 @@
+#include "types.hh"
+
+#if HAVE_LIBCPUID
+#include <libcpuid/libcpuid.h>
+#endif
+
+namespace nix {
+
+#if HAVE_LIBCPUID
+
+StringSet computeLevels() {
+    StringSet levels;
+
+    if (!cpuid_present())
+        return levels;
+
+    cpu_raw_data_t raw;
+    cpu_id_t data;
+
+    if (cpuid_get_raw_data(&raw) < 0)
+        return levels;
+
+    if (cpu_identify(&raw, &data) < 0)
+        return levels;
+
+    if (!(data.flags[CPU_FEATURE_CMOV] &&
+          data.flags[CPU_FEATURE_CX8] &&
+          data.flags[CPU_FEATURE_FPU] &&
+          data.flags[CPU_FEATURE_FXSR] &&
+          data.flags[CPU_FEATURE_MMX] &&
+          data.flags[CPU_FEATURE_SSE] &&
+          data.flags[CPU_FEATURE_SSE2]))
+        return levels;
+
+    levels.insert("x86_64-v1");
+
+    if (!(data.flags[CPU_FEATURE_CX16] &&
+          data.flags[CPU_FEATURE_LAHF_LM] &&
+          data.flags[CPU_FEATURE_POPCNT] &&
+          // SSE3
+          data.flags[CPU_FEATURE_PNI] &&
+          data.flags[CPU_FEATURE_SSSE3] &&
+          data.flags[CPU_FEATURE_SSE4_1] &&
+          data.flags[CPU_FEATURE_SSE4_2]))
+        return levels;
+
+    levels.insert("x86_64-v2");
+
+    if (!(data.flags[CPU_FEATURE_AVX] &&
+          data.flags[CPU_FEATURE_AVX2] &&
+          data.flags[CPU_FEATURE_F16C] &&
+          data.flags[CPU_FEATURE_FMA3] &&
+          // LZCNT
+          data.flags[CPU_FEATURE_ABM] &&
+          data.flags[CPU_FEATURE_MOVBE]))
+        return levels;
+
+    levels.insert("x86_64-v3");
+
+    if (!(data.flags[CPU_FEATURE_AVX512F] &&
+          data.flags[CPU_FEATURE_AVX512BW] &&
+          data.flags[CPU_FEATURE_AVX512CD] &&
+          data.flags[CPU_FEATURE_AVX512DQ] &&
+          data.flags[CPU_FEATURE_AVX512VL]))
+        return levels;
+
+    levels.insert("x86_64-v4");
+
+    return levels;
+}
+
+#else
+
+StringSet computeLevels() {
+    return StringSet{};
+}
+
+#endif // HAVE_LIBCPUID
+
+}

src/libutil/compute-levels.hh (new file)
@@ -0,0 +1,7 @@
+#include "types.hh"
+
+namespace nix {
+
+StringSet computeLevels();
+
+}

@@ -7,3 +7,7 @@ libutil_DIR := $(d)
 libutil_SOURCES := $(wildcard $(d)/*.cc)
 
 libutil_LDFLAGS = $(LIBLZMA_LIBS) -lbz2 -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context
+
+ifeq ($(HAVE_LIBCPUID), 1)
+ libutil_LDFLAGS += -lcpuid
+endif

@@ -240,8 +240,6 @@ static void main_nix_build(int argc, char * * argv)
 
     myArgs.parseCmdline(args);
 
-    initPlugins();
-
     if (packages && fromArgs)
         throw UsageError("'-p' and '-E' are mutually exclusive");
 

@@ -518,9 +516,11 @@ static void main_nix_build(int argc, char * * argv)
             if (counter)
                 drvPrefix += fmt("-%d", counter + 1);
 
-            auto builtOutputs = store->queryDerivationOutputMap(drvPath);
+            auto builtOutputs = store->queryPartialDerivationOutputMap(drvPath);
 
-            auto outputPath = builtOutputs.at(outputName);
+            auto maybeOutputPath = builtOutputs.at(outputName);
+            assert(maybeOutputPath);
+            auto outputPath = *maybeOutputPath;
 
             if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>()) {
                 std::string symlink = drvPrefix;

@@ -196,8 +196,6 @@ static int main_nix_channel(int argc, char ** argv)
         return true;
     });
 
-    initPlugins();
-
     switch (cmd) {
         case cNone:
             throw UsageError("no command specified");

@@ -74,8 +74,6 @@ static int main_nix_collect_garbage(int argc, char * * argv)
         return true;
     });
 
-    initPlugins();
-
     auto profilesDir = settings.nixStateDir + "/profiles";
     if (removeOld) removeOldGenerations(profilesDir);
 

@@ -43,8 +43,6 @@ static int main_nix_copy_closure(int argc, char ** argv)
         return true;
     });
 
-    initPlugins();
-
     if (sshHost.empty())
         throw UsageError("no host name specified");
 

@@ -1420,8 +1420,6 @@ static int main_nix_env(int argc, char * * argv)
 
     myArgs.parseCmdline(argvToStrings(argc, argv));
 
-    initPlugins();
-
     if (!op) throw UsageError("no operation specified");
 
     auto store = openStore();

@@ -149,8 +149,6 @@ static int main_nix_instantiate(int argc, char * * argv)
 
     myArgs.parseCmdline(argvToStrings(argc, argv));
 
-    initPlugins();
-
     if (evalOnly && !wantsReadWrite)
         settings.readOnlyMode = true;
 

@@ -1067,8 +1067,6 @@ static int main_nix_store(int argc, char * * argv)
         return true;
     });
 
-    initPlugins();
-
     if (!op) throw UsageError("no operation specified");
 
     if (op != opDump && op != opRestore) /* !!! hack */

@@ -12,11 +12,16 @@ App Installable::toApp(EvalState & state)
 
     auto type = cursor->getAttr("type")->getString();
 
+    auto checkProgram = [&](const Path & program)
+    {
+        if (!state.store->isInStore(program))
+            throw Error("app program '%s' is not in the Nix store", program);
+    };
+
     if (type == "app") {
         auto [program, context] = cursor->getAttr("program")->getStringWithContext();
 
-        if (!state.store->isInStore(program))
-            throw Error("app program '%s' is not in the Nix store", program);
+        checkProgram(program);
 
         std::vector<StorePathWithOutputs> context2;
         for (auto & [path, name] : context)

@@ -33,9 +38,17 @@ App Installable::toApp(EvalState & state)
         auto outPath = cursor->getAttr(state.sOutPath)->getString();
         auto outputName = cursor->getAttr(state.sOutputName)->getString();
         auto name = cursor->getAttr(state.sName)->getString();
+        auto aMeta = cursor->maybeGetAttr("meta");
+        auto aMainProgram = aMeta ? aMeta->maybeGetAttr("mainProgram") : nullptr;
+        auto mainProgram =
+            aMainProgram
+            ? aMainProgram->getString()
+            : DrvName(name).name;
+        auto program = outPath + "/bin/" + mainProgram;
+        checkProgram(program);
         return App {
             .context = { { drvPath, {outputName} } },
-            .program = outPath + "/bin/" + DrvName(name).name,
+            .program = program,
         };
     }
 

@@ -74,7 +74,7 @@ struct CmdBundle : InstallableCommand
 
         auto [bundlerFlakeRef, bundlerName] = parseFlakeRefWithFragment(bundler, absPath("."));
         const flake::LockFlags lockFlags{ .writeLockFile = false };
-        auto bundler = InstallableFlake(
+        auto bundler = InstallableFlake(this,
             evalState, std::move(bundlerFlakeRef),
             Strings{bundlerName == "" ? "defaultBundler" : bundlerName},
             Strings({"bundlers."}), lockFlags);

@@ -342,8 +342,6 @@ static int main_nix_daemon(int argc, char * * argv)
         return true;
     });
 
-    initPlugins();
-
     runDaemon(stdio, isTrustedOpt);
 
     return 0;

@@ -59,7 +59,7 @@ BuildEnvironment readEnvironment(const Path & path)
         R"re((?:\$?"(?:[^"\\]|\\[$`"\\\n])*"))re";
 
     static std::string squotedStringRegex =
-        R"re((?:\$?'(?:[^'\\]|\\[abeEfnrtv\\'"?])*'))re";
+        R"re((?:\$?(?:'(?:[^'\\]|\\[abeEfnrtv\\'"?])*'|\\')+))re";
 
     static std::string indexedArrayRegex =
         R"re((?:\(( *\[[0-9]+\]="(?:[^"\\]|\\.)*")*\)))re";

@@ -443,6 +443,7 @@ struct CmdDevelop : Common, MixEnvironment
         auto state = getEvalState();
 
         auto bashInstallable = std::make_shared<InstallableFlake>(
+            this,
             state,
             installable->nixpkgsFlakeRef(),
             Strings{"bashInteractive"},

@@ -595,7 +595,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
 
         auto [templateFlakeRef, templateName] = parseFlakeRefWithFragment(templateUrl, absPath("."));
 
-        auto installable = InstallableFlake(
+        auto installable = InstallableFlake(nullptr,
             evalState, std::move(templateFlakeRef),
             Strings{templateName == "" ? "defaultTemplate" : templateName},
             Strings(attrsPathPrefixes), lockFlags);

@@ -880,7 +880,8 @@ struct CmdFlakeShow : FlakeCommand
                         || attrPath[0] == "nixosConfigurations"
                         || attrPath[0] == "nixosModules"
                         || attrPath[0] == "defaultApp"
-                        || attrPath[0] == "templates"))
+                        || attrPath[0] == "templates"
+                        || attrPath[0] == "overlays"))
                     || ((attrPath.size() == 1 || attrPath.size() == 2)
                         && (attrPath[0] == "checks"
                             || attrPath[0] == "packages"

@@ -943,7 +944,8 @@ struct CmdFlakeShow : FlakeCommand
             else {
                 logger->cout("%s: %s",
                     headerPrefix,
-                    attrPath.size() == 1 && attrPath[0] == "overlay" ? "Nixpkgs overlay" :
+                    (attrPath.size() == 1 && attrPath[0] == "overlay")
+                    || (attrPath.size() == 2 && attrPath[0] == "overlays") ? "Nixpkgs overlay" :
                     attrPath.size() == 2 && attrPath[0] == "nixosConfigurations" ? "NixOS configuration" :
                     attrPath.size() == 2 && attrPath[0] == "nixosModules" ? "NixOS module" :
                     ANSI_YELLOW "unknown" ANSI_NORMAL);

@ -17,6 +17,10 @@
|
|||
#include <netdb.h>
|
||||
#include <netinet/in.h>
|
||||
|
||||
#if __linux__
|
||||
#include <sys/resource.h>
|
||||
#endif
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
extern std::string chrootHelperName;
|
||||
|
@@ -61,6 +65,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
     bool printBuildLogs = false;
     bool useNet = true;
     bool refresh = false;
+    bool showVersion = false;
 
     NixArgs() : MultiCommand(RegisterCommand::getCommandsFor({})), MixCommonArgs("nix")
     {

@@ -87,7 +92,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
         addFlag({
             .longName = "version",
             .description = "Show version information.",
-            .handler = {[&]() { if (!completions) printVersion(programName); }},
+            .handler = {[&]() { showVersion = true; }},
         });
 
         addFlag({

@@ -154,6 +159,12 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
 #include "nix.md"
           ;
     }
+
+    // Plugins may add new subcommands.
+    void pluginsInited() override
+    {
+        commands = RegisterCommand::getCommandsFor({});
+    }
 };
 
 static void showHelp(std::vector<std::string> subcommand)

@@ -278,7 +289,10 @@ void mainWrapped(int argc, char * * argv)
 
     if (completions) return;
 
-    initPlugins();
+    if (args.showVersion) {
+        printVersion(programName);
+        return;
+    }
 
     if (!args.command)
         throw UsageError("no subcommand specified");

@@ -319,6 +333,17 @@ void mainWrapped(int argc, char * * argv)
 
 int main(int argc, char * * argv)
 {
+    // Increase the default stack size for the evaluator and for
+    // libstdc++'s std::regex.
+    #if __linux__
+    rlim_t stackSize = 64 * 1024 * 1024;
+    struct rlimit limit;
+    if (getrlimit(RLIMIT_STACK, &limit) == 0 && limit.rlim_cur < stackSize) {
+        limit.rlim_cur = stackSize;
+        setrlimit(RLIMIT_STACK, &limit);
+    }
+    #endif
+
     return nix::handleExceptions(argv[0], [&]() {
         nix::mainWrapped(argc, argv);
     });

@@ -171,8 +171,6 @@ static int main_nix_prefetch_url(int argc, char * * argv)
 
     myArgs.parseCmdline(argvToStrings(argc, argv));
 
-    initPlugins();
-
     if (args.size() > 2)
         throw UsageError("too many arguments");
 

@@ -399,7 +399,13 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
             Activity act(*logger, lvlChatty, actUnknown,
                 fmt("checking '%s' for updates", element.source->attrPath));
 
-            InstallableFlake installable(getEvalState(), FlakeRef(element.source->originalRef), {element.source->attrPath}, {}, lockFlags);
+            InstallableFlake installable(
+                this,
+                getEvalState(),
+                FlakeRef(element.source->originalRef),
+                {element.source->attrPath},
+                {},
+                lockFlags);
 
             auto [attrPath, resolvedRef, drv] = installable.toDerivation();
 

@@ -43,9 +43,10 @@ program specified by the app definition.
 
 If *installable* evaluates to a derivation, it will try to execute the
 program `<out>/bin/<name>`, where *out* is the primary output store
-path of the derivation and *name* is the name part of the value of the
-`name` attribute of the derivation (e.g. if `name` is set to
-`hello-1.10`, it will run `$out/bin/hello`).
+path of the derivation and *name* is the `meta.mainProgram` attribute
+of the derivation if it exists, and otherwise the name part of the
+value of the `name` attribute of the derivation (e.g. if `name` is set
+to `hello-1.10`, it will run `$out/bin/hello`).
 
 # Flake output attributes
 

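As an aside on the `meta.mainProgram` behaviour documented in the hunk above, here is a minimal, hypothetical sketch (not part of this merge): a package whose store name does not match its main binary can set `meta.mainProgram` so that `nix run` executes the intended program. The flake, the `hello-unstable` name, and the override of `pkgs.hello` are made-up illustrations; only the `meta.mainProgram` lookup and the fallback to the name part of `name` are taken from the `app.cc` and `run.md` changes above.

```nix
{
  # flake.nix (hypothetical). Without meta.mainProgram, `nix run .` would look
  # for $out/bin/hello-unstable (the name part of "hello-unstable-2021-03-01");
  # with it, $out/bin/hello is executed. nixpkgs is resolved via the flake registry.
  outputs = { self, nixpkgs }: {
    defaultPackage.x86_64-linux =
      let pkgs = nixpkgs.legacyPackages.x86_64-linux;
      in pkgs.hello.overrideAttrs (old: {
        name = "hello-unstable-2021-03-01";
        meta = (old.meta or { }) // { mainProgram = "hello"; };
      });
  };
}
```
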
tests/compute-levels.sh (new file)
@@ -0,0 +1,7 @@
+source common.sh
+
+if [[ $(uname -ms) = "Linux x86_64" ]]; then
+    # x86_64 CPUs must always support the baseline
+    # microarchitecture level.
+    nix -vv --version | grep -q "x86_64-v1-linux"
+fi

@@ -48,6 +48,10 @@ testCutoff () {
 testGC () {
     nix-instantiate --experimental-features ca-derivations ./content-addressed.nix -A rootCA --arg seed 5
     nix-collect-garbage --experimental-features ca-derivations --option keep-derivations true
+    clearStore
+    buildAttr rootCA 1 --out-link $TEST_ROOT/rootCA
+    nix-collect-garbage --experimental-features ca-derivations
+    buildAttr rootCA 1 -j0
 }
 
 testNixCommand () {

@@ -41,7 +41,8 @@ nix_tests = \
   describe-stores.sh \
   flakes.sh \
   content-addressed.sh \
-  build.sh
+  build.sh \
+  compute-levels.sh
   # parallel.sh
   # build-remote-content-addressed-fixed.sh # problem with fixed output derivations
   # build-remote-trustless-should-pass-0.sh # problem with legacy ssh-store only

@@ -2,6 +2,6 @@ source common.sh
 
 set -o pipefail
 
-res=$(nix eval --expr builtins.anotherNull --option setting-set true --option plugin-files $PWD/plugins/libplugintest*)
+res=$(nix --option setting-set true --option plugin-files $PWD/plugins/libplugintest* eval --expr builtins.anotherNull)
 
 [ "$res"x = "nullx" ]
 