Merge branch 'misc-ca' of github.com:obsidiansystems/nix into new-interface-for-path-pathOpt

commit c318d398f3

31 changed files with 148 additions and 144 deletions
@@ -80,7 +80,7 @@ SV * queryReferences(char * path)
 SV * queryPathHash(char * path)
     PPCODE:
         try {
-            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
+            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash->to_string(Base32, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32)
                 XPUSHs(&PL_sv_undef);
             else
                 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-            auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
+            auto s = info->narHash->to_string(base32 ? Base32 : Base16, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
             mXPUSHi(info->registrationTime);
             mXPUSHi(info->narSize);

@@ -285,11 +285,10 @@ static std::shared_ptr<AttrDb> makeAttrDb(const Hash & fingerprint)
 }

 EvalCache::EvalCache(
-    bool useCache,
-    const Hash & fingerprint,
+    std::optional<std::reference_wrapper<const Hash>> useCache,
     EvalState & state,
     RootLoader rootLoader)
-    : db(useCache ? makeAttrDb(fingerprint) : nullptr)
+    : db(useCache ? makeAttrDb(*useCache) : nullptr)
     , state(state)
     , rootLoader(rootLoader)
 {

@@ -4,6 +4,7 @@
 #include "hash.hh"
 #include "eval.hh"

+#include <functional>
 #include <variant>

 namespace nix::eval_cache {

@@ -26,8 +27,7 @@ class EvalCache : public std::enable_shared_from_this<EvalCache>
 public:

     EvalCache(
-        bool useCache,
-        const Hash & fingerprint,
+        std::optional<std::reference_wrapper<const Hash>> useCache,
         EvalState & state,
         RootLoader rootLoader);

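The constructor now takes a single std::optional<std::reference_wrapper<const Hash>> in place of the old bool useCache plus const Hash & fingerprint pair: an engaged optional means "use the cache, keyed by this fingerprint", std::nullopt means no cache. A minimal caller sketch (names such as wantCache, fingerprint, state and rootLoader are illustrative, not from this commit):

    // Sketch only: choosing between a cached and an uncached EvalCache
    // under the new optional-fingerprint signature.
    auto cache = std::make_shared<nix::eval_cache::EvalCache>(
        wantCache
            ? std::optional { std::cref(fingerprint) }   // cache keyed by this fingerprint
            : std::nullopt,                              // no on-disk cache
        state,
        rootLoader);
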
@@ -1126,7 +1126,7 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu


 static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,
-    Value * filterFun, FileIngestionMethod method, const Hash & expectedHash, Value & v)
+    Value * filterFun, FileIngestionMethod method, const std::optional<Hash> expectedHash, Value & v)
 {
     const auto path = evalSettings.pureEval && expectedHash ?
         path_ :

@@ -1157,7 +1157,7 @@ static void addPath(EvalState & state, const Pos & pos, const string & name, con

     std::optional<StorePath> expectedStorePath;
     if (expectedHash)
-        expectedStorePath = state.store->makeFixedOutputPath(method, expectedHash, name);
+        expectedStorePath = state.store->makeFixedOutputPath(method, *expectedHash, name);
     Path dstPath;
     if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
         dstPath = state.store->printStorePath(settings.readOnlyMode

@@ -1191,7 +1191,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
             .errPos = pos
         });

-    addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, Hash(), v);
+    addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v);
 }

 static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v)

@@ -1201,7 +1201,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
     string name;
     Value * filterFun = nullptr;
     auto method = FileIngestionMethod::Recursive;
-    Hash expectedHash;
+    Hash expectedHash(htSHA256);

     for (auto & attr : *args[0]->attrs) {
         const string & n(attr.name);
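Since addPath now takes const std::optional<Hash>, callers such as builtins.filterSource pass std::nullopt when there is nothing to verify, instead of a dummy Hash(). Rough shape of the two call styles (hashStr below is a hypothetical placeholder, not from this commit):

    // Sketch only: the two ways addPath is now called.
    addPath(state, pos, name, path, filterFun, method, std::nullopt, v);             // no expected hash
    addPath(state, pos, name, path, filterFun, method, Hash(hashStr, htSHA256), v);  // enforce a fixed-output hash
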
@@ -159,7 +159,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
             : hashFile(htSHA256, path);
         if (hash != *expectedHash)
             throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
-                *url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
+                *url, expectedHash->to_string(Base32, true), hash->to_string(Base32, true));
     }

     if (state.allowedPaths)

@@ -130,12 +130,12 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
     tree.actualPath = store->toRealPath(tree.storePath);

     auto narHash = store->queryPathInfo(tree.storePath)->narHash;
-    input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
+    input.attrs.insert_or_assign("narHash", narHash->to_string(SRI, true));

     if (auto prevNarHash = getNarHash()) {
         if (narHash != *prevNarHash)
             throw Error("NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
-                to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
+                to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash->to_string(SRI, true));
     }

     if (auto prevLastModified = getLastModified()) {

@@ -178,7 +178,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
     auto [fileHash, fileSize] = fileHashSink.finish();
     narInfo->fileHash = fileHash;
     narInfo->fileSize = fileSize;
-    narInfo->url = "nar/" + narInfo->fileHash.to_string(Base32, false) + ".nar"
+    narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar"
         + (compression == "xz" ? ".xz" :
            compression == "bzip2" ? ".bz2" :
            compression == "br" ? ".br" :

@@ -372,7 +372,7 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
        method for very large paths, but `copyPath' is mainly used for
        small files. */
     StringSink sink;
-    Hash h;
+    std::optional<Hash> h;
     if (method == FileIngestionMethod::Recursive) {
         dumpPath(srcPath, sink, filter);
         h = hashString(hashAlgo, *sink.s);

@@ -382,7 +382,7 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
         h = hashString(hashAlgo, s);
     }

-    ValidPathInfo info(makeFixedOutputPath(method, h, name));
+    ValidPathInfo info(makeFixedOutputPath(method, *h, name));

     auto source = StringSource { *sink.s };
     addToStore(info, source, repair, CheckSigs);

@@ -3730,7 +3730,7 @@ void DerivationGoal::registerOutputs()
             [&](DerivationOutputFixed dof) {
                 outputHash = DerivationOutputFloating {
                     .method = dof.hash.method,
-                    .hashType = *dof.hash.hash.type,
+                    .hashType = dof.hash.hash.type,
                 };
             },
             [&](DerivationOutputFloating dof) {

@@ -3811,8 +3811,10 @@ void DerivationGoal::registerOutputs()
            time. The hash is stored in the database so that we can
            verify later on whether nobody has messed with the store. */
         debug("scanning for references inside '%1%'", path);
-        HashResult hash;
-        auto references = worker.store.parseStorePathSet(scanForReferences(actualPath, worker.store.printStorePathSet(referenceablePaths), hash));
+        // HashResult hash;
+        auto pathSetAndHash = scanForReferences(actualPath, worker.store.printStorePathSet(referenceablePaths));
+        auto references = worker.store.parseStorePathSet(pathSetAndHash.first);
+        HashResult hash = pathSetAndHash.second;

         if (buildMode == bmCheck) {
             if (!worker.store.isValidPath(worker.store.parseStorePath(path))) continue;

@@ -5034,7 +5036,7 @@ bool Worker::pathContentsGood(const StorePath & path)
     if (!pathExists(store.printStorePath(path)))
         res = false;
     else {
-        HashResult current = hashPath(*info->narHash.type, store.printStorePath(path));
+        HashResult current = hashPath(info->narHash->type, store.printStorePath(path));
         Hash nullHash(htSHA256);
         res = info->narHash == nullHash || info->narHash == current.first;
     }

@@ -68,7 +68,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
         for (auto hashedMirror : settings.hashedMirrors.get()) {
             try {
                 if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
-                fetch(hashedMirror + printHashType(*hash->hash.hash.type) + "/" + hash->hash.hash.to_string(Base16, false));
+                fetch(hashedMirror + printHashType(hash->hash.hash.type) + "/" + hash->hash.hash.to_string(Base16, false));
                 return;
             } catch (Error & e) {
                 debug(e.what());

@@ -3,7 +3,7 @@
 namespace nix {

 std::string FixedOutputHash::printMethodAlgo() const {
-    return makeFileIngestionPrefix(method) + printHashType(*hash.type);
+    return makeFileIngestionPrefix(method) + printHashType(hash.type);
 }

 std::string makeFileIngestionPrefix(const FileIngestionMethod m) {

@@ -42,7 +42,7 @@ ContentAddress parseContentAddress(std::string_view rawCa) {
     if (prefix == "text") {
         auto hashTypeAndHash = rawCa.substr(prefixSeparator+1, string::npos);
         Hash hash = Hash(string(hashTypeAndHash));
-        if (*hash.type != htSHA256) {
+        if (hash.type != htSHA256) {
             throw Error("parseContentAddress: the text hash should have type SHA256");
         }
         return TextHash { hash };

@@ -289,7 +289,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         logger->startWork();
         auto hash = store->queryPathInfo(path)->narHash;
         logger->stopWork();
-        to << hash.to_string(Base16, false);
+        to << hash->to_string(Base16, false);
         break;
     }

@@ -632,7 +632,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
             if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
                 to << 1;
             to << (info->deriver ? store->printStorePath(*info->deriver) : "")
-               << info->narHash.to_string(Base16, false);
+               << info->narHash->to_string(Base16, false);
             writeStorePaths(*store, to, info->references);
             to << info->registrationTime << info->narSize;
             if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {

@@ -38,9 +38,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
        filesystem corruption from spreading to other machines.
        Don't complain if the stored hash is zero (unknown). */
     Hash hash = hashSink.currentHash().first;
-    if (hash != info->narHash && info->narHash != Hash(*info->narHash.type))
+    if (hash != info->narHash && info->narHash != Hash(info->narHash->type))
         throw Error("hash of path '%s' has changed from '%s' to '%s'!",
-            printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
+            printStorePath(path), info->narHash->to_string(Base32, true), hash.to_string(Base32, true));

     teeSink
         << exportMagic

@@ -113,7 +113,7 @@ struct LegacySSHStore : public Store

         if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
             auto s = readString(conn->from);
-            info->narHash = s.empty() ? Hash() : Hash(s);
+            info->narHash = s.empty() ? std::optional<Hash>{} : Hash{s};
             info->ca = parseContentAddressOpt(readString(conn->from));
             info->sigs = readStrings<StringSet>(conn->from);
         }

@@ -138,7 +138,7 @@ struct LegacySSHStore : public Store
                 << cmdAddToStoreNar
                 << printStorePath(info.path)
                 << (info.deriver ? printStorePath(*info.deriver) : "")
-                << info.narHash.to_string(Base16, false);
+                << info.narHash->to_string(Base16, false);
             writeStorePaths(*this, conn->to, info.references);
             conn->to
                 << info.registrationTime

@@ -594,7 +594,7 @@ uint64_t LocalStore::addValidPath(State & state,

     state.stmtRegisterValidPath.use()
         (printStorePath(info.path))
-        (info.narHash.to_string(Base16, true))
+        (info.narHash->to_string(Base16, true))
         (info.registrationTime == 0 ? time(0) : info.registrationTime)
         (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
         (info.narSize, info.narSize != 0)

@@ -694,7 +694,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
 {
     state.stmtUpdatePathInfo.use()
         (info.narSize, info.narSize != 0)
-        (info.narHash.to_string(Base16, true))
+        (info.narHash->to_string(Base16, true))
         (info.ultimate ? 1 : 0, info.ultimate)
         (concatStringsSep(" ", info.sigs), !info.sigs.empty())
         (renderContentAddress(info.ca), (bool) info.ca)

@@ -908,7 +908,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
         StorePathSet paths;

         for (auto & i : infos) {
-            assert(i.narHash.type == htSHA256);
+            assert(i.narHash && i.narHash->type == htSHA256);
             if (isValidPath_(*state, i.path))
                 updatePathInfo(*state, i);
             else

@@ -1021,7 +1021,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,

         if (hashResult.first != info.narHash)
             throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s",
-                printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
+                printStorePath(info.path), info.narHash->to_string(Base32, true), hashResult.first.to_string(Base32, true));

         if (hashResult.second != info.narSize)
             throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s",

@@ -1317,9 +1317,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)

                 std::unique_ptr<AbstractHashSink> hashSink;
                 if (!info->ca || !info->references.count(info->path))
-                    hashSink = std::make_unique<HashSink>(*info->narHash.type);
+                    hashSink = std::make_unique<HashSink>(info->narHash->type);
                 else
-                    hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
+                    hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));

                 dumpPath(Store::toRealPath(i), *hashSink);
                 auto current = hashSink->finish();

@@ -1328,7 +1328,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
                     logError({
                         .name = "Invalid hash - path modified",
                         .hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
-                            printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
+                            printStorePath(i), info->narHash->to_string(Base32, true), current.first.to_string(Base32, true))
                     });
                     if (repair) repairPath(i); else errors = true;
                 } else {

@@ -230,9 +230,9 @@ public:
                 (std::string(info->path.name()))
                 (narInfo ? narInfo->url : "", narInfo != 0)
                 (narInfo ? narInfo->compression : "", narInfo != 0)
-                (narInfo && narInfo->fileHash ? narInfo->fileHash.to_string(Base32, true) : "", narInfo && narInfo->fileHash)
+                (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
                 (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
-                (info->narHash.to_string(Base32, true))
+                (info->narHash->to_string(Base32, true))
                 (info->narSize)
                 (concatStringsSep(" ", info->shortRefs()))
                 (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)

@@ -7,15 +7,14 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
     : ValidPathInfo(StorePath(StorePath::dummy)) // FIXME: hack
 {
     auto corrupt = [&]() {
-        throw Error("NAR info file '%1%' is corrupt", whence);
+        return Error("NAR info file '%1%' is corrupt", whence);
     };

     auto parseHashField = [&](const string & s) {
         try {
             return Hash(s);
         } catch (BadHash &) {
-            corrupt();
-            return Hash(); // never reached
+            throw corrupt();
         }
     };

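The corrupt helper now returns the Error object and every caller writes "throw corrupt();", which removes the unreachable "return Hash(); // never reached" placeholder. A self-contained sketch of the same pattern (the types and names here are illustrative, not Nix's own):

    #include <stdexcept>
    #include <string>

    // Sketch: the lambda *returns* the exception and each caller throws it,
    // so no dead placeholder return is needed after the call.
    struct CorruptError : std::runtime_error { using std::runtime_error::runtime_error; };

    int parseField(const std::string & value)
    {
        auto corrupt = [&]() { return CorruptError("field '" + value + "' is corrupt"); };
        if (value.empty())
            throw corrupt();
        return std::stoi(value);
    }
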
@@ -25,12 +24,12 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
     while (pos < s.size()) {

         size_t colon = s.find(':', pos);
-        if (colon == std::string::npos) corrupt();
+        if (colon == std::string::npos) throw corrupt();

         std::string name(s, pos, colon - pos);

         size_t eol = s.find('\n', colon + 2);
-        if (eol == std::string::npos) corrupt();
+        if (eol == std::string::npos) throw corrupt();

         std::string value(s, colon + 2, eol - colon - 2);

@@ -45,16 +44,16 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
         else if (name == "FileHash")
             fileHash = parseHashField(value);
         else if (name == "FileSize") {
-            if (!string2Int(value, fileSize)) corrupt();
+            if (!string2Int(value, fileSize)) throw corrupt();
         }
         else if (name == "NarHash")
             narHash = parseHashField(value);
         else if (name == "NarSize") {
-            if (!string2Int(value, narSize)) corrupt();
+            if (!string2Int(value, narSize)) throw corrupt();
         }
         else if (name == "References") {
             auto refs = tokenizeString<Strings>(value, " ");
-            if (!references.empty()) corrupt();
+            if (!references.empty()) throw corrupt();
             for (auto & r : refs)
                 references.insert(StorePath(r));
         }

@@ -67,7 +66,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
         else if (name == "Sig")
             sigs.insert(value);
         else if (name == "CA") {
-            if (ca) corrupt();
+            if (ca) throw corrupt();
             // FIXME: allow blank ca or require skipping field?
             ca = parseContentAddressOpt(value);
         }

@@ -77,7 +76,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &

     if (compression == "") compression = "bzip2";

-    if (!havePath || url.empty() || narSize == 0 || !narHash) corrupt();
+    if (!havePath || url.empty() || narSize == 0 || !narHash) throw corrupt();
 }

 std::string NarInfo::to_string(const Store & store) const

@@ -87,11 +86,11 @@ std::string NarInfo::to_string(const Store & store) const
     res += "URL: " + url + "\n";
     assert(compression != "");
     res += "Compression: " + compression + "\n";
-    assert(fileHash.type == htSHA256);
-    res += "FileHash: " + fileHash.to_string(Base32, true) + "\n";
+    assert(fileHash && fileHash->type == htSHA256);
+    res += "FileHash: " + fileHash->to_string(Base32, true) + "\n";
     res += "FileSize: " + std::to_string(fileSize) + "\n";
-    assert(narHash.type == htSHA256);
-    res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
+    assert(narHash && narHash->type == htSHA256);
+    res += "NarHash: " + narHash->to_string(Base32, true) + "\n";
     res += "NarSize: " + std::to_string(narSize) + "\n";

     res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";

@@ -10,7 +10,7 @@ struct NarInfo : ValidPathInfo
 {
     std::string url;
     std::string compression;
-    Hash fileHash;
+    std::optional<Hash> fileHash;
     uint64_t fileSize = 0;
     std::string system;

@@ -76,8 +76,8 @@ void RefScanSink::operator () (const unsigned char * data, size_t len)
 }


-PathSet scanForReferences(const string & path,
-    const PathSet & refs, HashResult & hash)
+std::pair<PathSet, HashResult> scanForReferences(const string & path,
+    const PathSet & refs)
 {
     RefScanSink refsSink;
     HashSink hashSink { htSHA256 };

@@ -111,9 +111,9 @@ PathSet scanForReferences(const string & path,
             found.insert(j->second);
         }

-    hash = hashSink.finish();
+    auto hash = hashSink.finish();

-    return found;
+    return std::pair<PathSet, HashResult>(found, hash);
 }


@@ -5,8 +5,7 @@

 namespace nix {

-PathSet scanForReferences(const Path & path, const PathSet & refs,
-    HashResult & hash);
+std::pair<PathSet, HashResult> scanForReferences(const Path & path, const PathSet & refs);

 struct RewritingSink : Sink
 {
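scanForReferences now returns its two results together instead of writing the hash through an out-parameter. A usage sketch against the new signature (variable names are illustrative; the build.cc call site above uses .first/.second instead of structured bindings):

    // Sketch: unpacking the pair returned by the new scanForReferences.
    auto [foundRefs, narHashResult] = scanForReferences(actualPath, candidatePaths);
    // foundRefs     : PathSet    - store paths referenced from actualPath
    // narHashResult : HashResult - hash and size of the NAR, computed in the same pass
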
@@ -498,7 +498,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
         conn->to << wopAddToStoreNar
                  << printStorePath(info.path)
                  << (info.deriver ? printStorePath(*info.deriver) : "")
-                 << info.narHash.to_string(Base16, false);
+                 << info.narHash->to_string(Base16, false);
         writeStorePaths(*this, conn->to, info.references);
         conn->to << info.registrationTime << info.narSize
                  << info.ultimate << info.sigs << renderContentAddress(info.ca)

@@ -538,7 +538,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
         auto info = queryPathInfo(i);

         if (showHash) {
-            s += info->narHash.to_string(Base16, false) + "\n";
+            s += info->narHash->to_string(Base16, false) + "\n";
             s += (format("%1%\n") % info->narSize).str();
         }

@@ -570,7 +570,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
             auto info = queryPathInfo(storePath);

             jsonPath
-                .attr("narHash", info->narHash.to_string(hashBase, true))
+                .attr("narHash", info->narHash->to_string(hashBase, true))
                 .attr("narSize", info->narSize);

             {

@@ -613,7 +613,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
                 if (!narInfo->url.empty())
                     jsonPath.attr("url", narInfo->url);
                 if (narInfo->fileHash)
-                    jsonPath.attr("downloadHash", narInfo->fileHash.to_string(hashBase, true));
+                    jsonPath.attr("downloadHash", narInfo->fileHash->to_string(hashBase, true));
                 if (narInfo->fileSize)
                     jsonPath.attr("downloadSize", narInfo->fileSize);
                 if (showClosureSize)

@@ -854,7 +854,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
             store.printStorePath(path));
     return
         "1;" + store.printStorePath(path) + ";"
-        + narHash.to_string(Base32, true) + ";"
+        + narHash->to_string(Base32, true) + ";"
         + std::to_string(narSize) + ";"
         + concatStringsSep(",", store.printStorePathSet(references));
 }

@@ -115,7 +115,8 @@ struct ValidPathInfo
 {
     StorePath path;
     std::optional<StorePath> deriver;
-    Hash narHash;
+    // TODO document this
+    std::optional<Hash> narHash;
     StorePathSet references;
     time_t registrationTime = 0;
     uint64_t narSize = 0; // 0 = unknown
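With ValidPathInfo::narHash now std::optional<Hash>, readers dereference with -> and check presence first, which is why the asserts elsewhere in this commit gained an "i.narHash &&" clause. The guarded-access pattern, as a small sketch:

    // Sketch: narHash may be absent, so test before dereferencing.
    if (info->narHash)
        std::cout << info->narHash->to_string(Base32, true) << "\n";
    // else: this path has no recorded NAR hash
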
@@ -15,19 +15,22 @@

 namespace nix {

+static size_t regularHashSize(HashType type) {
+    switch (type) {
+    case htMD5: return md5HashSize;
+    case htSHA1: return sha1HashSize;
+    case htSHA256: return sha256HashSize;
+    case htSHA512: return sha512HashSize;
+    }
+    abort();
+}

 std::set<std::string> hashTypes = { "md5", "sha1", "sha256", "sha512" };


 void Hash::init()
 {
-    assert(type);
-    switch (*type) {
-    case htMD5: hashSize = md5HashSize; break;
-    case htSHA1: hashSize = sha1HashSize; break;
-    case htSHA256: hashSize = sha256HashSize; break;
-    case htSHA512: hashSize = sha512HashSize; break;
-    }
+    hashSize = regularHashSize(type);
     assert(hashSize <= maxHashSize);
     memset(hash, 0, maxHashSize);
 }

@@ -108,17 +111,11 @@ string printHash16or32(const Hash & hash)
 }


-HashType assertInitHashType(const Hash & h)
-{
-    assert(h.type);
-    return *h.type;
-}
-
 std::string Hash::to_string(Base base, bool includeType) const
 {
     std::string s;
     if (base == SRI || includeType) {
-        s += printHashType(assertInitHashType(*this));
+        s += printHashType(type);
         s += base == SRI ? '-' : ':';
     }
     switch (base) {

@@ -139,60 +136,66 @@ std::string Hash::to_string(Base base, bool includeType) const
 Hash::Hash(std::string_view s, HashType type) : Hash(s, std::optional { type }) { }
 Hash::Hash(std::string_view s) : Hash(s, std::optional<HashType>{}) { }

-Hash::Hash(std::string_view s, std::optional<HashType> type)
-    : type(type)
+Hash::Hash(std::string_view original, std::optional<HashType> optType)
 {
+    auto rest = original;
+
     size_t pos = 0;
     bool isSRI = false;

-    auto sep = s.find(':');
-    if (sep == string::npos) {
-        sep = s.find('-');
-        if (sep != string::npos) {
-            isSRI = true;
-        } else if (! type)
-            throw BadHash("hash '%s' does not include a type", s);
+    // Parse the has type before the separater, if there was one.
+    std::optional<HashType> optParsedType;
+    {
+        auto sep = rest.find(':');
+        if (sep == std::string_view::npos) {
+            sep = rest.find('-');
+            if (sep != std::string_view::npos)
+                isSRI = true;
+        }
+        if (sep != std::string_view::npos) {
+            auto hashRaw = rest.substr(0, sep);
+            optParsedType = parseHashType(hashRaw);
+            rest = rest.substr(sep + 1);
+        }
     }

-    if (sep != string::npos) {
-        string hts = string(s, 0, sep);
-        this->type = parseHashType(hts);
-        if (!this->type)
-            throw BadHash("unknown hash type '%s'", hts);
-        if (type && type != this->type)
-            throw BadHash("hash '%s' should have type '%s'", s, printHashType(*type));
-        pos = sep + 1;
+    // Either the string or user must provide the type, if they both do they
+    // must agree.
+    if (!optParsedType && !optType) {
+        throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context.", rest);
+    } else {
+        this->type = optParsedType ? *optParsedType : *optType;
+        if (optParsedType && optType && *optParsedType != *optType)
+            throw BadHash("hash '%s' should have type '%s'", original, printHashType(*optType));
     }

     init();

-    size_t size = s.size() - pos;
-
-    if (!isSRI && size == base16Len()) {
+    if (!isSRI && rest.size() == base16Len()) {

         auto parseHexDigit = [&](char c) {
             if (c >= '0' && c <= '9') return c - '0';
             if (c >= 'A' && c <= 'F') return c - 'A' + 10;
             if (c >= 'a' && c <= 'f') return c - 'a' + 10;
-            throw BadHash("invalid base-16 hash '%s'", s);
+            throw BadHash("invalid base-16 hash '%s'", original);
         };

         for (unsigned int i = 0; i < hashSize; i++) {
             hash[i] =
-                parseHexDigit(s[pos + i * 2]) << 4
-                | parseHexDigit(s[pos + i * 2 + 1]);
+                parseHexDigit(rest[pos + i * 2]) << 4
+                | parseHexDigit(rest[pos + i * 2 + 1]);
         }
     }

-    else if (!isSRI && size == base32Len()) {
+    else if (!isSRI && rest.size() == base32Len()) {

-        for (unsigned int n = 0; n < size; ++n) {
-            char c = s[pos + size - n - 1];
+        for (unsigned int n = 0; n < rest.size(); ++n) {
+            char c = rest[rest.size() - n - 1];
             unsigned char digit;
             for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! slow */
                 if (base32Chars[digit] == c) break;
             if (digit >= 32)
-                throw BadHash("invalid base-32 hash '%s'", s);
+                throw BadHash("invalid base-32 hash '%s'", original);
             unsigned int b = n * 5;
             unsigned int i = b / 8;
             unsigned int j = b % 8;

@@ -202,21 +205,21 @@ Hash::Hash(std::string_view s, std::optional<HashType> type)
                 hash[i + 1] |= digit >> (8 - j);
             } else {
                 if (digit >> (8 - j))
-                    throw BadHash("invalid base-32 hash '%s'", s);
+                    throw BadHash("invalid base-32 hash '%s'", original);
             }
         }
     }

-    else if (isSRI || size == base64Len()) {
-        auto d = base64Decode(s.substr(pos));
+    else if (isSRI || rest.size() == base64Len()) {
+        auto d = base64Decode(rest);
         if (d.size() != hashSize)
-            throw BadHash("invalid %s hash '%s'", isSRI ? "SRI" : "base-64", s);
+            throw BadHash("invalid %s hash '%s'", isSRI ? "SRI" : "base-64", original);
         assert(hashSize);
         memcpy(hash, d.data(), hashSize);
     }

     else
-        throw BadHash("hash '%s' has wrong length for hash type '%s'", s, printHashType(*type));
+        throw BadHash("hash '%s' has wrong length for hash type '%s'", rest, printHashType(this->type));
 }

 Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht)
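In the rewritten parser the hash type can come from a "sha256:" (or SRI "sha256-") prefix in the string, from the optType argument, or both; it must come from at least one, and if both are given they have to agree. Roughly (the base-32 digest below is only an example value):

    // Sketch: where the hash type may come from under the new constructor.
    Hash a("sha256:1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s");       // type parsed from the prefix
    Hash b("1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s", htSHA256);    // type supplied by the caller
    // Hash c("1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s");           // throws BadHash: no type anywhere
    // Hash d("md5:...", htSHA256);                                              // throws BadHash: prefix and argument disagree
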
@@ -269,7 +272,7 @@ static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
 }


-Hash hashString(HashType ht, const string & s)
+Hash hashString(HashType ht, std::string_view s)
 {
     Ctx ctx;
     Hash hash(ht);

@@ -336,7 +339,7 @@ HashResult hashPath(

 Hash compressHash(const Hash & hash, unsigned int newSize)
 {
-    Hash h;
+    Hash h(hash.type);
     h.hashSize = newSize;
     for (unsigned int i = 0; i < hash.hashSize; ++i)
         h.hash[i % newSize] ^= hash.hash[i];

@@ -344,7 +347,7 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
 }


-std::optional<HashType> parseHashTypeOpt(const string & s)
+std::optional<HashType> parseHashTypeOpt(std::string_view s)
 {
     if (s == "md5") return htMD5;
     else if (s == "sha1") return htSHA1;

@@ -353,7 +356,7 @@ std::optional<HashType> parseHashTypeOpt(const string & s)
     else return std::optional<HashType> {};
 }

-HashType parseHashType(const string & s)
+HashType parseHashType(std::string_view s)
 {
     auto opt_h = parseHashTypeOpt(s);
     if (opt_h)

@@ -27,14 +27,11 @@ enum Base : int { Base64, Base32, Base16, SRI };

 struct Hash
 {
-    static const unsigned int maxHashSize = 64;
-    unsigned int hashSize = 0;
-    unsigned char hash[maxHashSize] = {};
+    constexpr static size_t maxHashSize = 64;
+    size_t hashSize = 0;
+    uint8_t hash[maxHashSize] = {};

-    std::optional<HashType> type = {};
+    HashType type;

-    /* Create an unset hash object. */
-    Hash() { };
-
     /* Create a zero-filled hash object. */
     Hash(HashType type) : type(type) { init(); };
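Hash::type is no longer optional and the default constructor is gone, so every Hash is constructed with a concrete type; the "unset hash" role moves to std::optional<Hash>, which is exactly the change rippling through the rest of this commit. In short:

    // Sketch: every Hash now carries a definite type; "no hash" is spelled with std::optional.
    Hash zero(htSHA256);            // zero-filled hash of a known type
    std::optional<Hash> pending;    // previously a default-constructed Hash() served this role
    // Hash h;                      // no longer compiles: the default constructor was removed
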
@@ -107,7 +104,7 @@ Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht);
 string printHash16or32(const Hash & hash);

 /* Compute the hash of the given string. */
-Hash hashString(HashType ht, const string & s);
+Hash hashString(HashType ht, std::string_view s);

 /* Compute the hash of the given file. */
 Hash hashFile(HashType ht, const Path & path);

@@ -123,10 +120,10 @@ HashResult hashPath(HashType ht, const Path & path,
 Hash compressHash(const Hash & hash, unsigned int newSize);

 /* Parse a string representing a hash type. */
-HashType parseHashType(const string & s);
+HashType parseHashType(std::string_view s);

 /* Will return nothing on parse error */
-std::optional<HashType> parseHashTypeOpt(const string & s);
+std::optional<HashType> parseHashTypeOpt(std::string_view s);

 /* And the reverse. */
 string printHashType(HashType ht);

@@ -154,7 +154,7 @@ static int _main(int argc, char * * argv)
         /* If an expected hash is given, the file may already exist in
            the store. */
         std::optional<Hash> expectedHash;
-        Hash hash;
+        Hash hash(ht);
         std::optional<StorePath> storePath;
         if (args.size() == 2) {
             expectedHash = Hash(args[1], ht);

@@ -372,8 +372,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
             for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
                 auto info = store->queryPathInfo(j);
                 if (query == qHash) {
-                    assert(info->narHash.type == htSHA256);
-                    cout << fmt("%s\n", info->narHash.to_string(Base32, true));
+                    assert(info->narHash && info->narHash->type == htSHA256);
+                    cout << fmt("%s\n", info->narHash->to_string(Base32, true));
                 } else if (query == qSize)
                     cout << fmt("%d\n", info->narSize);
             }

@@ -725,7 +725,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
         auto path = store->followLinksToStorePath(i);
         printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
         auto info = store->queryPathInfo(path);
-        HashSink sink(*info->narHash.type);
+        HashSink sink(info->narHash->type);
         store->narFromPath(path, sink);
         auto current = sink.finish();
         if (current.first != info->narHash) {

@@ -734,7 +734,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
                 .hint = hintfmt(
                     "path '%s' was modified! expected hash '%s', got '%s'",
                     store->printStorePath(path),
-                    info->narHash.to_string(Base32, true),
+                    info->narHash->to_string(Base32, true),
                     current.first.to_string(Base32, true))
             });
             status = 1;

@@ -864,7 +864,9 @@ static void opServe(Strings opFlags, Strings opArgs)
                     out << info->narSize // downloadSize
                         << info->narSize;
                     if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
-                        out << (info->narHash ? info->narHash.to_string(Base32, true) : "") << renderContentAddress(info->ca) << info->sigs;
+                        out << (info->narHash ? info->narHash->to_string(Base32, true) : "")
+                            << renderContentAddress(info->ca)
+                            << info->sigs;
                 } catch (InvalidPath &) {
                 }
             }

@@ -50,7 +50,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
         info.narSize = sink.s->size();
         info.ca = std::optional { FixedOutputHash {
             .method = FileIngestionMethod::Recursive,
-            .hash = info.narHash,
+            .hash = *info.narHash,
         } };

         if (!dryRun) {

@@ -276,7 +276,7 @@ std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
 Installable::getCursors(EvalState & state, bool useEvalCache)
 {
     auto evalCache =
-        std::make_shared<nix::eval_cache::EvalCache>(false, Hash(), state,
+        std::make_shared<nix::eval_cache::EvalCache>(std::nullopt, state,
             [&]() { return toValue(state).first; });
     return {{evalCache->getRoot(), ""}};
 }

@@ -437,9 +437,11 @@ ref<eval_cache::EvalCache> openEvalCache(
     std::shared_ptr<flake::LockedFlake> lockedFlake,
     bool useEvalCache)
 {
-    return ref(std::make_shared<nix::eval_cache::EvalCache>(
-        useEvalCache && evalSettings.pureEval,
-        lockedFlake->getFingerprint(),
+    auto fingerprint = lockedFlake->getFingerprint();
+    return make_ref<nix::eval_cache::EvalCache>(
+        useEvalCache && evalSettings.pureEval
+            ? std::optional { std::cref(fingerprint) }
+            : std::nullopt,
         state,
         [&state, lockedFlake]()
         {

@@ -457,7 +459,7 @@ ref<eval_cache::EvalCache> openEvalCache(
             assert(aOutputs);

             return aOutputs->value;
-        }));
+        });
 }

 static std::string showAttrPaths(const std::vector<std::string> & paths)

@@ -84,7 +84,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
             info.narSize = sink.s->size();
             info.ca = FixedOutputHash {
                 .method = FileIngestionMethod::Recursive,
-                .hash = info.narHash,
+                .hash = *info.narHash,
             };

             if (!json)

@@ -133,7 +133,7 @@ struct ProfileManifest
         info.references = std::move(references);
         info.narHash = narHash;
         info.narSize = sink.s->size();
-        info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = info.narHash };
+        info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = *info.narHash };

         auto source = StringSource { *sink.s };
         store->addToStore(info, source);

@@ -91,15 +91,15 @@ struct CmdVerify : StorePathsCommand

                 std::unique_ptr<AbstractHashSink> hashSink;
                 if (!info->ca)
-                    hashSink = std::make_unique<HashSink>(*info->narHash.type);
+                    hashSink = std::make_unique<HashSink>(info->narHash->type);
                 else
-                    hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
+                    hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));

                 store->narFromPath(info->path, *hashSink);

                 auto hash = hashSink->finish();

-                if (hash.first != info->narHash) {
+                if (hash.first != *info->narHash) {
                     corrupted++;
                     act2.result(resCorruptedPath, store->printStorePath(info->path));
                     logError({

@@ -107,7 +107,7 @@ struct CmdVerify : StorePathsCommand
                         .hint = hintfmt(
                             "path '%s' was modified! expected hash '%s', got '%s'",
                             store->printStorePath(info->path),
-                            info->narHash.to_string(Base32, true),
+                            info->narHash->to_string(Base32, true),
                             hash.first.to_string(Base32, true))
                     });
                 }