refactor: make HashType and Base enum classes for type safety

Change-Id: I9fbd55a9d50464a56fe11cb42a06a206914150d8
Jade Lovelace 2024-08-01 13:42:02 -07:00
parent f3ef0899c7
commit 370ac940dd
57 changed files with 257 additions and 260 deletions
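
The refactor itself is mechanical: every bare enumerator (`htSHA1`, `htSHA256`, `Base16`, `Base32`, `SRI`, ...) becomes a scoped name (`HashType::SHA1`, `Base::Base32`, ...). As a rough sketch of the resulting declarations (the header itself is not part of the excerpt below, so the exact enumerator list and underlying types shown here are assumptions):

// Sketch of the scoped enums after the refactor (assumed shape; not taken
// verbatim from the diff). enum class keeps the enumerators out of the
// enclosing namespace and blocks implicit conversion to int, so a Base can
// no longer be passed where a HashType is expected.
enum class HashType : char { MD5, SHA1, SHA256, SHA512 };

enum class Base : int { Base64, Base32, Base16, SRI };

// Call sites therefore change from, e.g.:
//     hashString(htSHA256, s).to_string(Base32, false)
// to:
//     hashString(HashType::SHA256, s).to_string(Base::Base32, false)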


@@ -77,7 +77,7 @@ SV * queryReferences(char * path)
 SV * queryPathHash(char * path)
 PPCODE:
 try {
-auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
+auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base::Base32, true);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());
@@ -103,7 +103,7 @@ SV * queryPathInfo(char * path, int base32)
 XPUSHs(&PL_sv_undef);
 else
 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
+auto s = info->narHash.to_string(base32 ? Base::Base32 : Base::Base16, true);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 mXPUSHi(info->registrationTime);
 mXPUSHi(info->narSize);
@@ -205,7 +205,7 @@ SV * hashPath(char * algo, int base32, char * path)
 PPCODE:
 try {
 Hash h = hashPath(parseHashType(algo), path).first;
-auto s = h.to_string(base32 ? Base32 : Base16, false);
+auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());
@@ -216,7 +216,7 @@ SV * hashFile(char * algo, int base32, char * path)
 PPCODE:
 try {
 Hash h = hashFile(parseHashType(algo), path);
-auto s = h.to_string(base32 ? Base32 : Base16, false);
+auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());
@@ -227,7 +227,7 @@ SV * hashString(char * algo, int base32, char * s)
 PPCODE:
 try {
 Hash h = hashString(parseHashType(algo), s);
-auto s = h.to_string(base32 ? Base32 : Base16, false);
+auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());
@@ -238,7 +238,7 @@ SV * convertHash(char * algo, char * s, int toBase32)
 PPCODE:
 try {
 auto h = Hash::parseAny(s, parseHashType(algo));
-auto s = h.to_string(toBase32 ? Base32 : Base16, false);
+auto s = h.to_string(toBase32 ? Base::Base32 : Base::Base16, false);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());


@@ -42,7 +42,7 @@ static std::string makeLockFilename(const std::string & storeUri) {
 // This avoids issues with the escaped URI being very long and causing
 // path too long errors, while also avoiding any possibility of collision
 // caused by simple truncation.
-auto hash = hashString(HashType::htSHA256, storeUri).to_string(Base::Base32, false);
+auto hash = hashString(HashType::SHA256, storeUri).to_string(Base::Base32, false);
 return escapeUri(storeUri).substr(0, 48) + "-" + hash.substr(0, 16);
 }


@@ -246,7 +246,7 @@ StorePath ProfileManifest::build(ref<Store> store)
 StringSink sink;
 sink << dumpPath(tempDir);
-auto narHash = hashString(htSHA256, sink.s);
+auto narHash = hashString(HashType::SHA256, sink.s);
 ValidPathInfo info{
 *store,


@@ -50,7 +50,7 @@ struct AttrDb
 Path cacheDir = getCacheDir() + "/nix/eval-cache-v5";
 createDirs(cacheDir);
-Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite";
+Path dbPath = cacheDir + "/" + fingerprint.to_string(Base::Base16, false) + ".sqlite";
 state->db = SQLite(dbPath);
 state->db.isCache();


@@ -937,7 +937,7 @@ Fingerprint LockedFlake::getFingerprint() const
 // FIXME: as an optimization, if the flake contains a lock file
 // and we haven't changed it, then it's sufficient to use
 // flake.sourceInfo.storePath for the fingerprint.
-return hashString(htSHA256,
+return hashString(HashType::SHA256,
 fmt("%s;%s;%d;%d;%s",
 flake.sourceInfo->storePath.to_string(),
 flake.lockedRef.subdir,


@@ -1336,7 +1336,7 @@ drvName, Bindings * attrs, Value & v)
 state.error<EvalError>("derivation cannot be both content-addressed and impure")
 .atPos(v).debugThrow();
-auto ht = parseHashTypeOpt(outputHashAlgo).value_or(htSHA256);
+auto ht = parseHashTypeOpt(outputHashAlgo).value_or(HashType::SHA256);
 auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive);
 for (auto & i : outputs) {
@@ -1764,7 +1764,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V
 auto path = realisePath(state, pos, *args[1]);
-v.mkString(hashString(*ht, path.readFile()).to_string(Base16, false));
+v.mkString(hashString(*ht, path.readFile()).to_string(Base::Base16, false));
 }
 static RegisterPrimOp primop_hashFile({
@@ -2346,7 +2346,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
 else if (n == "recursive")
 method = FileIngestionMethod { state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path") };
 else if (n == "sha256")
-expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), htSHA256);
+expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashType::SHA256);
 else
 state.error<EvalError>(
 "unsupported argument '%1%' to 'addPath'",
@@ -3861,7 +3861,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args,
 NixStringContext context; // discarded
 auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");
-v.mkString(hashString(*ht, s).to_string(Base16, false));
+v.mkString(hashString(*ht, s).to_string(Base::Base16, false));
 }
 static RegisterPrimOp primop_hashString({


@@ -31,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
 // be both a revision or a branch/tag name.
 auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial");
 if (std::regex_match(value.begin(), value.end(), revRegex))
-rev = Hash::parseAny(value, htSHA1);
+rev = Hash::parseAny(value, HashType::SHA1);
 else
 ref = value;
 }
@@ -73,7 +73,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
 attrs2.alloc("branch").mkString(*input2.getRef());
 // Backward compatibility: set 'rev' to
 // 0000000000000000000000000000000000000000 for a dirty tree.
-auto rev2 = input2.getRev().value_or(Hash(htSHA1));
+auto rev2 = input2.getRev().value_or(Hash(HashType::SHA1));
 attrs2.alloc("rev").mkString(rev2.gitRev());
 attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12));
 if (auto revCount = input2.getRevCount())


@@ -32,7 +32,7 @@ void emitTreeAttrs(
 auto narHash = input.getNarHash();
 assert(narHash);
-attrs.alloc("narHash").mkString(narHash->to_string(SRI, true));
+attrs.alloc("narHash").mkString(narHash->to_string(Base::SRI, true));
 if (input.getType() == "git")
 attrs.alloc("submodules").mkBool(
@@ -45,7 +45,7 @@ void emitTreeAttrs(
 attrs.alloc("shortRev").mkString(rev->gitShortRev());
 } else if (emptyRevFallback) {
 // Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
-auto emptyHash = Hash(htSHA1);
+auto emptyHash = Hash(HashType::SHA1);
 attrs.alloc("rev").mkString(emptyHash.gitRev());
 attrs.alloc("shortRev").mkString(emptyHash.gitShortRev());
 }
@@ -226,7 +226,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
 if (n == "url")
 url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch");
 else if (n == "sha256")
-expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), htSHA256);
+expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashType::SHA256);
 else if (n == "name")
 name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
 else
@@ -252,7 +252,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
 state.error<EvalError>("in pure evaluation mode, '%s' requires a 'sha256' argument", who).atPos(pos).debugThrow();
 // early exit if pinned and already in the store
-if (expectedHash && expectedHash->type == htSHA256) {
+if (expectedHash && expectedHash->type == HashType::SHA256) {
 auto expectedPath = state.store->makeFixedOutputPath(
 name,
 FixedOutputInfo {
@@ -277,13 +277,13 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
 if (expectedHash) {
 auto hash = unpack
 ? state.store->queryPathInfo(storePath)->narHash
-: hashFile(htSHA256, state.store->toRealPath(storePath));
+: hashFile(HashType::SHA256, state.store->toRealPath(storePath));
 if (hash != *expectedHash) {
 state.error<EvalError>(
 "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
 *url,
-expectedHash->to_string(Base32, true),
-hash.to_string(Base32, true)
+expectedHash->to_string(Base::Base32, true),
+hash.to_string(Base::Base32, true)
 ).withExitStatus(102)
 .debugThrow();
 }


@@ -18,8 +18,8 @@ StorePath fetchToStore(
 return
 settings.readOnlyMode
-? store.computeStorePathForPath(name, path.path.abs(), method, htSHA256, filter2).first
-: store.addToStore(name, path.path.abs(), method, htSHA256, filter2, repair);
+? store.computeStorePathForPath(name, path.path.abs(), method, HashType::SHA256, filter2).first
+: store.addToStore(name, path.path.abs(), method, HashType::SHA256, filter2, repair);
 }


@@ -153,12 +153,12 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
 };
 auto narHash = store->queryPathInfo(tree.storePath)->narHash;
-input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
+input.attrs.insert_or_assign("narHash", narHash.to_string(Base::SRI, true));
 if (auto prevNarHash = getNarHash()) {
 if (narHash != *prevNarHash)
 throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
-to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
+to_string(), tree.actualPath, prevNarHash->to_string(Base::SRI, true), narHash.to_string(Base::SRI, true));
 }
 if (auto prevLastModified = getLastModified()) {
@@ -240,8 +240,8 @@ std::string Input::getType() const
 std::optional<Hash> Input::getNarHash() const
 {
 if (auto s = maybeGetStrAttr(attrs, "narHash")) {
-auto hash = s->empty() ? Hash(htSHA256) : Hash::parseSRI(*s);
-if (hash.type != htSHA256)
+auto hash = s->empty() ? Hash(HashType::SHA256) : Hash::parseSRI(*s);
+if (hash.type != HashType::SHA256)
 throw UsageError("narHash must use SHA-256");
 return hash;
 }
@@ -264,7 +264,7 @@ std::optional<Hash> Input::getRev() const
 hash = Hash::parseAnyPrefixed(*s);
 } catch (BadHash &e) {
 // Default to sha1 for backwards compatibility with existing flakes
-hash = Hash::parseAny(*s, htSHA1);
+hash = Hash::parseAny(*s, HashType::SHA1);
 }
 }


@@ -49,7 +49,7 @@ bool touchCacheFile(const Path & path, time_t touch_time)
 Path getCachePath(std::string_view key)
 {
 return getCacheDir() + "/nix/gitv3/" +
-hashString(htSHA256, key).to_string(Base32, false);
+hashString(HashType::SHA256, key).to_string(Base::Base32, false);
 }
 // Returns the name of the HEAD branch.
@@ -238,7 +238,7 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
 return files.count(file);
 };
-auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
+auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashType::SHA256, filter);
 // FIXME: maybe we should use the timestamp of the last
 // modified dirty file?
@@ -437,8 +437,8 @@ struct GitInputScheme : InputScheme
 auto checkHashType = [&](const std::optional<Hash> & hash)
 {
-if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
-throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(Base16, true));
+if (hash.has_value() && !(hash->type == HashType::SHA1 || hash->type == HashType::SHA256))
+throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(Base::Base16, true));
 };
 auto getLockedAttrs = [&]()
@@ -501,7 +501,7 @@ struct GitInputScheme : InputScheme
 if (!input.getRev())
 input.attrs.insert_or_assign("rev",
-Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "--git-dir", gitDir, "rev-parse", *input.getRef() })), htSHA1).gitRev());
+Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "--git-dir", gitDir, "rev-parse", *input.getRef() })), HashType::SHA1).gitRev());
 repoDir = actualUrl;
 } else {
@@ -521,7 +521,7 @@ struct GitInputScheme : InputScheme
 }
 if (auto res = getCache()->lookup(store, unlockedAttrs)) {
-auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
+auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashType::SHA1);
 if (!input.getRev() || input.getRev() == rev2) {
 input.attrs.insert_or_assign("rev", rev2.gitRev());
 return makeResult(res->first, std::move(res->second));
@@ -599,7 +599,7 @@ struct GitInputScheme : InputScheme
 }
 if (!input.getRev())
-input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
+input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), HashType::SHA1).gitRev());
 // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
 }
@@ -695,7 +695,7 @@ struct GitInputScheme : InputScheme
 unpackTarfile(*proc.getStdout(), tmpDir);
 }
-auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
+auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, HashType::SHA256, filter);
 auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));


@@ -149,7 +149,7 @@ struct GitArchiveInputScheme : InputScheme
 auto path = owner + "/" + repo;
 assert(!(ref && rev));
 if (ref) path += "/" + *ref;
-if (rev) path += "/" + rev->to_string(Base16, false);
+if (rev) path += "/" + rev->to_string(Base::Base16, false);
 return ParsedURL {
 .scheme = type(),
 .path = path,
@@ -274,7 +274,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
 readFile(
 store->toRealPath(
 downloadFile(store, url, "source", false, headers).storePath)));
-auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1);
+auto rev = Hash::parseAny(std::string { json["sha"] }, HashType::SHA1);
 debug("HEAD revision for '%s' is %s", url, rev.gitRev());
 return rev;
 }
@@ -295,7 +295,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
 : "https://api.%s/repos/%s/%s/tarball/%s";
 const auto url = fmt(urlFmt, host, getOwner(input), getRepo(input),
-input.getRev()->to_string(Base16, false));
+input.getRev()->to_string(Base::Base16, false));
 return DownloadUrl { url, headers };
 }
@@ -347,7 +347,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
 store->toRealPath(
 downloadFile(store, url, "source", false, headers).storePath)));
 if (json.is_array() && json.size() >= 1 && json[0]["id"] != nullptr) {
-auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1);
+auto rev = Hash::parseAny(std::string(json[0]["id"]), HashType::SHA1);
 debug("HEAD revision for '%s' is %s", url, rev.gitRev());
 return rev;
 } else if (json.is_array() && json.size() == 0) {
@@ -367,7 +367,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
 auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
 auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
 host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
-input.getRev()->to_string(Base16, false));
+input.getRev()->to_string(Base::Base16, false));
 Headers headers = makeHeadersWithAuthTokens(host);
 return DownloadUrl { url, headers };
@@ -444,7 +444,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
 if(!id)
 throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref);
-auto rev = Hash::parseAny(*id, htSHA1);
+auto rev = Hash::parseAny(*id, HashType::SHA1);
 debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev());
 return rev;
 }
@@ -454,7 +454,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
 auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
 auto url = fmt("https://%s/%s/%s/archive/%s.tar.gz",
 host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
-input.getRev()->to_string(Base16, false));
+input.getRev()->to_string(Base::Base16, false));
 Headers headers = makeHeadersWithAuthTokens(host);
 return DownloadUrl { url, headers };


@@ -22,14 +22,14 @@ struct IndirectInputScheme : InputScheme
 if (path.size() == 1) {
 } else if (path.size() == 2) {
 if (std::regex_match(path[1], revRegex))
-rev = Hash::parseAny(path[1], htSHA1);
+rev = Hash::parseAny(path[1], HashType::SHA1);
 else if (std::regex_match(path[1], refRegex))
 ref = path[1];
 else
 throw BadURL("in flake URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[1]);
 } else if (path.size() == 3) {
 ref = path[1];
-rev = Hash::parseAny(path[2], htSHA1);
+rev = Hash::parseAny(path[2], HashType::SHA1);
 } else
 throw BadURL("GitHub URL '%s' is invalid", url.url);


@@ -198,7 +198,7 @@ struct MercurialInputScheme : InputScheme
 return files.count(file);
 };
-auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
+auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashType::SHA256, filter);
 return {std::move(storePath), input};
 }
@@ -208,8 +208,8 @@ struct MercurialInputScheme : InputScheme
 auto checkHashType = [&](const std::optional<Hash> & hash)
 {
-if (hash.has_value() && hash->type != htSHA1)
-throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(Base16, true));
+if (hash.has_value() && hash->type != HashType::SHA1)
+throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(Base::Base16, true));
 };
@@ -248,14 +248,14 @@ struct MercurialInputScheme : InputScheme
 });
 if (auto res = getCache()->lookup(store, unlockedAttrs)) {
-auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
+auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashType::SHA1);
 if (!input.getRev() || input.getRev() == rev2) {
 input.attrs.insert_or_assign("rev", rev2.gitRev());
 return makeResult(res->first, std::move(res->second));
 }
 }
-Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false));
+Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashType::SHA256, actualUrl).to_string(Base::Base32, false));
 /* If this is a commit hash that we already have, we don't
 have to pull again. */
@@ -289,7 +289,7 @@ struct MercurialInputScheme : InputScheme
 runHg({ "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
 assert(tokens.size() == 3);
-input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], htSHA1).gitRev());
+input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], HashType::SHA1).gitRev());
 auto revCount = std::stoull(tokens[1]);
 input.attrs.insert_or_assign("ref", tokens[2]);


@@ -72,7 +72,7 @@ DownloadFileResult downloadFile(
 } else {
 StringSink sink;
 sink << dumpString(res.data);
-auto hash = hashString(htSHA256, res.data);
+auto hash = hashString(HashType::SHA256, res.data);
 ValidPathInfo info {
 *store,
 name,
@@ -81,7 +81,7 @@ DownloadFileResult downloadFile(
 .hash = hash,
 .references = {},
 },
-hashString(htSHA256, sink.s),
+hashString(HashType::SHA256, sink.s),
 };
 info.narSize = sink.s.size();
 auto source = StringSource { sink.s };
@@ -155,7 +155,7 @@ DownloadTarballResult downloadTarball(
 throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
 auto topDir = tmpDir + "/" + members.begin()->name;
 lastModified = lstat(topDir).st_mtime;
-unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
+unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashType::SHA256, defaultPathFilter, NoRepair);
 }
 Attrs infoAttrs({
@@ -238,7 +238,7 @@ struct CurlInputScheme : InputScheme
 // NAR hashes are preferred over file hashes since tar/zip
 // files don't have a canonical representation.
 if (auto narHash = input.getNarHash())
-url.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
+url.query.insert_or_assign("narHash", narHash->to_string(Base::SRI, true));
 return url;
 }


@@ -128,9 +128,9 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
 /* Read the NAR simultaneously into a CompressionSink+FileSink (to
 write the compressed NAR to disk), into a HashSink (to get the
 NAR hash), and into a NarAccessor (to get the NAR listing). */
-HashSink fileHashSink { htSHA256 };
+HashSink fileHashSink { HashType::SHA256 };
 std::shared_ptr<FSAccessor> narAccessor;
-HashSink narHashSink { htSHA256 };
+HashSink narHashSink { HashType::SHA256 };
 {
 FdSink fileSink(fdTemp.get());
 TeeSink teeSinkCompressed { fileSink, fileHashSink };
@@ -150,7 +150,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
 auto [fileHash, fileSize] = fileHashSink.finish();
 narInfo->fileHash = fileHash;
 narInfo->fileSize = fileSize;
-narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar"
+narInfo->url = "nar/" + narInfo->fileHash->to_string(Base::Base32, false) + ".nar"
 + (compression == "xz" ? ".xz" :
 compression == "bzip2" ? ".bz2" :
 compression == "zstd" ? ".zst" :
@@ -288,7 +288,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
 StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name,
 FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
 {
-if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256)
+if (method != FileIngestionMethod::Recursive || hashAlgo != HashType::SHA256)
 unsupported("addToStoreFromDump");
 return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
 ValidPathInfo info {
@@ -425,7 +425,7 @@ StorePath BinaryCacheStore::addTextToStore(
 const StorePathSet & references,
 RepairFlag repair)
 {
-auto textHash = hashString(htSHA256, s);
+auto textHash = hashString(HashType::SHA256, s);
 auto path = makeTextPath(name, TextInfo { { textHash }, references });
 if (!repair && isValidPath(path))


@@ -818,8 +818,8 @@ void LocalDerivationGoal::initTmpDir() {
 if (passAsFile.find(i.first) == passAsFile.end()) {
 env[i.first] = i.second;
 } else {
-auto hash = hashString(htSHA256, i.first);
-std::string fn = ".attr-" + hash.to_string(Base32, false);
+auto hash = hashString(HashType::SHA256, i.first);
+std::string fn = ".attr-" + hash.to_string(Base::Base32, false);
 Path p = tmpDir + "/" + fn;
 writeFile(p, rewriteStrings(i.second, inputRewrites));
 chownToBuilder(p);
@@ -2147,7 +2147,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
 std::string(newInfo0.path.hashPart())}});
 }
-HashResult narHashAndSize = hashPath(htSHA256, actualPath);
+HashResult narHashAndSize = hashPath(HashType::SHA256, actualPath);
 newInfo0.narHash = narHashAndSize.first;
 newInfo0.narSize = narHashAndSize.second;
@@ -2167,7 +2167,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
 std::string { scratchPath->hashPart() },
 std::string { requiredFinalPath.hashPart() });
 rewriteOutput(outputRewrites);
-auto narHashAndSize = hashPath(htSHA256, actualPath);
+auto narHashAndSize = hashPath(HashType::SHA256, actualPath);
 ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
 newInfo0.narSize = narHashAndSize.second;
 auto refs = rewriteRefs();
@@ -2204,8 +2204,8 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
 BuildError("hash mismatch in fixed-output derivation '%s':\n likely URL: %s\n specified: %s\n got: %s",
 worker.store.printStorePath(drvPath),
 guessedUrl,
-wanted.to_string(SRI, true),
-got.to_string(SRI, true)));
+wanted.to_string(Base::SRI, true),
+got.to_string(Base::SRI, true)));
 }
 if (!newInfo0.references.empty())
 delayedException = std::make_exception_ptr(
@@ -2608,7 +2608,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(OutputNameView outputName)
 {
 return worker.store.makeStorePath(
 "rewrite:" + std::string(drvPath.to_string()) + ":name:" + std::string(outputName),
-Hash(htSHA256), outputPathName(drv->name, outputName));
+Hash(HashType::SHA256), outputPathName(drv->name, outputName));
 }
@@ -2616,7 +2616,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(const StorePath & path)
 {
 return worker.store.makeStorePath(
 "rewrite:" + std::string(drvPath.to_string()) + ":" + std::string(path.to_string()),
-Hash(htSHA256), path.name());
+Hash(HashType::SHA256), path.name());
 }


@@ -580,7 +580,7 @@ bool Worker::pathContentsGood(const StorePath & path)
 res = false;
 else {
 HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
-Hash nullHash(htSHA256);
+Hash nullHash(HashType::SHA256);
 res = info->narHash == nullHash || info->narHash == current.first;
 }
 pathContentsGoodCache.insert_or_assign(path, res);


@@ -60,7 +60,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
 if (!hashedMirror.ends_with("/")) hashedMirror += '/';
 std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
 Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
-fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
+fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base::Base16, false));
 return;
 } catch (Error & e) {
 debug(e.what());


@@ -61,7 +61,7 @@ std::string ContentAddress::render() const
 + makeFileIngestionPrefix(method);
 },
 }, method.raw)
-+ this->hash.to_string(Base32, true);
++ this->hash.to_string(Base::Base32, true);
 }
 /**


@@ -416,7 +416,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 // TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
 return std::visit(overloaded {
 [&](const TextIngestionMethod &) {
-if (hashType != htSHA256)
+if (hashType != HashType::SHA256)
 throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
 name, printHashType(hashType));
 // We could stream this by changing Store
@@ -875,7 +875,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 bool repair, dontCheckSigs;
 auto path = store->parseStorePath(readString(from));
 auto deriver = readString(from);
-auto narHash = Hash::parseAny(readString(from), htSHA256);
+auto narHash = Hash::parseAny(readString(from), HashType::SHA256);
 ValidPathInfo info { path, narHash };
 if (deriver != "")
 info.deriver = store->parseStorePath(deriver);


@@ -592,7 +592,7 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs,
 [&](const DerivationOutput::CAFixed & dof) {
 s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first)));
 s += ','; printUnquotedString(s, dof.ca.printMethodAlgo());
-s += ','; printUnquotedString(s, dof.ca.hash.to_string(Base16, false));
+s += ','; printUnquotedString(s, dof.ca.hash.to_string(Base::Base16, false));
 },
 [&](const DerivationOutput::CAFloating & dof) {
 s += ','; printUnquotedString(s, "");
@@ -823,9 +823,9 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
 std::map<std::string, Hash> outputHashes;
 for (const auto & i : drv.outputs) {
 auto & dof = std::get<DerivationOutput::CAFixed>(i.second.raw);
-auto hash = hashString(htSHA256, "fixed:out:"
+auto hash = hashString(HashType::SHA256, "fixed:out:"
 + dof.ca.printMethodAlgo() + ":"
-+ dof.ca.hash.to_string(Base16, false) + ":"
++ dof.ca.hash.to_string(Base::Base16, false) + ":"
 + store.printStorePath(dof.path(store, drv.name, i.first)));
 outputHashes.insert_or_assign(i.first, std::move(hash));
 }
@@ -870,11 +870,11 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
 const auto h = get(res.hashes, outputName);
 if (!h)
 throw Error("no hash for output '%s' of derivation '%s'", outputName, drv.name);
-inputs2[h->to_string(Base16, false)].value.insert(outputName);
+inputs2[h->to_string(Base::Base16, false)].value.insert(outputName);
 }
 }
-auto hash = hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2));
+auto hash = hashString(HashType::SHA256, drv.unparse(store, maskOutputs, &inputs2));
 std::map<std::string, Hash> outputHashes;
 for (const auto & [outputName, _] : drv.outputs) {
@@ -975,7 +975,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
 [&](const DerivationOutput::CAFixed & dof) {
 out << store.printStorePath(dof.path(store, drv.name, i.first))
 << dof.ca.printMethodAlgo()
-<< dof.ca.hash.to_string(Base16, false);
+<< dof.ca.hash.to_string(Base::Base16, false);
 },
 [&](const DerivationOutput::CAFloating & dof) {
 out << ""
@@ -1007,7 +1007,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
 std::string hashPlaceholder(const OutputNameView outputName)
 {
 // FIXME: memoize?
-return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(Base32, false);
+return "/" + hashString(HashType::SHA256, concatStrings("nix-output:", outputName)).to_string(Base::Base32, false);
 }
@@ -1199,7 +1199,7 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const
 }
-const Hash impureOutputHash = hashString(htSHA256, "impure");
+const Hash impureOutputHash = hashString(HashType::SHA256, "impure");
 nlohmann::json DerivationOutput::toJSON(
 const Store & store, std::string_view drvName, OutputNameView outputName) const
@@ -1212,7 +1212,7 @@ nlohmann::json DerivationOutput::toJSON(
 [&](const DerivationOutput::CAFixed & dof) {
 res["path"] = store.printStorePath(dof.path(store, drvName, outputName));
 res["hashAlgo"] = dof.ca.printMethodAlgo();
-res["hash"] = dof.ca.hash.to_string(Base16, false);
+res["hash"] = dof.ca.hash.to_string(Base::Base16, false);
 // FIXME print refs?
 },
 [&](const DerivationOutput::CAFloating & dof) {


@@ -5,7 +5,7 @@ namespace nix {
 std::string DownstreamPlaceholder::render() const
 {
-return "/" + hash.to_string(Base32, false);
+return "/" + hash.to_string(Base::Base32, false);
 }
@@ -19,7 +19,7 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput(
 auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4);
 auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName);
 return DownstreamPlaceholder {
-hashString(htSHA256, clearText)
+hashString(HashType::SHA256, clearText)
 };
 }
@@ -31,10 +31,10 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation(
 xpSettings.require(Xp::DynamicDerivations);
 auto compressed = compressHash(placeholder.hash, 20);
 auto clearText = "nix-computed-output:"
-+ compressed.to_string(Base32, false)
++ compressed.to_string(Base::Base32, false)
 + ":" + std::string { outputName };
 return DownstreamPlaceholder {
-hashString(htSHA256, clearText)
+hashString(HashType::SHA256, clearText)
 };
 }


@@ -30,7 +30,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
 {
 auto info = queryPathInfo(path);
-HashSink hashSink(htSHA256);
+HashSink hashSink(HashType::SHA256);
 TeeSink teeSink(sink, hashSink);
 teeSink << narFromPath(path);
@@ -41,7 +41,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
 Hash hash = hashSink.currentHash().first;
 if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
 throw Error("hash of path '%s' has changed from '%s' to '%s'!",
-printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
+printStorePath(path), info->narHash.to_string(Base::Base32, true), hash.to_string(Base::Base32, true));
 teeSink
 << exportMagic
@@ -77,7 +77,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
 auto references = CommonProto::Serialise<StorePathSet>::read(*this,
 CommonProto::ReadConn { .from = source });
 auto deriver = readString(source);
-auto narHash = hashString(htSHA256, saved.s);
+auto narHash = hashString(HashType::SHA256, saved.s);
 ValidPathInfo info { path, narHash };
 if (deriver != "")


@@ -42,7 +42,7 @@ static void makeSymlink(const Path & link, const Path & target)
 void LocalStore::addIndirectRoot(const Path & path)
 {
-std::string hash = hashString(htSHA1, path).to_string(Base32, false);
+std::string hash = hashString(HashType::SHA1, path).to_string(Base::Base32, false);
 Path realRoot = canonPath(fmt("%1%/%2%/auto/%3%", stateDir, gcRootsDir, hash));
 makeSymlink(realRoot, path);
 }


@@ -183,7 +183,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
 << ServeProto::Command::AddToStoreNar
 << printStorePath(info.path)
 << (info.deriver ? printStorePath(*info.deriver) : "")
-<< info.narHash.to_string(Base16, false);
+<< info.narHash.to_string(Base::Base16, false);
 conn->to << ServeProto::write(*this, *conn, info.references);
 conn->to
 << info.registrationTime


@@ -842,7 +842,7 @@ uint64_t LocalStore::addValidPath(State & state,
 state.stmts->RegisterValidPath.use()
 (printStorePath(info.path))
-(info.narHash.to_string(Base16, true))
+(info.narHash.to_string(Base::Base16, true))
 (info.registrationTime == 0 ? time(0) : info.registrationTime)
 (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
 (info.narSize, info.narSize != 0)
@@ -945,7 +945,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
 {
 state.stmts->UpdatePathInfo.use()
 (info.narSize, info.narSize != 0)
-(info.narHash.to_string(Base16, true))
+(info.narHash.to_string(Base::Base16, true))
 (info.ultimate ? 1 : 0, info.ultimate)
 (concatStringsSep(" ", info.sigs), !info.sigs.empty())
 (renderContentAddress(info.ca), (bool) info.ca)
@@ -1123,7 +1123,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
 StorePathSet paths;
 for (auto & [_, i] : infos) {
-assert(i.narHash.type == htSHA256);
+assert(i.narHash.type == HashType::SHA256);
 if (isValidPath_(*state, i.path))
 updatePathInfo(*state, i);
 else
@@ -1241,7 +1241,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 /* While restoring the path from the NAR, compute the hash
 of the NAR. */
-HashSink hashSink(htSHA256);
+HashSink hashSink(HashType::SHA256);
 TeeSource wrapperSource { source, hashSink };
@@ -1252,7 +1252,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 if (hashResult.first != info.narHash)
 throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s",
-printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
+printStorePath(info.path), info.narHash.to_string(Base::Base32, true), hashResult.first.to_string(Base::Base32, true));
 if (hashResult.second != info.narSize)
 throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s",
@@ -1268,8 +1268,8 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 if (specified.hash != actualHash.hash) {
 throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
 printStorePath(info.path),
-specified.hash.to_string(Base32, true),
-actualHash.hash.to_string(Base32, true));
+specified.hash.to_string(Base::Base32, true),
+actualHash.hash.to_string(Base::Base32, true));
 }
 }
@@ -1404,8 +1404,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
 /* For computing the nar hash. In recursive SHA-256 mode, this
 is the same as the store hash, so no need to do it again. */
 auto narHash = std::pair { hash, size };
-if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) {
-HashSink narSink { htSHA256 };
+if (method != FileIngestionMethod::Recursive || hashAlgo != HashType::SHA256) {
+HashSink narSink { HashType::SHA256 };
 narSink << dumpPath(realPath);
 narHash = narSink.finish();
 }
@@ -1436,7 +1436,7 @@ StorePath LocalStore::addTextToStore(
 std::string_view s,
 const StorePathSet & references, RepairFlag repair)
 {
-auto hash = hashString(htSHA256, s);
+auto hash = hashString(HashType::SHA256, s);
 auto dstPath = makeTextPath(name, TextInfo {
 .hash = hash,
 .references = references,
@@ -1462,7 +1462,7 @@ StorePath LocalStore::addTextToStore(
 StringSink sink;
 sink << dumpString(s);
-auto narHash = hashString(htSHA256, sink.s);
+auto narHash = hashString(HashType::SHA256, sink.s);
 optimisePath(realPath, repair);
@@ -1573,7 +1573,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
 for (auto & link : readDirectory(linksDir)) {
 printMsg(lvlTalkative, "checking contents of '%s'", link.name);
 Path linkPath = linksDir + "/" + link.name;
-std::string hash = hashPath(htSHA256, linkPath).first.to_string(Base32, false);
+std::string hash = hashPath(HashType::SHA256, linkPath).first.to_string(Base::Base32, false);
 if (hash != link.name) {
 printError("link '%s' was modified! expected hash '%s', got '%s'",
 linkPath, link.name, hash);
@@ -1590,7 +1590,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
 printInfo("checking store hashes...");
-Hash nullHash(htSHA256);
+Hash nullHash(HashType::SHA256);
 for (auto & i : validPaths) {
 try {
@@ -1606,7 +1606,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
 if (info->narHash != nullHash && info->narHash != current.first) {
 printError("path '%s' was modified! expected hash '%s', got '%s'",
-printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true));
+printStorePath(i), info->narHash.to_string(Base::Base32, true), current.first.to_string(Base::Base32, true));
 if (repair) repairPath(i); else errors = true;
 } else {


@@ -45,7 +45,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
 auto narModuloHash = [&] {
 StringSource source{sink.s};
-return computeHashModulo(htSHA256, oldHashPart, source).first;
+return computeHashModulo(HashType::SHA256, oldHashPart, source).first;
 }();
 ValidPathInfo info {
@@ -63,7 +63,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
 const auto rewritten = rewriteStrings(sink.s, {{oldHashPart, std::string(info.path.hashPart())}});
-info.narHash = hashString(htSHA256, rewritten);
+info.narHash = hashString(HashType::SHA256, rewritten);
 info.narSize = sink.s.size();
 StringSource source(rewritten);


@@ -334,9 +334,9 @@ public:
 (std::string(info->path.name()))
 (narInfo ? narInfo->url : "", narInfo != 0)
 (narInfo ? narInfo->compression : "", narInfo != 0)
-(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
+(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base::Base32, true) : "", narInfo && narInfo->fileHash)
 (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
-(info->narHash.to_string(Base32, true))
+(info->narHash.to_string(Base::Base32, true))
 (info->narSize)
 (concatStringsSep(" ", info->shortRefs()))
 (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)


@ -104,11 +104,11 @@ std::string NarInfo::to_string(const Store & store) const
res += "URL: " + url + "\n"; res += "URL: " + url + "\n";
assert(compression != ""); assert(compression != "");
res += "Compression: " + compression + "\n"; res += "Compression: " + compression + "\n";
assert(fileHash && fileHash->type == htSHA256); assert(fileHash && fileHash->type == HashType::SHA256);
res += "FileHash: " + fileHash->to_string(Base32, true) + "\n"; res += "FileHash: " + fileHash->to_string(Base::Base32, true) + "\n";
res += "FileSize: " + std::to_string(fileSize) + "\n"; res += "FileSize: " + std::to_string(fileSize) + "\n";
assert(narHash.type == htSHA256); assert(narHash.type == HashType::SHA256);
res += "NarHash: " + narHash.to_string(Base32, true) + "\n"; res += "NarHash: " + narHash.to_string(Base::Base32, true) + "\n";
res += "NarSize: " + std::to_string(narSize) + "\n"; res += "NarSize: " + std::to_string(narSize) + "\n";
res += "References: " + concatStringsSep(" ", shortRefs()) + "\n"; res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";

@ -2,14 +2,11 @@
#include "globals.hh" #include "globals.hh"
#include "signals.hh" #include "signals.hh"
#include <cstdlib>
#include <cstring> #include <cstring>
#include <sys/types.h> #include <sys/types.h>
#include <sys/stat.h> #include <sys/stat.h>
#include <unistd.h> #include <unistd.h>
#include <errno.h> #include <errno.h>
#include <stdio.h>
#include <regex>
namespace nix { namespace nix {
@ -145,17 +142,17 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
Also note that if `path' is a symlink, then we're hashing the Also note that if `path' is a symlink, then we're hashing the
contents of the symlink (i.e. the result of readlink()), not contents of the symlink (i.e. the result of readlink()), not
the contents of the target (which may not even exist). */ the contents of the target (which may not even exist). */
Hash hash = hashPath(htSHA256, path).first; Hash hash = hashPath(HashType::SHA256, path).first;
debug("'%1%' has hash '%2%'", path, hash.to_string(Base32, true)); debug("'%1%' has hash '%2%'", path, hash.to_string(Base::Base32, true));
/* Check if this is a known hash. */ /* Check if this is a known hash. */
Path linkPath = linksDir + "/" + hash.to_string(Base32, false); Path linkPath = linksDir + "/" + hash.to_string(Base::Base32, false);
/* Maybe delete the link, if it has been corrupted. */ /* Maybe delete the link, if it has been corrupted. */
if (pathExists(linkPath)) { if (pathExists(linkPath)) {
auto stLink = lstat(linkPath); auto stLink = lstat(linkPath);
if (st.st_size != stLink.st_size if (st.st_size != stLink.st_size
|| (repair && hash != hashPath(htSHA256, linkPath).first)) || (repair && hash != hashPath(HashType::SHA256, linkPath).first))
{ {
// XXX: Consider overwriting linkPath with our valid version. // XXX: Consider overwriting linkPath with our valid version.
warn("removing corrupted link '%s'", linkPath); warn("removing corrupted link '%s'", linkPath);

@ -29,7 +29,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
store.printStorePath(path)); store.printStorePath(path));
return return
"1;" + store.printStorePath(path) + ";" "1;" + store.printStorePath(path) + ";"
+ narHash.to_string(Base32, true) + ";" + narHash.to_string(Base::Base32, true) + ";"
+ std::to_string(narSize) + ";" + std::to_string(narSize) + ";"
+ concatStringsSep(",", store.printStorePathSet(references)); + concatStringsSep(",", store.printStorePathSet(references));
} }
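
Written out, the fingerprint that gets signed therefore has this shape (an illustration only; every field is fake):

    1;/nix/store/<32-char base32>-hello-2.12;sha256:<52-char base32 NAR hash>;226488;/nix/store/<32-char base32>-glibc-2.39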

@ -46,7 +46,7 @@ std::pair<StorePathSet, HashResult> scanForReferences(
const std::string & path, const std::string & path,
const StorePathSet & refs) const StorePathSet & refs)
{ {
HashSink hashSink { htSHA256 }; HashSink hashSink { HashType::SHA256 };
auto found = scanForReferences(hashSink, path, refs); auto found = scanForReferences(hashSink, path, refs);
auto hash = hashSink.finish(); auto hash = hashSink.finish();
return std::pair<StorePathSet, HashResult>(found, hash); return std::pair<StorePathSet, HashResult>(found, hash);
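
As a reminder of the HashSink interface used here, a small sketch (assuming Sink's operator() for feeding chunks and finish() returning a {Hash, size} pair, both of which appear elsewhere in this diff; hashTwoChunks is an illustrative name):

    #include "hash.hh"

    // Feed two chunks through a SHA-256 sink and get back the digest plus the
    // total number of bytes consumed.
    nix::HashResult hashTwoChunks(std::string_view a, std::string_view b)
    {
        nix::HashSink sink { nix::HashType::SHA256 };
        sink(a);
        sink(b);
        return sink.finish();
    }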

@ -46,7 +46,7 @@ StorePath::StorePath(std::string_view _baseName)
} }
StorePath::StorePath(const Hash & hash, std::string_view _name) StorePath::StorePath(const Hash & hash, std::string_view _name)
: baseName((hash.to_string(Base32, false) + "-").append(std::string(_name))) : baseName((hash.to_string(Base::Base32, false) + "-").append(std::string(_name)))
{ {
checkName(baseName, name()); checkName(baseName, name());
} }
@ -60,7 +60,7 @@ StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x");
StorePath StorePath::random(std::string_view name) StorePath StorePath::random(std::string_view name)
{ {
Hash hash(htSHA1); Hash hash(HashType::SHA1);
randombytes_buf(hash.hash, hash.hashSize); randombytes_buf(hash.hash, hash.hashSize);
return StorePath(hash, name); return StorePath(hash, name);
} }

@ -39,7 +39,7 @@ struct DrvOutput {
std::string to_string() const; std::string to_string() const;
std::string strHash() const std::string strHash() const
{ return drvHash.to_string(Base16, true); } { return drvHash.to_string(Base::Base16, true); }
static DrvOutput parse(const std::string &); static DrvOutput parse(const std::string &);

@ -397,7 +397,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
std::visit(overloaded { std::visit(overloaded {
[&](const TextIngestionMethod & thm) -> void { [&](const TextIngestionMethod & thm) -> void {
if (hashType != htSHA256) if (hashType != HashType::SHA256)
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given", throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
name, printHashType(hashType)); name, printHashType(hashType));
std::string s = dump.drain(); std::string s = dump.drain();
@ -409,7 +409,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
conn->to conn->to
<< WorkerProto::Op::AddToStore << WorkerProto::Op::AddToStore
<< name << name
<< ((hashType == htSHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */ << ((hashType == HashType::SHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< (fim == FileIngestionMethod::Recursive ? 1 : 0) << (fim == FileIngestionMethod::Recursive ? 1 : 0)
<< printHashType(hashType); << printHashType(hashType);
@ -461,7 +461,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
conn->to << WorkerProto::Op::AddToStoreNar conn->to << WorkerProto::Op::AddToStoreNar
<< printStorePath(info.path) << printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "") << (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(Base16, false); << info.narHash.to_string(Base::Base16, false);
conn->to << WorkerProto::write(*this, *conn, info.references); conn->to << WorkerProto::write(*this, *conn, info.references);
conn->to << info.registrationTime << info.narSize conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << renderContentAddress(info.ca) << info.ultimate << info.sigs << renderContentAddress(info.ca)
@ -533,7 +533,7 @@ StorePath RemoteStore::addTextToStore(
RepairFlag repair) RepairFlag repair)
{ {
StringSource source(s); StringSource source(s);
return addCAToStore(source, name, TextIngestionMethod {}, htSHA256, references, repair)->path; return addCAToStore(source, name, TextIngestionMethod {}, HashType::SHA256, references, repair)->path;
} }
void RemoteStore::registerDrvOutput(const Realisation & info) void RemoteStore::registerDrvOutput(const Realisation & info)
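
The two integers written right after the name encode a small backwards-compatibility table; a sketch of that mapping follows (encodeAddToStoreFlags is an illustrative name, not a helper in the tree, and the header location is an assumption):

    #include <utility>
    #include "store-api.hh"   // HashType, FileIngestionMethod

    // First value: 0 selects the old "recursive, SHA-256, fixed-output" fast path
    // on older daemons; anything else tells the daemon to read the following fields.
    // Second value: 1 for recursive (NAR) ingestion, 0 for flat ingestion.
    std::pair<int, int> encodeAddToStoreFlags(nix::HashType ht, nix::FileIngestionMethod fim)
    {
        using namespace nix;
        int legacyFastPath =
            (ht == HashType::SHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1;
        int recursive = fim == FileIngestionMethod::Recursive ? 1 : 0;
        return {legacyFastPath, recursive};
    }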

@ -84,7 +84,7 @@ public:
* Add a content-addressable store path. Does not support references. `dump` will be drained. * Add a content-addressable store path. Does not support references. `dump` will be drained.
*/ */
StorePath addToStoreFromDump(Source & dump, std::string_view name, StorePath addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override; FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
void addToStore(const ValidPathInfo & info, Source & nar, void addToStore(const ValidPathInfo & info, Source & nar,
RepairFlag repair, CheckSigsFlag checkSigs) override; RepairFlag repair, CheckSigsFlag checkSigs) override;

@ -88,7 +88,7 @@ WireFormatGenerator ServeProto::Serialise<UnkeyedValidPathInfo>::write(const Sto
co_yield info.narSize; // downloadSize, lie a little co_yield info.narSize; // downloadSize, lie a little
co_yield info.narSize; co_yield info.narSize;
if (GET_PROTOCOL_MINOR(conn.version) >= 4) { if (GET_PROTOCOL_MINOR(conn.version) >= 4) {
co_yield info.narHash.to_string(Base32, true); co_yield info.narHash.to_string(Base::Base32, true);
co_yield renderContentAddress(info.ca); co_yield renderContentAddress(info.ca);
co_yield info.sigs; co_yield info.sigs;
} }

@ -156,7 +156,7 @@ StorePath Store::makeStorePath(std::string_view type,
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */ /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
auto s = std::string(type) + ":" + std::string(hash) auto s = std::string(type) + ":" + std::string(hash)
+ ":" + storeDir + ":" + std::string(name); + ":" + storeDir + ":" + std::string(name);
auto h = compressHash(hashString(htSHA256, s), 20); auto h = compressHash(hashString(HashType::SHA256, s), 20);
return StorePath(h, name); return StorePath(h, name);
} }
@ -164,7 +164,7 @@ StorePath Store::makeStorePath(std::string_view type,
StorePath Store::makeStorePath(std::string_view type, StorePath Store::makeStorePath(std::string_view type,
const Hash & hash, std::string_view name) const const Hash & hash, std::string_view name) const
{ {
return makeStorePath(type, hash.to_string(Base16, true), name); return makeStorePath(type, hash.to_string(Base::Base16, true), name);
} }
@ -194,7 +194,7 @@ static std::string makeType(
StorePath Store::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const StorePath Store::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const
{ {
if (info.hash.type == htSHA256 && info.method == FileIngestionMethod::Recursive) { if (info.hash.type == HashType::SHA256 && info.method == FileIngestionMethod::Recursive) {
return makeStorePath(makeType(*this, "source", info.references), info.hash, name); return makeStorePath(makeType(*this, "source", info.references), info.hash, name);
} else { } else {
if (!info.references.empty()) { if (!info.references.empty()) {
@ -202,10 +202,10 @@ StorePath Store::makeFixedOutputPath(std::string_view name, const FixedOutputInf
name); name);
} }
return makeStorePath("output:out", return makeStorePath("output:out",
hashString(htSHA256, hashString(HashType::SHA256,
"fixed:out:" "fixed:out:"
+ makeFileIngestionPrefix(info.method) + makeFileIngestionPrefix(info.method)
+ info.hash.to_string(Base16, true) + ":"), + info.hash.to_string(Base::Base16, true) + ":"),
name); name);
} }
} }
@ -213,7 +213,7 @@ StorePath Store::makeFixedOutputPath(std::string_view name, const FixedOutputInf
StorePath Store::makeTextPath(std::string_view name, const TextInfo & info) const StorePath Store::makeTextPath(std::string_view name, const TextInfo & info) const
{ {
assert(info.hash.type == htSHA256); assert(info.hash.type == HashType::SHA256);
return makeStorePath( return makeStorePath(
makeType(*this, "text", StoreReferences { makeType(*this, "text", StoreReferences {
.others = info.references, .others = info.references,
@ -259,7 +259,7 @@ StorePath Store::computeStorePathForText(
const StorePathSet & references) const const StorePathSet & references) const
{ {
return makeTextPath(name, TextInfo { return makeTextPath(name, TextInfo {
.hash = hashString(htSHA256, s), .hash = hashString(HashType::SHA256, s),
.references = references, .references = references,
}); });
} }
@ -407,7 +407,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo, FileIngestionMethod method, HashType hashAlgo,
std::optional<Hash> expectedCAHash) std::optional<Hash> expectedCAHash)
{ {
HashSink narHashSink { htSHA256 }; HashSink narHashSink { HashType::SHA256 };
HashSink caHashSink { hashAlgo }; HashSink caHashSink { hashAlgo };
/* Note that fileSink and unusualHashTee must be mutually exclusive, since /* Note that fileSink and unusualHashTee must be mutually exclusive, since
@ -416,7 +416,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
RetrieveRegularNARSink fileSink { caHashSink }; RetrieveRegularNARSink fileSink { caHashSink };
TeeSink unusualHashTee { narHashSink, caHashSink }; TeeSink unusualHashTee { narHashSink, caHashSink };
auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256 auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != HashType::SHA256
? static_cast<Sink &>(unusualHashTee) ? static_cast<Sink &>(unusualHashTee)
: narHashSink; : narHashSink;
@ -442,7 +442,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
finish. */ finish. */
auto [narHash, narSize] = narHashSink.finish(); auto [narHash, narSize] = narHashSink.finish();
auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256 auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashType::SHA256
? narHash ? narHash
: caHashSink.finish().first; : caHashSink.finish().first;
@ -854,7 +854,7 @@ std::string Store::makeValidityRegistration(const StorePathSet & paths,
auto info = queryPathInfo(i); auto info = queryPathInfo(i);
if (showHash) { if (showHash) {
s += info->narHash.to_string(Base16, false) + "\n"; s += info->narHash.to_string(Base::Base16, false) + "\n";
s += fmt("%1%\n", info->narSize); s += fmt("%1%\n", info->narSize);
} }
@ -1257,7 +1257,7 @@ std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istre
if (!hashGiven) { if (!hashGiven) {
std::string s; std::string s;
getline(str, s); getline(str, s);
auto narHash = Hash::parseAny(s, htSHA256); auto narHash = Hash::parseAny(s, HashType::SHA256);
getline(str, s); getline(str, s);
auto narSize = string2Int<uint64_t>(s); auto narSize = string2Int<uint64_t>(s);
if (!narSize) throw Error("number expected"); if (!narSize) throw Error("number expected");
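
Pulling the pieces of this file together, the digest part of a store path can be sketched like this (helper name and arguments are illustrative; the fingerprint format is the one in the comment at the top of this hunk):

    #include "hash.hh"

    // Compute the 32-character digest that precedes the name in a store path,
    // i.e. the "1abc..." part of /nix/store/1abc...-foo.tar.gz.
    std::string storePathDigest(std::string_view type, std::string_view innerHash,
                                std::string_view storeDir, std::string_view name)
    {
        using namespace nix;
        // e.g. "source:sha256:1abc...:/nix/store:foo.tar.gz"
        auto s = std::string(type) + ":" + std::string(innerHash)
            + ":" + std::string(storeDir) + ":" + std::string(name);
        Hash h = compressHash(hashString(HashType::SHA256, s), 20);
        return h.to_string(Base::Base32, false);   // 20 bytes -> 32 base-32 characters
    }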

@ -311,7 +311,7 @@ public:
*/ */
std::pair<StorePath, Hash> computeStorePathForPath(std::string_view name, std::pair<StorePath, Hash> computeStorePathForPath(std::string_view name,
const Path & srcPath, FileIngestionMethod method = FileIngestionMethod::Recursive, const Path & srcPath, FileIngestionMethod method = FileIngestionMethod::Recursive,
HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const; HashType hashAlgo = HashType::SHA256, PathFilter & filter = defaultPathFilter) const;
/** /**
* Preparatory part of addTextToStore(). * Preparatory part of addTextToStore().
@ -524,7 +524,7 @@ public:
std::string_view name, std::string_view name,
const Path & srcPath, const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive, FileIngestionMethod method = FileIngestionMethod::Recursive,
HashType hashAlgo = htSHA256, HashType hashAlgo = HashType::SHA256,
PathFilter & filter = defaultPathFilter, PathFilter & filter = defaultPathFilter,
RepairFlag repair = NoRepair, RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet()); const StorePathSet & references = StorePathSet());
@ -535,7 +535,7 @@ public:
* memory. * memory.
*/ */
ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath, ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256,
std::optional<Hash> expectedCAHash = {}); std::optional<Hash> expectedCAHash = {});
/** /**
@ -548,7 +548,7 @@ public:
* \todo remove? * \todo remove?
*/ */
virtual StorePath addToStoreFromDump(Source & dump, std::string_view name, virtual StorePath addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair, FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet()) const StorePathSet & references = StorePathSet())
{ unsupported("addToStoreFromDump"); } { unsupported("addToStoreFromDump"); }
@ -679,7 +679,7 @@ public:
*/ */
nlohmann::json pathInfoToJSON(const StorePathSet & storePaths, nlohmann::json pathInfoToJSON(const StorePathSet & storePaths,
bool includeImpureInfo, bool showClosureSize, bool includeImpureInfo, bool showClosureSize,
Base hashBase = Base32, Base hashBase = Base::Base32,
AllowInvalidFlag allowInvalid = DisallowInvalid); AllowInvalidFlag allowInvalid = DisallowInvalid);
/** /**

@ -158,7 +158,7 @@ WireFormatGenerator WorkerProto::Serialise<ValidPathInfo>::write(const Store & s
UnkeyedValidPathInfo WorkerProto::Serialise<UnkeyedValidPathInfo>::read(const Store & store, ReadConn conn) UnkeyedValidPathInfo WorkerProto::Serialise<UnkeyedValidPathInfo>::read(const Store & store, ReadConn conn)
{ {
auto deriver = readString(conn.from); auto deriver = readString(conn.from);
auto narHash = Hash::parseAny(readString(conn.from), htSHA256); auto narHash = Hash::parseAny(readString(conn.from), HashType::SHA256);
UnkeyedValidPathInfo info(narHash); UnkeyedValidPathInfo info(narHash);
if (deriver != "") info.deriver = store.parseStorePath(deriver); if (deriver != "") info.deriver = store.parseStorePath(deriver);
info.references = WorkerProto::Serialise<StorePathSet>::read(store, conn); info.references = WorkerProto::Serialise<StorePathSet>::read(store, conn);
@ -174,7 +174,7 @@ UnkeyedValidPathInfo WorkerProto::Serialise<UnkeyedValidPathInfo>::read(const St
WireFormatGenerator WorkerProto::Serialise<UnkeyedValidPathInfo>::write(const Store & store, WriteConn conn, const UnkeyedValidPathInfo & pathInfo) WireFormatGenerator WorkerProto::Serialise<UnkeyedValidPathInfo>::write(const Store & store, WriteConn conn, const UnkeyedValidPathInfo & pathInfo)
{ {
co_yield (pathInfo.deriver ? store.printStorePath(*pathInfo.deriver) : ""); co_yield (pathInfo.deriver ? store.printStorePath(*pathInfo.deriver) : "");
co_yield pathInfo.narHash.to_string(Base16, false); co_yield pathInfo.narHash.to_string(Base::Base16, false);
co_yield WorkerProto::write(store, conn, pathInfo.references); co_yield WorkerProto::write(store, conn, pathInfo.references);
co_yield pathInfo.registrationTime; co_yield pathInfo.registrationTime;
co_yield pathInfo.narSize; co_yield pathInfo.narSize;

@ -15,7 +15,7 @@
namespace nix { namespace nix {
enum HashType : char; enum class HashType : char;
class MultiCommand; class MultiCommand;

@ -18,10 +18,10 @@ namespace nix {
static size_t regularHashSize(HashType type) { static size_t regularHashSize(HashType type) {
switch (type) { switch (type) {
case htMD5: return md5HashSize; case HashType::MD5: return md5HashSize;
case htSHA1: return sha1HashSize; case HashType::SHA1: return sha1HashSize;
case htSHA256: return sha256HashSize; case HashType::SHA256: return sha256HashSize;
case htSHA512: return sha512HashSize; case HashType::SHA512: return sha512HashSize;
} }
abort(); abort();
} }
@ -109,33 +109,33 @@ static std::string printHash32(const Hash & hash)
std::string printHash16or32(const Hash & hash) std::string printHash16or32(const Hash & hash)
{ {
return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false); return hash.to_string(hash.type == HashType::MD5 ? Base::Base16 : Base::Base32, false);
} }
std::string Hash::to_string(Base base, bool includeType) const std::string Hash::to_string(Base base, bool includeType) const
{ {
std::string s; std::string s;
if (base == SRI || includeType) { if (base == Base::SRI || includeType) {
s += printHashType(type); s += printHashType(type);
s += base == SRI ? '-' : ':'; s += base == Base::SRI ? '-' : ':';
} }
switch (base) { switch (base) {
case Base16: case Base::Base16:
s += printHash16(*this); s += printHash16(*this);
break; break;
case Base32: case Base::Base32:
s += printHash32(*this); s += printHash32(*this);
break; break;
case Base64: case Base::Base64:
case SRI: case Base::SRI:
s += base64Encode(std::string_view((const char *) hash, hashSize)); s += base64Encode(std::string_view((const char *) hash, hashSize));
break; break;
} }
return s; return s;
} }
Hash Hash::dummy(htSHA256); Hash Hash::dummy(HashType::SHA256);
Hash Hash::parseSRI(std::string_view original) { Hash Hash::parseSRI(std::string_view original) {
auto rest = original; auto rest = original;
@ -265,7 +265,7 @@ Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht)
if (!ht) if (!ht)
throw BadHash("empty hash requires explicit hash type"); throw BadHash("empty hash requires explicit hash type");
Hash h(*ht); Hash h(*ht);
warn("found empty hash, assuming '%s'", h.to_string(SRI, true)); warn("found empty hash, assuming '%s'", h.to_string(Base::SRI, true));
return h; return h;
} else } else
return Hash::parseAny(hashStr, ht); return Hash::parseAny(hashStr, ht);
@ -283,29 +283,29 @@ union Ctx
static void start(HashType ht, Ctx & ctx) static void start(HashType ht, Ctx & ctx)
{ {
if (ht == htMD5) MD5_Init(&ctx.md5); if (ht == HashType::MD5) MD5_Init(&ctx.md5);
else if (ht == htSHA1) SHA1_Init(&ctx.sha1); else if (ht == HashType::SHA1) SHA1_Init(&ctx.sha1);
else if (ht == htSHA256) SHA256_Init(&ctx.sha256); else if (ht == HashType::SHA256) SHA256_Init(&ctx.sha256);
else if (ht == htSHA512) SHA512_Init(&ctx.sha512); else if (ht == HashType::SHA512) SHA512_Init(&ctx.sha512);
} }
static void update(HashType ht, Ctx & ctx, static void update(HashType ht, Ctx & ctx,
std::string_view data) std::string_view data)
{ {
if (ht == htMD5) MD5_Update(&ctx.md5, data.data(), data.size()); if (ht == HashType::MD5) MD5_Update(&ctx.md5, data.data(), data.size());
else if (ht == htSHA1) SHA1_Update(&ctx.sha1, data.data(), data.size()); else if (ht == HashType::SHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
else if (ht == htSHA256) SHA256_Update(&ctx.sha256, data.data(), data.size()); else if (ht == HashType::SHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
else if (ht == htSHA512) SHA512_Update(&ctx.sha512, data.data(), data.size()); else if (ht == HashType::SHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());
} }
static void finish(HashType ht, Ctx & ctx, unsigned char * hash) static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
{ {
if (ht == htMD5) MD5_Final(hash, &ctx.md5); if (ht == HashType::MD5) MD5_Final(hash, &ctx.md5);
else if (ht == htSHA1) SHA1_Final(hash, &ctx.sha1); else if (ht == HashType::SHA1) SHA1_Final(hash, &ctx.sha1);
else if (ht == htSHA256) SHA256_Final(hash, &ctx.sha256); else if (ht == HashType::SHA256) SHA256_Final(hash, &ctx.sha256);
else if (ht == htSHA512) SHA512_Final(hash, &ctx.sha512); else if (ht == HashType::SHA512) SHA512_Final(hash, &ctx.sha512);
} }
@ -386,10 +386,10 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
std::optional<HashType> parseHashTypeOpt(std::string_view s) std::optional<HashType> parseHashTypeOpt(std::string_view s)
{ {
if (s == "md5") return htMD5; if (s == "md5") return HashType::MD5;
else if (s == "sha1") return htSHA1; else if (s == "sha1") return HashType::SHA1;
else if (s == "sha256") return htSHA256; else if (s == "sha256") return HashType::SHA256;
else if (s == "sha512") return htSHA512; else if (s == "sha512") return HashType::SHA512;
else return std::optional<HashType> {}; else return std::optional<HashType> {};
} }
@ -405,10 +405,10 @@ HashType parseHashType(std::string_view s)
std::string_view printHashType(HashType ht) std::string_view printHashType(HashType ht)
{ {
switch (ht) { switch (ht) {
case htMD5: return "md5"; case HashType::MD5: return "md5";
case htSHA1: return "sha1"; case HashType::SHA1: return "sha1";
case htSHA256: return "sha256"; case HashType::SHA256: return "sha256";
case htSHA512: return "sha512"; case HashType::SHA512: return "sha512";
default: default:
// illegal hash type enum value internally, as opposed to external input // illegal hash type enum value internally, as opposed to external input
// which should be validated with nice error message. // which should be validated with nice error message.
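
A small usage sketch tying these helpers together (a sketch only; the printed forms are indicated in the comments, using the sha256 "abc" test vector that also appears in the unit tests further down):

    #include <iostream>
    #include "hash.hh"

    int main()
    {
        using namespace nix;
        HashType ht = parseHashType("sha256");          // throws BadHash for unknown names
        Hash h = hashString(ht, "abc");
        std::cout << h.to_string(Base::Base16, true) << "\n";  // "sha256:ba7816bf..."
        std::cout << h.to_string(Base::Base32, true) << "\n";  // Nix-specific base-32
        std::cout << h.to_string(Base::SRI, true)    << "\n";  // "sha256-ungWv48B..."
        // The textual forms round-trip through parseAny:
        Hash again = Hash::parseAny(h.to_string(Base::Base32, true), ht);
        return again == h ? 0 : 1;
    }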

@ -12,7 +12,7 @@ namespace nix {
MakeError(BadHash, Error); MakeError(BadHash, Error);
enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 }; enum class HashType : char { MD5 = 42, SHA1, SHA256, SHA512 };
const int md5HashSize = 16; const int md5HashSize = 16;
@ -24,7 +24,7 @@ extern std::set<std::string> hashTypes;
extern const std::string base32Chars; extern const std::string base32Chars;
enum Base : int { Base64, Base32, Base16, SRI }; enum class Base : int { Base64, Base32, Base16, SRI };
struct Hash struct Hash
@ -119,12 +119,12 @@ public:
std::string gitRev() const std::string gitRev() const
{ {
return to_string(Base16, false); return to_string(Base::Base16, false);
} }
std::string gitShortRev() const std::string gitShortRev() const
{ {
return std::string(to_string(Base16, false), 0, 7); return std::string(to_string(Base::Base16, false), 0, 7);
} }
static Hash dummy; static Hash dummy;
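
The practical effect of the switch to scoped enums, sketched below: the old unqualified spellings and the silent integer conversions they allowed no longer compile, which is the type-safety gain named in the commit message.

    #include "hash.hh"

    void example(const nix::Hash & h)
    {
        using namespace nix;
        // Old spellings now fail to compile:
        //   h.to_string(Base32, true);     // error: 'Base32' was not declared in this scope
        //   HashType t = htSHA256;         // error: 'htSHA256' was not declared in this scope
        //   int n = HashType::SHA256;      // error: no implicit conversion from a scoped enum
        // Only the qualified forms are accepted:
        auto s = h.to_string(Base::Base32, true);
        HashType t = HashType::SHA256;
        (void) s; (void) t;
    }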

@ -404,8 +404,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) { for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
auto info = store->queryPathInfo(j); auto info = store->queryPathInfo(j);
if (query == qHash) { if (query == qHash) {
assert(info->narHash.type == htSHA256); assert(info->narHash.type == HashType::SHA256);
cout << fmt("%s\n", info->narHash.to_string(Base32, true)); cout << fmt("%s\n", info->narHash.to_string(Base::Base32, true));
} else if (query == qSize) } else if (query == qSize)
cout << fmt("%d\n", info->narSize); cout << fmt("%d\n", info->narSize);
} }
@ -540,7 +540,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
if (canonicalise) if (canonicalise)
canonicalisePathMetaData(store->printStorePath(info->path), {}); canonicalisePathMetaData(store->printStorePath(info->path), {});
if (!hashGiven) { if (!hashGiven) {
HashResult hash = hashPath(htSHA256, store->printStorePath(info->path)); HashResult hash = hashPath(HashType::SHA256, store->printStorePath(info->path));
info->narHash = hash.first; info->narHash = hash.first;
info->narSize = hash.second; info->narSize = hash.second;
} }
@ -768,8 +768,8 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
if (current.first != info->narHash) { if (current.first != info->narHash) {
printError("path '%s' was modified! expected hash '%s', got '%s'", printError("path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(path), store->printStorePath(path),
info->narHash.to_string(Base32, true), info->narHash.to_string(Base::Base32, true),
current.first.to_string(Base32, true)); current.first.to_string(Base::Base32, true));
status = 1; status = 1;
} }
} }
@ -970,7 +970,7 @@ static void opServe(Strings opFlags, Strings opArgs)
auto deriver = readString(in); auto deriver = readString(in);
ValidPathInfo info { ValidPathInfo info {
store->parseStorePath(path), store->parseStorePath(path),
Hash::parseAny(readString(in), htSHA256), Hash::parseAny(readString(in), HashType::SHA256),
}; };
if (deriver != "") if (deriver != "")
info.deriver = store->parseStorePath(deriver); info.deriver = store->parseStorePath(deriver);

@ -32,11 +32,11 @@ struct CmdAddToStore : MixDryRun, StoreCommand
StringSink sink; StringSink sink;
sink << dumpPath(path); sink << dumpPath(path);
auto narHash = hashString(htSHA256, sink.s); auto narHash = hashString(HashType::SHA256, sink.s);
Hash hash = narHash; Hash hash = narHash;
if (ingestionMethod == FileIngestionMethod::Flat) { if (ingestionMethod == FileIngestionMethod::Flat) {
HashSink hsink(htSHA256); HashSink hsink(HashType::SHA256);
hsink << readFileSource(path); hsink << readFileSource(path);
hash = hsink.finish().first; hash = hsink.finish().first;
} }

@ -226,7 +226,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl
j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs()); j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs());
if (auto rev = flake.lockedRef.input.getRev()) if (auto rev = flake.lockedRef.input.getRev())
j["revision"] = rev->to_string(Base16, false); j["revision"] = rev->to_string(Base::Base16, false);
if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev")) if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev"))
j["dirtyRevision"] = *dirtyRev; j["dirtyRevision"] = *dirtyRev;
if (auto revCount = flake.lockedRef.input.getRevCount()) if (auto revCount = flake.lockedRef.input.getRevCount())
@ -253,7 +253,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
if (auto rev = flake.lockedRef.input.getRev()) if (auto rev = flake.lockedRef.input.getRev())
logger->cout( logger->cout(
ANSI_BOLD "Revision:" ANSI_NORMAL " %s", ANSI_BOLD "Revision:" ANSI_NORMAL " %s",
rev->to_string(Base16, false)); rev->to_string(Base::Base16, false));
if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev")) if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev"))
logger->cout( logger->cout(
ANSI_BOLD "Revision:" ANSI_NORMAL " %s", ANSI_BOLD "Revision:" ANSI_NORMAL " %s",
@ -1434,13 +1434,13 @@ struct CmdFlakePrefetch : FlakeCommand, MixJSON
if (json) { if (json) {
auto res = nlohmann::json::object(); auto res = nlohmann::json::object();
res["storePath"] = store->printStorePath(tree.storePath); res["storePath"] = store->printStorePath(tree.storePath);
res["hash"] = hash.to_string(SRI, true); res["hash"] = hash.to_string(Base::SRI, true);
logger->cout(res.dump()); logger->cout(res.dump());
} else { } else {
notice("Downloaded '%s' to '%s' (hash '%s').", notice("Downloaded '%s' to '%s' (hash '%s').",
lockedRef.to_string(), lockedRef.to_string(),
store->printStorePath(tree.storePath), store->printStorePath(tree.storePath),
hash.to_string(SRI, true)); hash.to_string(Base::SRI, true));
} }
} }
}; };

@ -11,9 +11,9 @@ using namespace nix;
struct CmdHashBase : Command struct CmdHashBase : Command
{ {
FileIngestionMethod mode; FileIngestionMethod mode;
Base base = SRI; Base base = Base::SRI;
bool truncate = false; bool truncate = false;
HashType ht = htSHA256; HashType ht = HashType::SHA256;
std::vector<std::string> paths; std::vector<std::string> paths;
std::optional<std::string> modulus; std::optional<std::string> modulus;
@ -22,25 +22,25 @@ struct CmdHashBase : Command
addFlag({ addFlag({
.longName = "sri", .longName = "sri",
.description = "Print the hash in SRI format.", .description = "Print the hash in SRI format.",
.handler = {&base, SRI}, .handler = {&base, Base::SRI},
}); });
addFlag({ addFlag({
.longName = "base64", .longName = "base64",
.description = "Print the hash in base-64 format.", .description = "Print the hash in base-64 format.",
.handler = {&base, Base64}, .handler = {&base, Base::Base64},
}); });
addFlag({ addFlag({
.longName = "base32", .longName = "base32",
.description = "Print the hash in base-32 (Nix-specific) format.", .description = "Print the hash in base-32 (Nix-specific) format.",
.handler = {&base, Base32}, .handler = {&base, Base::Base32},
}); });
addFlag({ addFlag({
.longName = "base16", .longName = "base16",
.description = "Print the hash in base-16 format.", .description = "Print the hash in base-16 format.",
.handler = {&base, Base16}, .handler = {&base, Base::Base16},
}); });
addFlag(Flag::mkHashTypeFlag("type", &ht)); addFlag(Flag::mkHashTypeFlag("type", &ht));
@ -90,7 +90,7 @@ struct CmdHashBase : Command
? computeHashModulo(ht, *modulus, source).first ? computeHashModulo(ht, *modulus, source).first
: hashSource(ht, source).first; : hashSource(ht, source).first;
if (truncate && h.hashSize > 20) h = compressHash(h, 20); if (truncate && h.hashSize > 20) h = compressHash(h, 20);
logger->cout(h.to_string(base, base == SRI)); logger->cout(h.to_string(base, base == Base::SRI));
} }
} }
}; };
@ -110,16 +110,16 @@ struct CmdToBase : Command
std::string description() override std::string description() override
{ {
return fmt("convert a hash to %s representation", return fmt("convert a hash to %s representation",
base == Base16 ? "base-16" : base == Base::Base16 ? "base-16" :
base == Base32 ? "base-32" : base == Base::Base32 ? "base-32" :
base == Base64 ? "base-64" : base == Base::Base64 ? "base-64" :
"SRI"); "SRI");
} }
void run() override void run() override
{ {
for (auto s : args) for (auto s : args)
logger->cout(Hash::parseAny(s, ht).to_string(base, base == SRI)); logger->cout(Hash::parseAny(s, ht).to_string(base, base == Base::SRI));
} }
}; };
@ -129,10 +129,10 @@ struct CmdHash : NixMultiCommand
: MultiCommand({ : MultiCommand({
{"file", []() { return make_ref<CmdHashBase>(FileIngestionMethod::Flat);; }}, {"file", []() { return make_ref<CmdHashBase>(FileIngestionMethod::Flat);; }},
{"path", []() { return make_ref<CmdHashBase>(FileIngestionMethod::Recursive); }}, {"path", []() { return make_ref<CmdHashBase>(FileIngestionMethod::Recursive); }},
{"to-base16", []() { return make_ref<CmdToBase>(Base16); }}, {"to-base16", []() { return make_ref<CmdToBase>(Base::Base16); }},
{"to-base32", []() { return make_ref<CmdToBase>(Base32); }}, {"to-base32", []() { return make_ref<CmdToBase>(Base::Base32); }},
{"to-base64", []() { return make_ref<CmdToBase>(Base64); }}, {"to-base64", []() { return make_ref<CmdToBase>(Base::Base64); }},
{"to-sri", []() { return make_ref<CmdToBase>(SRI); }}, {"to-sri", []() { return make_ref<CmdToBase>(Base::SRI); }},
}) })
{ } { }
@ -158,7 +158,7 @@ static int compatNixHash(int argc, char * * argv)
{ {
std::optional<HashType> ht; std::optional<HashType> ht;
bool flat = false; bool flat = false;
Base base = Base16; Base base = Base::Base16;
bool truncate = false; bool truncate = false;
enum { opHash, opTo } op = opHash; enum { opHash, opTo } op = opHash;
std::vector<std::string> ss; std::vector<std::string> ss;
@ -169,10 +169,10 @@ static int compatNixHash(int argc, char * * argv)
else if (*arg == "--version") else if (*arg == "--version")
printVersion("nix-hash"); printVersion("nix-hash");
else if (*arg == "--flat") flat = true; else if (*arg == "--flat") flat = true;
else if (*arg == "--base16") base = Base16; else if (*arg == "--base16") base = Base::Base16;
else if (*arg == "--base32") base = Base32; else if (*arg == "--base32") base = Base::Base32;
else if (*arg == "--base64") base = Base64; else if (*arg == "--base64") base = Base::Base64;
else if (*arg == "--sri") base = SRI; else if (*arg == "--sri") base = Base::SRI;
else if (*arg == "--truncate") truncate = true; else if (*arg == "--truncate") truncate = true;
else if (*arg == "--type") { else if (*arg == "--type") {
std::string s = getArg(*arg, arg, end); std::string s = getArg(*arg, arg, end);
@ -180,19 +180,19 @@ static int compatNixHash(int argc, char * * argv)
} }
else if (*arg == "--to-base16") { else if (*arg == "--to-base16") {
op = opTo; op = opTo;
base = Base16; base = Base::Base16;
} }
else if (*arg == "--to-base32") { else if (*arg == "--to-base32") {
op = opTo; op = opTo;
base = Base32; base = Base::Base32;
} }
else if (*arg == "--to-base64") { else if (*arg == "--to-base64") {
op = opTo; op = opTo;
base = Base64; base = Base::Base64;
} }
else if (*arg == "--to-sri") { else if (*arg == "--to-sri") {
op = opTo; op = opTo;
base = SRI; base = Base::SRI;
} }
else if (*arg != "" && arg->at(0) == '-') else if (*arg != "" && arg->at(0) == '-')
return false; return false;
@ -203,7 +203,7 @@ static int compatNixHash(int argc, char * * argv)
if (op == opHash) { if (op == opHash) {
CmdHashBase cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::Recursive); CmdHashBase cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::Recursive);
if (!ht.has_value()) ht = htMD5; if (!ht.has_value()) ht = HashType::MD5;
cmd.ht = ht.value(); cmd.ht = ht.value();
cmd.base = base; cmd.base = base;
cmd.truncate = truncate; cmd.truncate = truncate;

@ -90,7 +90,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
std::cout << store->pathInfoToJSON( std::cout << store->pathInfoToJSON(
// FIXME: preserve order? // FIXME: preserve order?
StorePathSet(storePaths.begin(), storePaths.end()), StorePathSet(storePaths.begin(), storePaths.end()),
true, showClosureSize, SRI, AllowInvalid).dump(); true, showClosureSize, Base::SRI, AllowInvalid).dump();
} }
else { else {

@ -133,7 +133,7 @@ std::tuple<StorePath, Hash> prefetchFile(
static int main_nix_prefetch_url(int argc, char * * argv) static int main_nix_prefetch_url(int argc, char * * argv)
{ {
{ {
HashType ht = htSHA256; HashType ht = HashType::SHA256;
std::vector<std::string> args; std::vector<std::string> args;
bool printPath = getEnv("PRINT_PATH") == "1"; bool printPath = getEnv("PRINT_PATH") == "1";
bool fromExpr = false; bool fromExpr = false;
@ -256,7 +256,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON
bool executable = false; bool executable = false;
bool unpack = false; bool unpack = false;
std::optional<std::string> name; std::optional<std::string> name;
HashType hashType = htSHA256; HashType hashType = HashType::SHA256;
std::optional<Hash> expectedHash; std::optional<Hash> expectedHash;
CmdStorePrefetchFile() CmdStorePrefetchFile()
@ -316,13 +316,13 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON
if (json) { if (json) {
auto res = nlohmann::json::object(); auto res = nlohmann::json::object();
res["storePath"] = store->printStorePath(storePath); res["storePath"] = store->printStorePath(storePath);
res["hash"] = hash.to_string(SRI, true); res["hash"] = hash.to_string(Base::SRI, true);
logger->cout(res.dump()); logger->cout(res.dump());
} else { } else {
notice("Downloaded '%s' to '%s' (hash '%s').", notice("Downloaded '%s' to '%s' (hash '%s').",
url, url,
store->printStorePath(storePath), store->printStorePath(storePath),
hash.to_string(SRI, true)); hash.to_string(Base::SRI, true));
} }
} }
}; };
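
For reference, the SRI form printed here is the hash type, a dash, and the Base64 digest; for the RFC 4634 "abc" test vector used in the unit tests further down it reads:

    sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=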

@ -109,8 +109,8 @@ struct CmdVerify : StorePathsCommand
act2.result(resCorruptedPath, store->printStorePath(info->path)); act2.result(resCorruptedPath, store->printStorePath(info->path));
printError("path '%s' was modified! expected hash '%s', got '%s'", printError("path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(info->path), store->printStorePath(info->path),
info->narHash.to_string(Base32, true), info->narHash.to_string(Base::Base32, true),
hash.first.to_string(Base32, true)); hash.first.to_string(Base::Base32, true));
} }
} }

@ -102,15 +102,15 @@ CHARACTERIZATION_TEST(
(std::tuple<ContentAddress, ContentAddress, ContentAddress> { (std::tuple<ContentAddress, ContentAddress, ContentAddress> {
ContentAddress { ContentAddress {
.method = TextIngestionMethod {}, .method = TextIngestionMethod {},
.hash = hashString(HashType::htSHA256, "Derive(...)"), .hash = hashString(HashType::SHA256, "Derive(...)"),
}, },
ContentAddress { ContentAddress {
.method = FileIngestionMethod::Flat, .method = FileIngestionMethod::Flat,
.hash = hashString(HashType::htSHA1, "blob blob..."), .hash = hashString(HashType::SHA1, "blob blob..."),
}, },
ContentAddress { ContentAddress {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::Recursive,
.hash = hashString(HashType::htSHA256, "(...)"), .hash = hashString(HashType::SHA256, "(...)"),
}, },
})) }))
@ -197,7 +197,7 @@ CHARACTERIZATION_TEST(
std::optional { std::optional {
ContentAddress { ContentAddress {
.method = FileIngestionMethod::Flat, .method = FileIngestionMethod::Flat,
.hash = hashString(HashType::htSHA1, "blob blob..."), .hash = hashString(HashType::SHA1, "blob blob..."),
}, },
}, },
})) }))

@ -148,7 +148,7 @@ TEST_JSON(DynDerivationTest, caFixedText,
TEST_JSON(CaDerivationTest, caFloating, TEST_JSON(CaDerivationTest, caFloating,
(DerivationOutput::CAFloating { (DerivationOutput::CAFloating {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::Recursive,
.hashType = htSHA256, .hashType = HashType::SHA256,
}), }),
"drv-name", "output-name") "drv-name", "output-name")
@ -159,7 +159,7 @@ TEST_JSON(DerivationTest, deferred,
TEST_JSON(ImpureDerivationTest, impure, TEST_JSON(ImpureDerivationTest, impure,
(DerivationOutput::Impure { (DerivationOutput::Impure {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::Recursive,
.hashType = htSHA256, .hashType = HashType::SHA256,
}), }),
"drv-name", "output-name") "drv-name", "output-name")

@ -53,15 +53,15 @@ VERSIONED_CHARACTERIZATION_TEST(
(std::tuple<ContentAddress, ContentAddress, ContentAddress> { (std::tuple<ContentAddress, ContentAddress, ContentAddress> {
ContentAddress { ContentAddress {
.method = TextIngestionMethod {}, .method = TextIngestionMethod {},
.hash = hashString(HashType::htSHA256, "Derive(...)"), .hash = hashString(HashType::SHA256, "Derive(...)"),
}, },
ContentAddress { ContentAddress {
.method = FileIngestionMethod::Flat, .method = FileIngestionMethod::Flat,
.hash = hashString(HashType::htSHA1, "blob blob..."), .hash = hashString(HashType::SHA1, "blob blob..."),
}, },
ContentAddress { ContentAddress {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::Recursive,
.hash = hashString(HashType::htSHA256, "(...)"), .hash = hashString(HashType::SHA256, "(...)"),
}, },
})) }))
@ -278,7 +278,7 @@ VERSIONED_CHARACTERIZATION_TEST(
"foo", "foo",
FixedOutputInfo { FixedOutputInfo {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::Recursive,
.hash = hashString(HashType::htSHA256, "(...)"), .hash = hashString(HashType::SHA256, "(...)"),
.references = { .references = {
.others = { .others = {
StorePath { StorePath {
@ -348,7 +348,7 @@ VERSIONED_CHARACTERIZATION_TEST(
std::optional { std::optional {
ContentAddress { ContentAddress {
.method = FileIngestionMethod::Flat, .method = FileIngestionMethod::Flat,
.hash = hashString(HashType::htSHA1, "blob blob..."), .hash = hashString(HashType::SHA1, "blob blob..."),
}, },
}, },
})) }))

@ -55,15 +55,15 @@ VERSIONED_CHARACTERIZATION_TEST(
(std::tuple<ContentAddress, ContentAddress, ContentAddress> { (std::tuple<ContentAddress, ContentAddress, ContentAddress> {
ContentAddress { ContentAddress {
.method = TextIngestionMethod {}, .method = TextIngestionMethod {},
.hash = hashString(HashType::htSHA256, "Derive(...)"), .hash = hashString(HashType::SHA256, "Derive(...)"),
}, },
ContentAddress { ContentAddress {
.method = FileIngestionMethod::Flat, .method = FileIngestionMethod::Flat,
.hash = hashString(HashType::htSHA1, "blob blob..."), .hash = hashString(HashType::SHA1, "blob blob..."),
}, },
ContentAddress { ContentAddress {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::Recursive,
.hash = hashString(HashType::htSHA256, "(...)"), .hash = hashString(HashType::SHA256, "(...)"),
}, },
})) }))
@ -417,7 +417,7 @@ VERSIONED_CHARACTERIZATION_TEST(
"foo", "foo",
FixedOutputInfo { FixedOutputInfo {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::Recursive,
.hash = hashString(HashType::htSHA256, "(...)"), .hash = hashString(HashType::SHA256, "(...)"),
.references = { .references = {
.others = { .others = {
StorePath { StorePath {
@ -492,7 +492,7 @@ VERSIONED_CHARACTERIZATION_TEST(
std::optional { std::optional {
ContentAddress { ContentAddress {
.method = FileIngestionMethod::Flat, .method = FileIngestionMethod::Flat,
.hash = hashString(HashType::htSHA1, "blob blob..."), .hash = hashString(HashType::SHA1, "blob blob..."),
}, },
}, },
})) }))

@ -11,11 +11,11 @@ using namespace nix;
Gen<Hash> Arbitrary<Hash>::arbitrary() Gen<Hash> Arbitrary<Hash>::arbitrary()
{ {
Hash prototype(htSHA1); Hash prototype(HashType::SHA1);
return return
gen::apply( gen::apply(
[](const std::vector<uint8_t> & v) { [](const std::vector<uint8_t> & v) {
Hash hash(htSHA1); Hash hash(HashType::SHA1);
assert(v.size() == hash.hashSize); assert(v.size() == hash.hashSize);
std::copy(v.begin(), v.end(), hash.hash); std::copy(v.begin(), v.end(), hash.hash);
return hash; return hash;

@ -13,28 +13,28 @@ namespace nix {
TEST(hashString, testKnownMD5Hashes1) { TEST(hashString, testKnownMD5Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc1321 // values taken from: https://tools.ietf.org/html/rfc1321
auto s1 = ""; auto s1 = "";
auto hash = hashString(HashType::htMD5, s1); auto hash = hashString(HashType::MD5, s1);
ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e"); ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e");
} }
TEST(hashString, testKnownMD5Hashes2) { TEST(hashString, testKnownMD5Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc1321 // values taken from: https://tools.ietf.org/html/rfc1321
auto s2 = "abc"; auto s2 = "abc";
auto hash = hashString(HashType::htMD5, s2); auto hash = hashString(HashType::MD5, s2);
ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72"); ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72");
} }
TEST(hashString, testKnownSHA1Hashes1) { TEST(hashString, testKnownSHA1Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc3174 // values taken from: https://tools.ietf.org/html/rfc3174
auto s = "abc"; auto s = "abc";
auto hash = hashString(HashType::htSHA1, s); auto hash = hashString(HashType::SHA1, s);
ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d"); ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
} }
TEST(hashString, testKnownSHA1Hashes2) { TEST(hashString, testKnownSHA1Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc3174 // values taken from: https://tools.ietf.org/html/rfc3174
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
auto hash = hashString(HashType::htSHA1, s); auto hash = hashString(HashType::SHA1, s);
ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1"); ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
} }
@ -42,7 +42,7 @@ namespace nix {
// values taken from: https://tools.ietf.org/html/rfc4634 // values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abc"; auto s = "abc";
auto hash = hashString(HashType::htSHA256, s); auto hash = hashString(HashType::SHA256, s);
ASSERT_EQ(hash.to_string(Base::Base16, true), ASSERT_EQ(hash.to_string(Base::Base16, true),
"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"); "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad");
} }
@ -50,7 +50,7 @@ namespace nix {
TEST(hashString, testKnownSHA256Hashes2) { TEST(hashString, testKnownSHA256Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc4634 // values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
auto hash = hashString(HashType::htSHA256, s); auto hash = hashString(HashType::SHA256, s);
ASSERT_EQ(hash.to_string(Base::Base16, true), ASSERT_EQ(hash.to_string(Base::Base16, true),
"sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1"); "sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1");
} }
@ -58,7 +58,7 @@ namespace nix {
TEST(hashString, testKnownSHA512Hashes1) { TEST(hashString, testKnownSHA512Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc4634 // values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abc"; auto s = "abc";
auto hash = hashString(HashType::htSHA512, s); auto hash = hashString(HashType::SHA512, s);
ASSERT_EQ(hash.to_string(Base::Base16, true), ASSERT_EQ(hash.to_string(Base::Base16, true),
"sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9" "sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9"
"7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd" "7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd"
@ -68,7 +68,7 @@ namespace nix {
// values taken from: https://tools.ietf.org/html/rfc4634 // values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"; auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu";
auto hash = hashString(HashType::htSHA512, s); auto hash = hashString(HashType::SHA512, s);
ASSERT_EQ(hash.to_string(Base::Base16, true), ASSERT_EQ(hash.to_string(Base::Base16, true),
"sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1" "sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1"
"7299aeadb6889018501d289e4900f7e4331b99dec4b5433a" "7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"