2003-06-23 15:27:59 +02:00
|
|
|
#include <iostream>
|
2003-12-22 17:40:46 +01:00
|
|
|
#include <algorithm>
|
2003-06-23 15:27:59 +02:00
|
|
|
|
2005-01-19 17:39:47 +01:00
|
|
|
#include <sys/types.h>
|
|
|
|
#include <sys/stat.h>
|
2003-10-15 14:42:39 +02:00
|
|
|
#include <unistd.h>
|
2005-01-19 17:39:47 +01:00
|
|
|
#include <utime.h>
|
2003-06-23 15:27:59 +02:00
|
|
|
|
2003-07-07 09:43:58 +02:00
|
|
|
#include "store.hh"
|
2003-06-16 15:33:38 +02:00
|
|
|
#include "globals.hh"
|
|
|
|
#include "db.hh"
|
2003-06-23 15:27:59 +02:00
|
|
|
#include "archive.hh"
|
2003-08-04 09:09:36 +02:00
|
|
|
#include "pathlocks.hh"
|
2005-01-31 11:27:25 +01:00
|
|
|
#include "gc.hh"
|
2003-06-23 15:27:59 +02:00
|
|
|
|
|
|
|
|
2003-10-15 14:42:39 +02:00
|
|
|
/* Nix database. */
|
|
|
|
static Database nixDB;
|
|
|
|
|
|
|
|
|
|
|
|
/* Database tables. */
|
|
|
|
|
|
|
|
/* dbValidPaths :: Path -> ()
|
|
|
|
|
|
|
|
The existence of a key $p$ indicates that path $p$ is valid (that
|
|
|
|
is, produced by a successful build). */
|
2004-10-25 16:38:23 +02:00
|
|
|
static TableId dbValidPaths = 0;
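/* Illustrative example (not part of the original source): a
   validpaths entry maps a store path to the hash of its contents, in
   the format written by setHash() below, e.g.

     key:   /nix/store/zzzz...-foo-1.0          (hypothetical path)
     value: "sha256:<base-16 SHA-256 of the path contents>"
*/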
|
2003-10-15 14:42:39 +02:00
|
|
|
|
2005-01-19 12:16:11 +01:00
|
|
|
/* dbReferences :: Path -> [Path]
|
2003-10-15 14:42:39 +02:00
|
|
|
|
2005-01-19 12:16:11 +01:00
|
|
|
This table lists the outgoing file system references for each
|
|
|
|
output path that has been built by a Nix derivation. These are
|
|
|
|
found by scanning the path for the hash components of input
|
|
|
|
paths. */
|
|
|
|
static TableId dbReferences = 0;
|
2003-10-15 14:42:39 +02:00
|
|
|
|
2005-12-12 19:24:42 +01:00
|
|
|
/* dbReferrers :: Path -> Path
|
2003-10-15 14:42:39 +02:00
|
|
|
|
2005-12-12 19:24:42 +01:00
|
|
|
This table is just the reverse mapping of dbReferences. This table
|
|
|
|
can have duplicate keys, each corresponding value denoting a single
|
|
|
|
referrer. */
|
|
|
|
static TableId dbReferrers = 0;
|
2003-10-15 14:42:39 +02:00
|
|
|
|
2004-12-20 14:43:32 +01:00
|
|
|
/* dbSubstitutes :: Path -> [[Path]]
|
2003-10-15 14:42:39 +02:00
|
|
|
|
2004-06-20 21:17:54 +02:00
|
|
|
Each pair $(p, subs)$ tells Nix that it can use any of the
|
2004-12-20 14:43:32 +01:00
|
|
|
substitutes in $subs$ to build path $p$. Each substitute defines a
|
|
|
|
command-line invocation of a program (i.e., the first list element
|
|
|
|
is the full path to the program, the remaining elements are
|
|
|
|
arguments).
|
2003-10-15 14:42:39 +02:00
|
|
|
|
|
|
|
The main purpose of this is for distributed caching of derivates.
|
|
|
|
One system can compute a derivate and put it on a website (as a Nix
|
|
|
|
archive), for instance, and then another system can register a
|
|
|
|
substitute for that derivate. The substitute in this case might be
|
2005-01-20 17:01:07 +01:00
|
|
|
a Nix derivation that fetches the Nix archive.
|
2003-10-15 14:42:39 +02:00
|
|
|
*/
|
2004-10-25 16:38:23 +02:00
|
|
|
static TableId dbSubstitutes = 0;
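/* Illustrative example (not part of the original source): a
   substitutes entry for a hypothetical store path.  Each list element
   is a packed string [version, deriver, program, packed args], as
   produced by writeSubstitutes() below, e.g.

     key:   /nix/store/zzzz...-foo-1.0                  (hypothetical)
     value: [ pack(["2", "/nix/store/yyyy...-foo.drv",
                    "/path/to/substituter", pack(["arg1", "arg2"])]) ]
*/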
|
2003-10-15 14:42:39 +02:00
|
|
|
|
2005-02-07 14:40:40 +01:00
|
|
|
/* dbDerivers :: Path -> [Path]
|
|
|
|
|
|
|
|
This table lists the derivation(s) used to build a path.  There can
|
|
|
|
only be multiple such derivations for fixed-output derivations
|
|
|
|
(i.e., derivations specifying an expected hash). */
|
|
|
|
static TableId dbDerivers = 0;
|
|
|
|
|
2003-10-15 14:42:39 +02:00
|
|
|
|
2005-05-04 18:31:49 +02:00
|
|
|
bool Substitute::operator == (const Substitute & sub) const
|
2004-06-20 21:17:54 +02:00
|
|
|
{
|
2004-12-20 14:43:32 +01:00
|
|
|
return program == sub.program
|
2004-06-20 21:17:54 +02:00
|
|
|
&& args == sub.args;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-12-12 19:24:42 +01:00
|
|
|
static void upgradeStore07();
|
|
|
|
static void upgradeStore09();
|
2005-02-09 10:50:29 +01:00
|
|
|
|
|
|
|
|
2003-10-15 14:42:39 +02:00
|
|
|
void openDB()
|
|
|
|
{
|
2004-10-25 16:38:23 +02:00
|
|
|
if (readOnlyMode) return;
|
2005-02-09 10:50:29 +01:00
|
|
|
|
2004-10-25 16:38:23 +02:00
|
|
|
try {
|
|
|
|
nixDB.open(nixDBPath);
|
|
|
|
} catch (DbNoPermission & e) {
|
|
|
|
printMsg(lvlTalkative, "cannot access Nix database; continuing anyway");
|
|
|
|
readOnlyMode = true;
|
|
|
|
return;
|
|
|
|
}
|
2003-10-15 14:42:39 +02:00
|
|
|
dbValidPaths = nixDB.openTable("validpaths");
|
2005-01-19 12:16:11 +01:00
|
|
|
dbReferences = nixDB.openTable("references");
|
2005-12-12 19:24:42 +01:00
|
|
|
dbReferrers = nixDB.openTable("referrers", true); /* must be sorted */
|
2003-10-15 14:42:39 +02:00
|
|
|
dbSubstitutes = nixDB.openTable("substitutes");
|
2005-02-07 14:40:40 +01:00
|
|
|
dbDerivers = nixDB.openTable("derivers");
|
2005-02-09 10:50:29 +01:00
|
|
|
|
|
|
|
int curSchema = 0;
|
|
|
|
Path schemaFN = nixDBPath + "/schema";
|
|
|
|
if (pathExists(schemaFN)) {
|
|
|
|
string s = readFile(schemaFN);
|
|
|
|
if (!string2Int(s, curSchema))
|
|
|
|
throw Error(format("`%1%' is corrupt") % schemaFN);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (curSchema > nixSchemaVersion)
|
|
|
|
throw Error(format("current Nix store schema is version %1%, but I only support %2%")
|
|
|
|
% curSchema % nixSchemaVersion);
|
|
|
|
|
|
|
|
if (curSchema < nixSchemaVersion) {
|
2005-12-12 19:24:42 +01:00
|
|
|
if (curSchema <= 1)
|
|
|
|
upgradeStore07();
|
|
|
|
if (curSchema == 2)
|
|
|
|
upgradeStore09();
|
2005-02-09 10:50:29 +01:00
|
|
|
writeFile(schemaFN, (format("%1%") % nixSchemaVersion).str());
|
|
|
|
}
|
2003-10-15 14:42:39 +02:00
|
|
|
}
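/* Note (added for illustration, not part of the original source): the
   schema file read above is a plain text file containing just the
   decimal schema version; for this revision of the code that would
   presumably be "3" (schema 1 = Nix <= 0.7, schema 2 = Nix 0.8/0.9,
   schema 3 = current). */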
|
|
|
|
|
|
|
|
|
|
|
|
void initDB()
|
|
|
|
{
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void createStoreTransaction(Transaction & txn)
|
|
|
|
{
|
|
|
|
Transaction txn2(nixDB);
|
|
|
|
txn2.moveTo(txn);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/* Path copying. */
|
|
|
|
|
2003-06-23 15:27:59 +02:00
|
|
|
struct CopySink : DumpSink
|
|
|
|
{
|
2005-03-03 14:58:02 +01:00
|
|
|
string s;
|
2003-06-23 15:27:59 +02:00
|
|
|
virtual void operator () (const unsigned char * data, unsigned int len)
|
|
|
|
{
|
2005-03-03 14:58:02 +01:00
|
|
|
s.append((const char *) data, len);
|
2003-06-23 15:27:59 +02:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
struct CopySource : RestoreSource
|
|
|
|
{
|
2005-03-03 14:58:02 +01:00
|
|
|
string & s;
|
|
|
|
unsigned int pos;
|
|
|
|
CopySource(string & _s) : s(_s), pos(0) { }
|
2003-07-20 23:11:43 +02:00
|
|
|
virtual void operator () (unsigned char * data, unsigned int len)
|
2003-06-23 15:27:59 +02:00
|
|
|
{
|
2005-03-03 14:58:02 +01:00
|
|
|
s.copy((char *) data, len, pos);
|
|
|
|
pos += len;
|
|
|
|
assert(pos <= s.size());
|
2003-06-23 15:27:59 +02:00
|
|
|
}
|
|
|
|
};
|
2003-06-16 15:33:38 +02:00
|
|
|
|
|
|
|
|
2003-10-08 17:06:59 +02:00
|
|
|
void copyPath(const Path & src, const Path & dst)
|
2003-06-16 15:33:38 +02:00
|
|
|
{
|
2003-07-31 18:05:35 +02:00
|
|
|
debug(format("copying `%1%' to `%2%'") % src % dst);
|
|
|
|
|
2005-03-03 14:58:02 +01:00
|
|
|
/* Dump an archive of the path `src' into a string buffer, then
|
|
|
|
restore the archive to `dst'. This is not a very good method
|
|
|
|
for very large paths, but `copyPath' is mainly used for small
|
|
|
|
files. */
|
2003-06-23 15:27:59 +02:00
|
|
|
|
|
|
|
CopySink sink;
|
2004-09-09 23:12:53 +02:00
|
|
|
{
|
|
|
|
SwitchToOriginalUser sw;
|
|
|
|
dumpPath(src, sink);
|
|
|
|
}
|
2003-06-23 15:27:59 +02:00
|
|
|
|
2005-03-03 14:58:02 +01:00
|
|
|
CopySource source(sink.s);
|
|
|
|
restorePath(dst, source);
|
2003-06-16 15:33:38 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-02-07 15:32:44 +01:00
|
|
|
bool isInStore(const Path & path)
|
2004-02-14 22:44:18 +01:00
|
|
|
{
|
|
|
|
return path[0] == '/'
|
2005-05-04 18:31:49 +02:00
|
|
|
&& string(path, 0, nixStore.size()) == nixStore
|
2004-04-14 10:08:55 +02:00
|
|
|
&& path.size() >= nixStore.size() + 2
|
2005-02-07 15:32:44 +01:00
|
|
|
&& path[nixStore.size()] == '/';
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool isStorePath(const Path & path)
|
|
|
|
{
|
|
|
|
return isInStore(path)
|
2004-04-14 10:08:55 +02:00
|
|
|
&& path.find('/', nixStore.size() + 1) == Path::npos;
|
2004-02-14 22:44:18 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2004-04-14 10:08:55 +02:00
|
|
|
void assertStorePath(const Path & path)
|
2004-02-14 22:44:18 +01:00
|
|
|
{
|
2005-02-01 13:36:25 +01:00
|
|
|
if (!isStorePath(path))
|
2004-02-14 22:44:18 +01:00
|
|
|
throw Error(format("path `%1%' is not in the Nix store") % path);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-02-07 15:32:44 +01:00
|
|
|
Path toStorePath(const Path & path)
|
|
|
|
{
|
|
|
|
if (!isInStore(path))
|
|
|
|
throw Error(format("path `%1%' is not in the Nix store") % path);
|
|
|
|
string::size_type slash = path.find('/', nixStore.size() + 1);
|
|
|
|
if (slash == Path::npos)
|
|
|
|
return path;
|
|
|
|
else
|
|
|
|
return Path(path, 0, slash);
|
|
|
|
}
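/* Illustrative example (not part of the original source), using a
   hypothetical store path:

     isInStore("/nix/store/zzzz...-foo/bin/sh")   => true
     isStorePath("/nix/store/zzzz...-foo/bin/sh") => false
     isStorePath("/nix/store/zzzz...-foo")        => true
     toStorePath("/nix/store/zzzz...-foo/bin/sh") => "/nix/store/zzzz...-foo"
*/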
|
|
|
|
|
|
|
|
|
2005-04-07 16:01:51 +02:00
|
|
|
void checkStoreName(const string & name)
|
|
|
|
{
|
|
|
|
string validChars = "+-._?=";
|
|
|
|
for (string::const_iterator i = name.begin(); i != name.end(); ++i)
|
|
|
|
if (!((*i >= 'A' && *i <= 'Z') ||
|
|
|
|
(*i >= 'a' && *i <= 'z') ||
|
|
|
|
(*i >= '0' && *i <= '9') ||
|
|
|
|
validChars.find(*i) != string::npos))
|
|
|
|
{
|
|
|
|
throw Error(format("invalid character `%1%' in name `%2%'")
|
|
|
|
% *i % name);
|
|
|
|
}
|
|
|
|
}
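/* Illustrative example (not part of the original source): a name such
   as "gcc-3.4.3" passes checkStoreName(), while "gcc 3.4.3" (space) or
   "gcc~3.4.3" (tilde) would throw, since only alphanumerics and the
   characters "+-._?=" are accepted. */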
|
|
|
|
|
|
|
|
|
2005-01-19 17:39:47 +01:00
|
|
|
void canonicalisePathMetaData(const Path & path)
|
|
|
|
{
|
|
|
|
checkInterrupt();
|
|
|
|
|
|
|
|
struct stat st;
|
|
|
|
if (lstat(path.c_str(), &st))
|
|
|
|
throw SysError(format("getting attributes of path `%1%'") % path);
|
|
|
|
|
|
|
|
if (!S_ISLNK(st.st_mode)) {
|
|
|
|
|
|
|
|
/* Mask out all type related bits. */
|
|
|
|
mode_t mode = st.st_mode & ~S_IFMT;
|
|
|
|
|
|
|
|
if (mode != 0444 && mode != 0555) {
|
|
|
|
mode = (st.st_mode & S_IFMT)
|
|
|
|
| 0444
|
|
|
|
| (st.st_mode & S_IXUSR ? 0111 : 0);
|
|
|
|
if (chmod(path.c_str(), mode) == -1)
|
|
|
|
throw SysError(format("changing mode of `%1%' to %2$o") % path % mode);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (st.st_uid != getuid() || st.st_gid != getgid()) {
|
|
|
|
if (chown(path.c_str(), getuid(), getgid()) == -1)
|
|
|
|
throw SysError(format("changing owner/group of `%1%' to %2%/%3%")
|
|
|
|
% path % getuid() % getgid());
|
|
|
|
}
|
|
|
|
|
|
|
|
if (st.st_mtime != 0) {
|
|
|
|
struct utimbuf utimbuf;
|
|
|
|
utimbuf.actime = st.st_atime;
|
|
|
|
utimbuf.modtime = 0;
|
|
|
|
if (utime(path.c_str(), &utimbuf) == -1)
|
|
|
|
throw SysError(format("changing modification time of `%1%'") % path);
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
if (S_ISDIR(st.st_mode)) {
|
|
|
|
Strings names = readDirectory(path);
|
|
|
|
for (Strings::iterator i = names.begin(); i != names.end(); ++i)
|
|
|
|
canonicalisePathMetaData(path + "/" + *i);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-03-23 12:25:20 +01:00
|
|
|
bool isValidPathTxn(const Transaction & txn, const Path & path)
|
2003-12-05 12:05:19 +01:00
|
|
|
{
|
|
|
|
string s;
|
|
|
|
return nixDB.queryString(txn, dbValidPaths, path, s);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool isValidPath(const Path & path)
|
|
|
|
{
|
2005-01-25 22:28:25 +01:00
|
|
|
return isValidPathTxn(noTxn, path);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
static Substitutes readSubstitutes(const Transaction & txn,
|
|
|
|
const Path & srcPath);
|
|
|
|
|
|
|
|
|
|
|
|
static bool isRealisablePath(const Transaction & txn, const Path & path)
|
|
|
|
{
|
|
|
|
return isValidPathTxn(txn, path)
|
|
|
|
|| readSubstitutes(txn, path).size() > 0;
|
2003-12-05 12:05:19 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-12-12 19:24:42 +01:00
|
|
|
static string addPrefix(const string & prefix, const string & s)
|
|
|
|
{
|
|
|
|
return prefix + string(1, 0) + s;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
static string stripPrefix(const string & prefix, const string & s)
|
|
|
|
{
|
|
|
|
if (s.size() <= prefix.size() ||
|
|
|
|
s.compare(0, prefix.size(), prefix) != 0 ||
|
|
|
|
s[prefix.size()] != 0)
|
|
|
|
throw Error(format("string `%1%' is missing prefix `%2%'")
|
|
|
|
% s % prefix);
|
|
|
|
return string(s, prefix.size() + 1);
|
|
|
|
}
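/* Illustrative example (not part of the original source): the
   referrers table is physically keyed on the referenced path and the
   referrer joined by a NUL byte, so a reference from the hypothetical
   path /nix/store/bbbb...-bar to /nix/store/aaaa...-foo is stored as

     key:   "/nix/store/aaaa...-foo\0/nix/store/bbbb...-bar"
     value: ""

   getReferrers() below enumerates all keys starting with
   "<storePath>\0" and strips that prefix again to recover the
   referrers. */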
|
|
|
|
|
|
|
|
|
2005-12-13 22:04:48 +01:00
|
|
|
static PathSet getReferrers(const Transaction & txn, const Path & storePath)
|
2005-01-27 17:18:39 +01:00
|
|
|
{
|
2005-12-12 19:24:42 +01:00
|
|
|
PathSet referrers;
|
|
|
|
Strings keys;
|
|
|
|
nixDB.enumTable(txn, dbReferrers, keys, storePath + string(1, 0));
|
|
|
|
for (Strings::iterator i = keys.begin(); i != keys.end(); ++i)
|
|
|
|
referrers.insert(stripPrefix(storePath, *i));
|
|
|
|
return referrers;
|
2005-01-27 17:18:39 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-01-19 12:16:11 +01:00
|
|
|
void setReferences(const Transaction & txn, const Path & storePath,
|
|
|
|
const PathSet & references)
|
2003-10-10 16:46:28 +02:00
|
|
|
{
|
2005-03-14 19:54:40 +01:00
|
|
|
/* For unrealisable paths, we can only clear the references. */
|
|
|
|
if (references.size() > 0 && !isRealisablePath(txn, storePath))
|
2005-01-25 22:28:25 +01:00
|
|
|
throw Error(
|
|
|
|
format("cannot set references for path `%1%' which is invalid and has no substitutes")
|
|
|
|
% storePath);
|
2005-01-27 17:18:39 +01:00
|
|
|
|
|
|
|
Paths oldReferences;
|
2005-01-27 18:48:14 +01:00
|
|
|
nixDB.queryStrings(txn, dbReferences, storePath, oldReferences);
|
2005-03-03 14:10:44 +01:00
|
|
|
|
|
|
|
PathSet oldReferences2(oldReferences.begin(), oldReferences.end());
|
|
|
|
if (oldReferences2 == references) return;
|
2005-01-25 22:28:25 +01:00
|
|
|
|
2005-01-19 12:16:11 +01:00
|
|
|
nixDB.setStrings(txn, dbReferences, storePath,
|
|
|
|
Paths(references.begin(), references.end()));
|
2003-12-05 12:05:19 +01:00
|
|
|
|
2005-12-13 22:04:48 +01:00
|
|
|
/* Update the referrers mappings of all new referenced paths. */
|
2005-01-19 12:16:11 +01:00
|
|
|
for (PathSet::const_iterator i = references.begin();
|
|
|
|
i != references.end(); ++i)
|
2005-12-12 19:24:42 +01:00
|
|
|
if (oldReferences2.find(*i) == oldReferences2.end())
|
|
|
|
nixDB.setString(txn, dbReferrers, addPrefix(*i, storePath), "");
|
2005-01-27 17:18:39 +01:00
|
|
|
|
2005-12-13 22:04:48 +01:00
|
|
|
/* Remove referrer mappings from paths that are no longer
|
2005-01-27 17:18:39 +01:00
|
|
|
references. */
|
|
|
|
for (Paths::iterator i = oldReferences.begin();
|
|
|
|
i != oldReferences.end(); ++i)
|
2005-12-12 19:24:42 +01:00
|
|
|
if (references.find(*i) == references.end())
|
|
|
|
nixDB.delPair(txn, dbReferrers, addPrefix(*i, storePath));
|
2003-10-15 14:42:39 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-02-08 14:23:55 +01:00
|
|
|
void queryReferences(const Transaction & txn,
|
|
|
|
const Path & storePath, PathSet & references)
|
2003-10-10 17:25:21 +02:00
|
|
|
{
|
2005-01-19 12:16:11 +01:00
|
|
|
Paths references2;
|
2005-02-08 14:23:55 +01:00
|
|
|
if (!isRealisablePath(txn, storePath))
|
2005-01-25 22:28:25 +01:00
|
|
|
throw Error(format("path `%1%' is not valid") % storePath);
|
2005-02-08 14:23:55 +01:00
|
|
|
nixDB.queryStrings(txn, dbReferences, storePath, references2);
|
2005-01-19 12:16:11 +01:00
|
|
|
references.insert(references2.begin(), references2.end());
|
2003-10-10 17:25:21 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-12-13 22:04:48 +01:00
|
|
|
void queryReferrers(const Transaction & txn,
|
|
|
|
const Path & storePath, PathSet & referrers)
|
2005-01-19 17:59:56 +01:00
|
|
|
{
|
2005-02-08 14:23:55 +01:00
|
|
|
if (!isRealisablePath(txn, storePath))
|
2005-01-25 22:28:25 +01:00
|
|
|
throw Error(format("path `%1%' is not valid") % storePath);
|
2005-12-13 22:04:48 +01:00
|
|
|
PathSet referrers2 = getReferrers(txn, storePath);
|
|
|
|
referrers.insert(referrers2.begin(), referrers2.end());
|
2005-01-19 17:59:56 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-02-07 14:40:40 +01:00
|
|
|
void setDeriver(const Transaction & txn, const Path & storePath,
|
|
|
|
const Path & deriver)
|
|
|
|
{
|
|
|
|
assertStorePath(storePath);
|
|
|
|
if (deriver == "") return;
|
|
|
|
assertStorePath(deriver);
|
|
|
|
if (!isRealisablePath(txn, storePath))
|
|
|
|
throw Error(format("path `%1%' is not valid") % storePath);
|
|
|
|
nixDB.setString(txn, dbDerivers, storePath, deriver);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Path queryDeriver(const Transaction & txn, const Path & storePath)
|
|
|
|
{
|
2005-02-08 14:23:55 +01:00
|
|
|
if (!isRealisablePath(txn, storePath))
|
2005-02-07 14:40:40 +01:00
|
|
|
throw Error(format("path `%1%' is not valid") % storePath);
|
|
|
|
Path deriver;
|
|
|
|
if (nixDB.queryString(txn, dbDerivers, storePath, deriver))
|
|
|
|
return deriver;
|
|
|
|
else
|
|
|
|
return "";
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-02-09 13:57:13 +01:00
|
|
|
const int substituteVersion = 2;
|
|
|
|
|
|
|
|
|
2004-06-20 21:17:54 +02:00
|
|
|
static Substitutes readSubstitutes(const Transaction & txn,
|
|
|
|
const Path & srcPath)
|
2003-07-10 17:11:48 +02:00
|
|
|
{
|
2004-06-20 21:17:54 +02:00
|
|
|
Strings ss;
|
|
|
|
nixDB.queryStrings(txn, dbSubstitutes, srcPath, ss);
|
|
|
|
|
|
|
|
Substitutes subs;
|
2004-02-14 22:44:18 +01:00
|
|
|
|
2004-06-20 21:17:54 +02:00
|
|
|
for (Strings::iterator i = ss.begin(); i != ss.end(); ++i) {
|
|
|
|
if (i->size() < 4 || (*i)[3] != 0) {
|
|
|
|
/* Old-style substitute. !!! remove this code
|
|
|
|
eventually? */
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
Strings ss2 = unpackStrings(*i);
|
2005-02-09 13:57:13 +01:00
|
|
|
if (ss2.size() == 0) continue;
|
|
|
|
int version;
|
|
|
|
if (!string2Int(ss2.front(), version)) continue;
|
|
|
|
if (version != substituteVersion) continue;
|
|
|
|
if (ss2.size() != 4) throw Error("malformed substitute");
|
2004-06-20 21:17:54 +02:00
|
|
|
Strings::iterator j = ss2.begin();
|
2005-02-09 13:57:13 +01:00
|
|
|
j++;
|
2004-06-20 21:17:54 +02:00
|
|
|
Substitute sub;
|
2005-02-09 13:57:13 +01:00
|
|
|
sub.deriver = *j++;
|
2004-06-20 21:17:54 +02:00
|
|
|
sub.program = *j++;
|
|
|
|
sub.args = unpackStrings(*j++);
|
|
|
|
subs.push_back(sub);
|
|
|
|
}
|
|
|
|
|
|
|
|
return subs;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
static void writeSubstitutes(const Transaction & txn,
|
|
|
|
const Path & srcPath, const Substitutes & subs)
|
|
|
|
{
|
|
|
|
Strings ss;
|
|
|
|
|
|
|
|
for (Substitutes::const_iterator i = subs.begin();
|
|
|
|
i != subs.end(); ++i)
|
|
|
|
{
|
|
|
|
Strings ss2;
|
2005-02-09 13:57:13 +01:00
|
|
|
ss2.push_back((format("%1%") % substituteVersion).str());
|
|
|
|
ss2.push_back(i->deriver);
|
2004-06-20 21:17:54 +02:00
|
|
|
ss2.push_back(i->program);
|
|
|
|
ss2.push_back(packStrings(i->args));
|
|
|
|
ss.push_back(packStrings(ss2));
|
|
|
|
}
|
2003-12-05 12:05:19 +01:00
|
|
|
|
2004-06-28 12:42:57 +02:00
|
|
|
nixDB.setStrings(txn, dbSubstitutes, srcPath, ss);
|
2004-06-20 21:17:54 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-01-25 21:27:40 +01:00
|
|
|
void registerSubstitute(const Transaction & txn,
|
|
|
|
const Path & srcPath, const Substitute & sub)
|
2004-06-20 21:17:54 +02:00
|
|
|
{
|
2005-01-25 21:27:40 +01:00
|
|
|
assertStorePath(srcPath);
|
2004-06-20 21:17:54 +02:00
|
|
|
|
2005-01-25 21:27:40 +01:00
|
|
|
Substitutes subs = readSubstitutes(txn, srcPath);
|
2004-08-31 18:13:10 +02:00
|
|
|
|
2005-03-03 14:10:44 +01:00
|
|
|
if (find(subs.begin(), subs.end(), sub) != subs.end())
|
|
|
|
return;
|
|
|
|
|
2005-01-25 21:27:40 +01:00
|
|
|
/* New substitutes take precedence over old ones. If the
|
|
|
|
substitute is already present, it's moved to the front. */
|
|
|
|
remove(subs.begin(), subs.end(), sub);
|
|
|
|
subs.push_front(sub);
|
2004-08-31 18:13:10 +02:00
|
|
|
|
2005-01-25 21:27:40 +01:00
|
|
|
writeSubstitutes(txn, srcPath, subs);
|
2003-07-10 17:11:48 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-01-27 16:21:29 +01:00
|
|
|
Substitutes querySubstitutes(const Transaction & txn, const Path & srcPath)
|
2003-10-16 18:29:57 +02:00
|
|
|
{
|
2005-01-27 16:21:29 +01:00
|
|
|
return readSubstitutes(txn, srcPath);
|
2003-10-16 18:29:57 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-03-03 14:10:44 +01:00
|
|
|
static void invalidatePath(Transaction & txn, const Path & path);
|
|
|
|
|
|
|
|
|
2004-12-20 14:43:32 +01:00
|
|
|
void clearSubstitutes()
|
|
|
|
{
|
2004-12-20 15:16:55 +01:00
|
|
|
Transaction txn(nixDB);
|
2004-12-20 14:43:32 +01:00
|
|
|
|
2004-12-20 15:16:55 +01:00
|
|
|
/* Iterate over all paths for which there are substitutes. */
|
|
|
|
Paths subKeys;
|
|
|
|
nixDB.enumTable(txn, dbSubstitutes, subKeys);
|
|
|
|
for (Paths::iterator i = subKeys.begin(); i != subKeys.end(); ++i) {
|
2005-03-03 14:10:44 +01:00
|
|
|
|
2004-12-20 15:16:55 +01:00
|
|
|
/* Delete all substitutes for path *i. */
|
|
|
|
nixDB.delPair(txn, dbSubstitutes, *i);
|
2005-03-03 14:10:44 +01:00
|
|
|
|
|
|
|
/* Maintain the cleanup invariant. */
|
|
|
|
if (!isValidPathTxn(txn, *i))
|
|
|
|
invalidatePath(txn, *i);
|
2004-12-20 15:16:55 +01:00
|
|
|
}
|
|
|
|
|
2005-12-13 22:04:48 +01:00
|
|
|
/* !!! there should be no referrers to any of the invalid
|
2005-03-03 14:10:44 +01:00
|
|
|
substitutable paths. This should be the case by construction
|
2005-12-13 22:04:48 +01:00
|
|
|
(the only referrers can be other invalid substitutable paths,
|
2005-03-03 14:10:44 +01:00
|
|
|
which have all been removed now). */
|
|
|
|
|
2004-12-20 15:16:55 +01:00
|
|
|
txn.commit();
|
2004-12-20 14:43:32 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-02-09 10:50:29 +01:00
|
|
|
static void setHash(const Transaction & txn, const Path & storePath,
|
|
|
|
const Hash & hash)
|
|
|
|
{
|
|
|
|
assert(hash.type == htSHA256);
|
|
|
|
nixDB.setString(txn, dbValidPaths, storePath, "sha256:" + printHash(hash));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
static Hash queryHash(const Transaction & txn, const Path & storePath)
|
|
|
|
{
|
|
|
|
string s;
|
|
|
|
nixDB.queryString(txn, dbValidPaths, storePath, s);
|
|
|
|
string::size_type colon = s.find(':');
|
|
|
|
if (colon == string::npos)
|
|
|
|
throw Error(format("corrupt hash `%1%' in valid-path entry for `%2%'")
|
|
|
|
% s % storePath);
|
|
|
|
HashType ht = parseHashType(string(s, 0, colon));
|
|
|
|
if (ht == htUnknown)
|
|
|
|
throw Error(format("unknown hash type `%1%' in valid-path entry for `%2%'")
|
2005-02-14 10:53:11 +01:00
|
|
|
% string(s, 0, colon) % storePath);
|
2005-02-09 10:50:29 +01:00
|
|
|
return parseHash(ht, string(s, colon + 1));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-03-02 16:57:06 +01:00
|
|
|
Hash queryPathHash(const Path & path)
|
|
|
|
{
|
|
|
|
if (!isValidPath(path))
|
|
|
|
throw Error(format("path `%1%' is not valid") % path);
|
|
|
|
return queryHash(noTxn, path);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-01-19 17:39:47 +01:00
|
|
|
void registerValidPath(const Transaction & txn,
|
2005-03-23 14:07:28 +01:00
|
|
|
const Path & path, const Hash & hash, const PathSet & references,
|
2005-02-07 14:40:40 +01:00
|
|
|
const Path & deriver)
|
2003-07-07 11:25:26 +02:00
|
|
|
{
|
2005-03-23 14:07:28 +01:00
|
|
|
ValidPathInfo info;
|
|
|
|
info.path = path;
|
|
|
|
info.hash = hash;
|
|
|
|
info.references = references;
|
|
|
|
info.deriver = deriver;
|
|
|
|
ValidPathInfos infos;
|
|
|
|
infos.push_back(info);
|
|
|
|
registerValidPaths(txn, infos);
|
|
|
|
}
|
2005-01-19 17:39:47 +01:00
|
|
|
|
|
|
|
|
2005-03-23 14:07:28 +01:00
|
|
|
void registerValidPaths(const Transaction & txn,
|
|
|
|
const ValidPathInfos & infos)
|
|
|
|
{
|
|
|
|
PathSet newPaths;
|
|
|
|
for (ValidPathInfos::const_iterator i = infos.begin();
|
|
|
|
i != infos.end(); ++i)
|
|
|
|
newPaths.insert(i->path);
|
|
|
|
|
|
|
|
for (ValidPathInfos::const_iterator i = infos.begin();
|
|
|
|
i != infos.end(); ++i)
|
|
|
|
{
|
|
|
|
assertStorePath(i->path);
|
|
|
|
|
|
|
|
debug(format("registering path `%1%'") % i->path);
|
|
|
|
setHash(txn, i->path, i->hash);
|
2005-02-07 14:40:40 +01:00
|
|
|
|
2005-03-23 14:07:28 +01:00
|
|
|
setReferences(txn, i->path, i->references);
|
|
|
|
|
|
|
|
/* Check that all referenced paths are also valid (or about to
|
|
|
|
become valid). */
|
|
|
|
for (PathSet::iterator j = i->references.begin();
|
|
|
|
j != i->references.end(); ++j)
|
|
|
|
if (!isValidPathTxn(txn, *j) && newPaths.find(*j) == newPaths.end())
|
|
|
|
throw Error(format("cannot register path `%1%' as valid, since its reference `%2%' is invalid")
|
|
|
|
% i->path % *j);
|
|
|
|
|
|
|
|
setDeriver(txn, i->path, i->deriver);
|
|
|
|
}
|
2003-10-08 17:06:59 +02:00
|
|
|
}
|
2003-07-07 11:25:26 +02:00
|
|
|
|
2003-07-31 18:05:35 +02:00
|
|
|
|
2005-01-31 15:00:43 +01:00
|
|
|
/* Invalidate a path. The caller is responsible for checking that
|
2005-12-13 22:04:48 +01:00
|
|
|
there are no referrers. */
|
2005-03-03 14:10:44 +01:00
|
|
|
static void invalidatePath(Transaction & txn, const Path & path)
|
2003-07-08 11:54:47 +02:00
|
|
|
{
|
2003-07-31 18:05:35 +02:00
|
|
|
debug(format("unregistering path `%1%'") % path);
|
2003-07-08 11:54:47 +02:00
|
|
|
|
2005-01-27 16:21:29 +01:00
|
|
|
/* Clear the `references' entry for this path, as well as the
|
2005-12-13 22:04:48 +01:00
|
|
|
inverse `referrers' entries, and the `derivers' entry; but only
|
2005-02-07 14:40:40 +01:00
|
|
|
if there are no substitutes for this path. This maintains the
|
|
|
|
cleanup invariant. */
|
|
|
|
if (querySubstitutes(txn, path).size() == 0) {
|
2005-01-27 16:21:29 +01:00
|
|
|
setReferences(txn, path, PathSet());
|
2005-02-07 14:40:40 +01:00
|
|
|
nixDB.delPair(txn, dbDerivers, path);
|
|
|
|
}
|
2005-01-27 17:18:39 +01:00
|
|
|
|
|
|
|
nixDB.delPair(txn, dbValidPaths, path);
|
2003-07-08 11:54:47 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-01-14 14:51:38 +01:00
|
|
|
Path makeStorePath(const string & type,
|
2005-01-17 17:55:19 +01:00
|
|
|
const Hash & hash, const string & suffix)
|
2005-01-14 14:51:38 +01:00
|
|
|
{
|
|
|
|
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
|
2005-01-14 17:04:03 +01:00
|
|
|
string s = type + ":sha256:" + printHash(hash) + ":"
|
2005-01-14 14:51:38 +01:00
|
|
|
+ nixStore + ":" + suffix;
|
|
|
|
|
2005-04-07 16:01:51 +02:00
|
|
|
checkStoreName(suffix);
|
|
|
|
|
2005-01-14 17:04:03 +01:00
|
|
|
return nixStore + "/"
|
2005-01-17 17:55:19 +01:00
|
|
|
+ printHash32(compressHash(hashString(htSHA256, s), 20))
|
2005-01-14 17:04:03 +01:00
|
|
|
+ "-" + suffix;
|
2005-01-14 14:51:38 +01:00
|
|
|
}
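/* Worked example (not part of the original source), with a
   hypothetical hash: for type "source", hash h and suffix
   "foo.tar.gz", the string hashed is

     "source:sha256:<base-16 h>:/nix/store:foo.tar.gz"

   Its SHA-256 is folded by compressHash() to 160 bits and printed in
   base-32, giving a 32-character name component, e.g.

     /nix/store/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-foo.tar.gz
*/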
|
|
|
|
|
|
|
|
|
2005-04-07 16:01:51 +02:00
|
|
|
Path makeFixedOutputPath(bool recursive,
|
|
|
|
string hashAlgo, Hash hash, string name)
|
|
|
|
{
|
|
|
|
/* !!! copy/paste from primops.cc */
|
|
|
|
Hash h = hashString(htSHA256, "fixed:out:"
|
|
|
|
+ (recursive ? (string) "r:" : "") + hashAlgo + ":"
|
|
|
|
+ printHash(hash) + ":"
|
|
|
|
+ "");
|
|
|
|
return makeStorePath("output:out", h, name);
|
|
|
|
}
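/* Worked example (not part of the original source): for a
   fixed-output path added with recursive = true, hashAlgo = "sha1"
   and a hypothetical hash h, the inner preimage is

     "fixed:out:r:sha1:<base-16 h>:"

   whose SHA-256 is then passed to makeStorePath("output:out", ...,
   name) above. */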
|
|
|
|
|
|
|
|
|
|
|
|
static Path _addToStore(bool fixed, bool recursive,
|
|
|
|
string hashAlgo, const Path & _srcPath)
|
2003-07-07 11:25:26 +02:00
|
|
|
{
|
2003-10-08 17:06:59 +02:00
|
|
|
Path srcPath(absPath(_srcPath));
|
|
|
|
debug(format("adding `%1%' to the store") % srcPath);
|
2003-07-07 11:25:26 +02:00
|
|
|
|
2005-01-14 14:51:38 +01:00
|
|
|
Hash h(htSHA256);
|
2004-09-09 23:12:53 +02:00
|
|
|
{
|
|
|
|
SwitchToOriginalUser sw;
|
2005-01-17 17:55:19 +01:00
|
|
|
h = hashPath(htSHA256, srcPath);
|
2004-09-09 23:12:53 +02:00
|
|
|
}
|
2003-07-10 17:11:48 +02:00
|
|
|
|
2003-10-08 17:06:59 +02:00
|
|
|
string baseName = baseNameOf(srcPath);
|
2005-04-07 16:01:51 +02:00
|
|
|
|
|
|
|
Path dstPath;
|
|
|
|
|
|
|
|
if (fixed) {
|
|
|
|
|
|
|
|
HashType ht(parseHashType(hashAlgo));
|
|
|
|
Hash h2(ht);
|
|
|
|
{
|
|
|
|
SwitchToOriginalUser sw;
|
|
|
|
h2 = recursive ? hashPath(ht, srcPath) : hashFile(ht, srcPath);
|
|
|
|
}
|
|
|
|
|
|
|
|
dstPath = makeFixedOutputPath(recursive, hashAlgo, h2, baseName);
|
|
|
|
}
|
|
|
|
|
|
|
|
else dstPath = makeStorePath("source", h, baseName);
|
2003-07-10 17:11:48 +02:00
|
|
|
|
2005-05-07 23:33:31 +02:00
|
|
|
if (!readOnlyMode) addTempRoot(dstPath);
|
2005-01-31 11:27:25 +01:00
|
|
|
|
2004-10-25 16:38:23 +02:00
|
|
|
if (!readOnlyMode && !isValidPath(dstPath)) {
|
2003-07-10 17:11:48 +02:00
|
|
|
|
2003-10-08 17:06:59 +02:00
|
|
|
/* The first check above is an optimisation to prevent
|
|
|
|
unnecessary lock acquisition. */
|
2003-07-22 17:15:15 +02:00
|
|
|
|
2003-10-08 17:06:59 +02:00
|
|
|
PathSet lockPaths;
|
|
|
|
lockPaths.insert(dstPath);
|
|
|
|
PathLocks outputLock(lockPaths);
|
2003-07-22 17:15:15 +02:00
|
|
|
|
2003-10-08 17:06:59 +02:00
|
|
|
if (!isValidPath(dstPath)) {
|
2004-06-21 09:46:02 +02:00
|
|
|
|
|
|
|
if (pathExists(dstPath)) deletePath(dstPath);
|
2004-10-25 16:38:23 +02:00
|
|
|
|
2003-10-08 17:06:59 +02:00
|
|
|
copyPath(srcPath, dstPath);
|
2003-08-01 11:01:51 +02:00
|
|
|
|
2005-01-17 17:55:19 +01:00
|
|
|
Hash h2 = hashPath(htSHA256, dstPath);
|
2005-01-14 14:51:38 +01:00
|
|
|
if (h != h2)
|
|
|
|
throw Error(format("contents of `%1%' changed while copying it to `%2%' (%3% -> %4%)")
|
2005-01-14 17:04:03 +01:00
|
|
|
% srcPath % dstPath % printHash(h) % printHash(h2));
|
2005-01-14 14:51:38 +01:00
|
|
|
|
2005-01-19 17:39:47 +01:00
|
|
|
canonicalisePathMetaData(dstPath);
|
2004-09-09 23:19:20 +02:00
|
|
|
|
2003-10-08 17:06:59 +02:00
|
|
|
Transaction txn(nixDB);
|
2005-02-07 14:40:40 +01:00
|
|
|
registerValidPath(txn, dstPath, h, PathSet(), "");
|
2003-10-08 17:06:59 +02:00
|
|
|
txn.commit();
|
2003-08-01 11:01:51 +02:00
|
|
|
}
|
2003-11-22 19:45:56 +01:00
|
|
|
|
|
|
|
outputLock.setDeletion(true);
|
2003-06-16 15:33:38 +02:00
|
|
|
}
|
2003-08-04 09:09:36 +02:00
|
|
|
|
2003-10-08 17:06:59 +02:00
|
|
|
return dstPath;
|
2003-06-16 15:33:38 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-04-07 16:01:51 +02:00
|
|
|
Path addToStore(const Path & srcPath)
|
|
|
|
{
|
|
|
|
return _addToStore(false, false, "", srcPath);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Path addToStoreFixed(bool recursive, string hashAlgo, const Path & srcPath)
|
|
|
|
{
|
|
|
|
return _addToStore(true, recursive, hashAlgo, srcPath);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-01-25 22:28:25 +01:00
|
|
|
Path addTextToStore(const string & suffix, const string & s,
|
|
|
|
const PathSet & references)
|
2003-10-15 14:42:39 +02:00
|
|
|
{
|
2005-01-17 17:55:19 +01:00
|
|
|
Hash hash = hashString(htSHA256, s);
|
2005-01-14 14:51:38 +01:00
|
|
|
|
|
|
|
Path dstPath = makeStorePath("text", hash, suffix);
|
2004-02-14 22:44:18 +01:00
|
|
|
|
2005-05-07 23:33:31 +02:00
|
|
|
if (!readOnlyMode) addTempRoot(dstPath);
|
2005-01-31 11:27:25 +01:00
|
|
|
|
2005-01-14 14:51:38 +01:00
|
|
|
if (!readOnlyMode && !isValidPath(dstPath)) {
|
2003-10-15 14:42:39 +02:00
|
|
|
|
2003-10-23 12:51:55 +02:00
|
|
|
PathSet lockPaths;
|
|
|
|
lockPaths.insert(dstPath);
|
|
|
|
PathLocks outputLock(lockPaths);
|
|
|
|
|
|
|
|
if (!isValidPath(dstPath)) {
|
2004-06-21 09:46:02 +02:00
|
|
|
|
|
|
|
if (pathExists(dstPath)) deletePath(dstPath);
|
|
|
|
|
2003-11-22 16:58:34 +01:00
|
|
|
writeStringToFile(dstPath, s);
|
2003-10-15 14:42:39 +02:00
|
|
|
|
2005-01-19 17:39:47 +01:00
|
|
|
canonicalisePathMetaData(dstPath);
|
2004-09-09 23:19:20 +02:00
|
|
|
|
2003-10-23 12:51:55 +02:00
|
|
|
Transaction txn(nixDB);
|
2005-01-25 22:28:25 +01:00
|
|
|
registerValidPath(txn, dstPath,
|
2005-02-07 14:40:40 +01:00
|
|
|
hashPath(htSHA256, dstPath), references, "");
|
2003-10-23 12:51:55 +02:00
|
|
|
txn.commit();
|
|
|
|
}
|
2003-11-22 19:45:56 +01:00
|
|
|
|
|
|
|
outputLock.setDeletion(true);
|
2003-10-15 14:42:39 +02:00
|
|
|
}
|
2005-01-14 14:51:38 +01:00
|
|
|
|
|
|
|
return dstPath;
|
2003-10-15 14:42:39 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2003-10-08 17:06:59 +02:00
|
|
|
void deleteFromStore(const Path & _path)
|
2003-06-23 16:40:49 +02:00
|
|
|
{
|
2003-10-08 17:06:59 +02:00
|
|
|
Path path(canonPath(_path));
|
|
|
|
|
2004-02-14 22:44:18 +01:00
|
|
|
assertStorePath(path);
|
2003-07-08 11:54:47 +02:00
|
|
|
|
2003-11-22 19:45:56 +01:00
|
|
|
Transaction txn(nixDB);
|
2005-01-31 15:00:43 +01:00
|
|
|
if (isValidPathTxn(txn, path)) {
|
2005-12-13 22:04:48 +01:00
|
|
|
PathSet referrers = getReferrers(txn, path);
|
|
|
|
for (PathSet::iterator i = referrers.begin();
|
|
|
|
i != referrers.end(); ++i)
|
2005-04-12 12:51:00 +02:00
|
|
|
if (*i != path && isValidPathTxn(txn, *i))
|
|
|
|
throw Error(format("cannot delete path `%1%' because it is in use by path `%2%'") % path % *i);
|
2005-03-03 14:10:44 +01:00
|
|
|
invalidatePath(txn, path);
|
2005-01-31 15:00:43 +01:00
|
|
|
}
|
2003-11-22 19:45:56 +01:00
|
|
|
txn.commit();
|
2003-07-08 11:54:47 +02:00
|
|
|
|
2003-06-27 16:56:12 +02:00
|
|
|
deletePath(path);
|
2003-06-23 16:40:49 +02:00
|
|
|
}
|
2003-07-17 14:27:55 +02:00
|
|
|
|
|
|
|
|
2005-02-08 14:48:53 +01:00
|
|
|
void verifyStore(bool checkContents)
|
2003-07-17 14:27:55 +02:00
|
|
|
{
|
2003-07-31 21:49:11 +02:00
|
|
|
Transaction txn(nixDB);
|
|
|
|
|
2003-10-10 17:14:29 +02:00
|
|
|
Paths paths;
|
2003-11-22 19:45:56 +01:00
|
|
|
PathSet validPaths;
|
2003-10-10 17:14:29 +02:00
|
|
|
nixDB.enumTable(txn, dbValidPaths, paths);
|
2003-07-17 14:27:55 +02:00
|
|
|
|
2003-12-05 12:05:19 +01:00
|
|
|
for (Paths::iterator i = paths.begin(); i != paths.end(); ++i) {
|
2005-02-08 14:48:53 +01:00
|
|
|
if (!pathExists(*i)) {
|
|
|
|
printMsg(lvlError, format("path `%1%' disappeared") % *i);
|
2005-03-03 14:10:44 +01:00
|
|
|
invalidatePath(txn, *i);
|
2005-02-08 14:48:53 +01:00
|
|
|
} else if (!isStorePath(*i)) {
|
|
|
|
printMsg(lvlError, format("path `%1%' is not in the Nix store") % *i);
|
2005-03-03 14:10:44 +01:00
|
|
|
invalidatePath(txn, *i);
|
2005-02-08 14:48:53 +01:00
|
|
|
} else {
|
|
|
|
if (checkContents) {
|
|
|
|
Hash expected = queryHash(txn, *i);
|
|
|
|
Hash current = hashPath(expected.type, *i);
|
|
|
|
if (current != expected) {
|
|
|
|
printMsg(lvlError, format("path `%1%' was modified! "
|
|
|
|
"expected hash `%2%', got `%3%'")
|
|
|
|
% *i % printHash(expected) % printHash(current));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
validPaths.insert(*i);
|
|
|
|
}
|
2003-07-17 14:27:55 +02:00
|
|
|
}
|
|
|
|
|
2005-02-08 14:23:55 +01:00
|
|
|
/* "Usable" paths are those that are valid or have a
|
|
|
|
substitute. */
|
2003-12-05 12:05:19 +01:00
|
|
|
PathSet usablePaths(validPaths);
|
2003-07-17 14:27:55 +02:00
|
|
|
|
2003-11-24 10:24:52 +01:00
|
|
|
/* Check that the values of the substitute mappings are valid
|
|
|
|
paths. */
|
2004-06-20 21:17:54 +02:00
|
|
|
Paths subKeys;
|
|
|
|
nixDB.enumTable(txn, dbSubstitutes, subKeys);
|
|
|
|
for (Paths::iterator i = subKeys.begin(); i != subKeys.end(); ++i) {
|
2004-12-20 14:43:32 +01:00
|
|
|
Substitutes subs = readSubstitutes(txn, *i);
|
2005-02-08 14:23:55 +01:00
|
|
|
if (!isStorePath(*i)) {
|
|
|
|
printMsg(lvlError, format("found substitutes for non-store path `%1%'") % *i);
|
2004-12-20 15:16:55 +01:00
|
|
|
nixDB.delPair(txn, dbSubstitutes, *i);
|
2005-02-08 14:23:55 +01:00
|
|
|
}
|
|
|
|
else if (subs.size() == 0)
|
|
|
|
nixDB.delPair(txn, dbSubstitutes, *i);
|
|
|
|
else
|
|
|
|
usablePaths.insert(*i);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Check the cleanup invariant: only usable paths can have
|
2005-12-13 22:04:48 +01:00
|
|
|
`references', `referrers', or `derivers' entries. */
|
2005-02-08 14:23:55 +01:00
|
|
|
|
|
|
|
/* Check the `derivers' table. */
|
|
|
|
Paths deriversKeys;
|
|
|
|
nixDB.enumTable(txn, dbDerivers, deriversKeys);
|
|
|
|
for (Paths::iterator i = deriversKeys.begin();
|
|
|
|
i != deriversKeys.end(); ++i)
|
|
|
|
{
|
|
|
|
if (usablePaths.find(*i) == usablePaths.end()) {
|
|
|
|
printMsg(lvlError, format("found deriver entry for unusable path `%1%'")
|
|
|
|
% *i);
|
|
|
|
nixDB.delPair(txn, dbDerivers, *i);
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
Path deriver = queryDeriver(txn, *i);
|
|
|
|
if (!isStorePath(deriver)) {
|
|
|
|
printMsg(lvlError, format("found corrupt deriver `%1%' for `%2%'")
|
|
|
|
% deriver % *i);
|
|
|
|
nixDB.delPair(txn, dbDerivers, *i);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Check the `references' table. */
|
|
|
|
Paths referencesKeys;
|
|
|
|
nixDB.enumTable(txn, dbReferences, referencesKeys);
|
|
|
|
for (Paths::iterator i = referencesKeys.begin();
|
|
|
|
i != referencesKeys.end(); ++i)
|
|
|
|
{
|
|
|
|
if (usablePaths.find(*i) == usablePaths.end()) {
|
|
|
|
printMsg(lvlError, format("found references entry for unusable path `%1%'")
|
|
|
|
% *i);
|
2005-03-25 15:21:49 +01:00
|
|
|
setReferences(txn, *i, PathSet());
|
2005-02-08 14:23:55 +01:00
|
|
|
}
|
|
|
|
else {
|
2005-02-09 10:50:29 +01:00
|
|
|
bool isValid = validPaths.find(*i) != validPaths.end();
|
2005-02-08 14:23:55 +01:00
|
|
|
PathSet references;
|
|
|
|
queryReferences(txn, *i, references);
|
|
|
|
for (PathSet::iterator j = references.begin();
|
|
|
|
j != references.end(); ++j)
|
|
|
|
{
|
2005-12-12 19:24:42 +01:00
|
|
|
string dummy;
|
|
|
|
if (!nixDB.queryString(txn, dbReferrers, addPrefix(*j, *i), dummy)) {
|
2005-12-13 22:04:48 +01:00
|
|
|
printMsg(lvlError, format("missing referrer mapping from `%1%' to `%2%'")
|
2005-02-08 14:23:55 +01:00
|
|
|
% *j % *i);
|
2005-12-12 19:24:42 +01:00
|
|
|
nixDB.setString(txn, dbReferrers, addPrefix(*j, *i), "");
|
2005-02-08 14:23:55 +01:00
|
|
|
}
|
2005-02-09 10:50:29 +01:00
|
|
|
if (isValid && validPaths.find(*j) == validPaths.end()) {
|
|
|
|
printMsg(lvlError, format("incomplete closure: `%1%' needs missing `%2%'")
|
|
|
|
% *i % *j);
|
|
|
|
}
|
2005-02-08 14:23:55 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2005-12-12 19:24:42 +01:00
|
|
|
#if 0 // !!!
|
2005-12-13 22:04:48 +01:00
|
|
|
/* Check the `referrers' table. */
|
|
|
|
Paths referrersKeys;
|
|
|
|
nixDB.enumTable(txn, dbReferrers, referrersKeys);
|
|
|
|
for (Paths::iterator i = referrersKeys.begin();
|
|
|
|
i != referrersKeys.end(); ++i)
|
2005-02-08 14:23:55 +01:00
|
|
|
{
|
|
|
|
if (usablePaths.find(*i) == usablePaths.end()) {
|
2005-12-13 22:04:48 +01:00
|
|
|
printMsg(lvlError, format("found referrers entry for unusable path `%1%'")
|
2005-02-08 14:23:55 +01:00
|
|
|
% *i);
|
2005-12-13 22:04:48 +01:00
|
|
|
nixDB.delPair(txn, dbReferrers, *i);
|
2005-02-08 14:23:55 +01:00
|
|
|
}
|
|
|
|
else {
|
2005-12-13 22:04:48 +01:00
|
|
|
PathSet referrers, newReferrers;
|
|
|
|
queryReferrers(txn, *i, referrers);
|
|
|
|
for (PathSet::iterator j = referrers.begin();
|
|
|
|
j != referrers.end(); ++j)
|
2005-02-08 14:23:55 +01:00
|
|
|
{
|
|
|
|
Paths references;
|
2005-03-25 15:21:49 +01:00
|
|
|
if (usablePaths.find(*j) == usablePaths.end()) {
|
2005-12-13 22:04:48 +01:00
|
|
|
printMsg(lvlError, format("referrer mapping from `%1%' to unusable `%2%'")
|
2005-03-25 15:21:49 +01:00
|
|
|
% *i % *j);
|
|
|
|
} else {
|
|
|
|
nixDB.queryStrings(txn, dbReferences, *j, references);
|
|
|
|
if (find(references.begin(), references.end(), *i) == references.end()) {
|
|
|
|
printMsg(lvlError, format("missing reference mapping from `%1%' to `%2%'")
|
|
|
|
% *j % *i);
|
|
|
|
/* !!! repair by inserting *i into references */
|
|
|
|
}
|
2005-12-13 22:04:48 +01:00
|
|
|
else newReferrers.insert(*j);
|
2005-02-08 14:23:55 +01:00
|
|
|
}
|
|
|
|
}
|
2005-12-13 22:04:48 +01:00
|
|
|
if (referrers != newReferrers)
|
|
|
|
nixDB.setStrings(txn, dbReferrers, *i,
|
|
|
|
Paths(newReferrers.begin(), newReferrers.end()));
|
2005-02-08 14:23:55 +01:00
|
|
|
}
|
2003-11-24 10:24:52 +01:00
|
|
|
}
|
2005-12-12 19:24:42 +01:00
|
|
|
#endif
|
2003-10-10 17:14:29 +02:00
|
|
|
|
2003-07-31 21:49:11 +02:00
|
|
|
txn.commit();
|
2003-07-17 14:27:55 +02:00
|
|
|
}
|
2005-02-09 10:50:29 +01:00
|
|
|
|
|
|
|
|
|
|
|
#include "aterm.hh"
|
|
|
|
#include "derivations-ast.hh"
|
|
|
|
|
|
|
|
|
|
|
|
/* Upgrade from schema 1 (Nix <= 0.7) to schema 2 (Nix >= 0.8). */
|
2005-12-12 19:24:42 +01:00
|
|
|
static void upgradeStore07()
|
2005-02-09 10:50:29 +01:00
|
|
|
{
|
|
|
|
printMsg(lvlError, "upgrading Nix store to new schema (this may take a while)...");
|
|
|
|
|
|
|
|
Transaction txn(nixDB);
|
|
|
|
|
|
|
|
Paths validPaths2;
|
|
|
|
nixDB.enumTable(txn, dbValidPaths, validPaths2);
|
|
|
|
PathSet validPaths(validPaths2.begin(), validPaths2.end());
|
|
|
|
|
|
|
|
cerr << "hashing paths...";
|
2005-02-09 15:37:24 +01:00
|
|
|
int n = 0;
|
2005-02-09 10:50:29 +01:00
|
|
|
for (PathSet::iterator i = validPaths.begin(); i != validPaths.end(); ++i) {
|
|
|
|
checkInterrupt();
|
|
|
|
string s;
|
|
|
|
nixDB.queryString(txn, dbValidPaths, *i, s);
|
|
|
|
if (s == "") {
|
|
|
|
Hash hash = hashPath(htSHA256, *i);
|
|
|
|
setHash(txn, *i, hash);
|
|
|
|
cerr << ".";
|
2005-02-09 15:37:24 +01:00
|
|
|
if (++n % 1000 == 0) {
|
|
|
|
txn.commit();
|
|
|
|
txn.begin(nixDB);
|
|
|
|
}
|
2005-02-09 10:50:29 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
cerr << "\n";
|
|
|
|
|
2005-02-09 15:37:24 +01:00
|
|
|
txn.commit();
|
|
|
|
|
|
|
|
txn.begin(nixDB);
|
|
|
|
|
2005-02-09 10:50:29 +01:00
|
|
|
cerr << "processing closures...";
|
|
|
|
for (PathSet::iterator i = validPaths.begin(); i != validPaths.end(); ++i) {
|
|
|
|
checkInterrupt();
|
|
|
|
if (i->size() > 6 && string(*i, i->size() - 6) == ".store") {
|
|
|
|
ATerm t = ATreadFromNamedFile(i->c_str());
|
|
|
|
if (!t) throw Error(format("cannot read aterm from `%1%'") % *i);
|
|
|
|
|
|
|
|
ATermList roots, elems;
|
|
|
|
if (!matchOldClosure(t, roots, elems)) continue;
|
|
|
|
|
|
|
|
for (ATermIterator j(elems); j; ++j) {
|
|
|
|
|
|
|
|
ATerm path2;
|
|
|
|
ATermList references2;
|
|
|
|
if (!matchOldClosureElem(*j, path2, references2)) continue;
|
|
|
|
|
|
|
|
Path path = aterm2String(path2);
|
|
|
|
if (validPaths.find(path) == validPaths.end())
|
|
|
|
/* Skip this path; it's invalid. This is a normal
|
|
|
|
condition (Nix <= 0.7 did not enforce closure
|
|
|
|
on closure store expressions). */
|
|
|
|
continue;
|
|
|
|
|
|
|
|
PathSet references;
|
|
|
|
for (ATermIterator k(references2); k; ++k) {
|
|
|
|
Path reference = aterm2String(*k);
|
|
|
|
if (validPaths.find(reference) == validPaths.end())
|
|
|
|
/* Bad reference. Set it anyway and let the
|
|
|
|
user fix it. */
|
|
|
|
printMsg(lvlError, format("closure `%1%' contains reference from `%2%' "
|
|
|
|
"to invalid path `%3%' (run `nix-store --verify')")
|
|
|
|
% *i % path % reference);
|
|
|
|
references.insert(reference);
|
|
|
|
}
|
|
|
|
|
|
|
|
PathSet prevReferences;
|
|
|
|
queryReferences(txn, path, prevReferences);
|
|
|
|
if (prevReferences.size() > 0 && references != prevReferences)
|
|
|
|
printMsg(lvlError, format("warning: conflicting references for `%1%'") % path);
|
|
|
|
|
|
|
|
if (references != prevReferences)
|
|
|
|
setReferences(txn, path, references);
|
|
|
|
}
|
|
|
|
|
|
|
|
cerr << ".";
|
|
|
|
}
|
|
|
|
}
|
|
|
|
cerr << "\n";
|
|
|
|
|
|
|
|
/* !!! maybe this transaction is way too big */
|
|
|
|
txn.commit();
|
|
|
|
}
|
2005-12-12 19:24:42 +01:00
|
|
|
|
|
|
|
|
|
|
|
/* Upgrade from schema 2 (0.8 <= Nix <= 0.9) to schema 3 (Nix >=
|
|
|
|
0.10). The only thing to do here is to upgrade the old `referer'
|
|
|
|
table (which causes quadratic complexity in some cases) to the new
|
|
|
|
(and properly spelled) `referrer' table. */
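/* Illustrative example (not part of the original source): the old
   `referers' table mapped a path to a list of referring paths,

     "/nix/store/aaaa...-foo" -> ["/nix/store/bbbb...-bar", ...]

   while the loop below re-registers each such pair in the new
   `referrers' table as a single key with an empty value,

     "/nix/store/aaaa...-foo\0/nix/store/bbbb...-bar" -> ""
*/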
|
|
|
|
static void upgradeStore09()
|
|
|
|
{
|
|
|
|
printMsg(lvlError, "upgrading Nix store to new schema (this may take a while)...");
|
|
|
|
|
|
|
|
if (!pathExists(nixDBPath + "/referers")) return;
|
|
|
|
|
|
|
|
Transaction txn(nixDB);
|
|
|
|
|
|
|
|
cerr << "converting referers to referrers...";
|
|
|
|
|
|
|
|
TableId dbReferers = nixDB.openTable("referers"); /* sic! */
|
|
|
|
|
|
|
|
Paths referersKeys;
|
|
|
|
nixDB.enumTable(txn, dbReferers, referersKeys);
|
|
|
|
for (Paths::iterator i = referersKeys.begin();
|
|
|
|
i != referersKeys.end(); ++i)
|
|
|
|
{
|
|
|
|
Paths referers;
|
|
|
|
nixDB.queryStrings(txn, dbReferers, *i, referers);
|
|
|
|
for (Paths::iterator j = referers.begin();
|
|
|
|
j != referers.end(); ++j)
|
|
|
|
nixDB.setString(txn, dbReferrers, addPrefix(*i, *j), "");
|
|
|
|
cerr << ".";
|
|
|
|
}
|
2005-12-12 20:14:38 +01:00
|
|
|
|
2005-12-12 19:24:42 +01:00
|
|
|
cerr << "\n";
|
|
|
|
|
|
|
|
txn.commit();
|
2005-12-12 20:14:38 +01:00
|
|
|
|
|
|
|
nixDB.closeTable(dbReferers);
|
|
|
|
|
|
|
|
nixDB.deleteTable("referers");
|
2005-12-12 19:24:42 +01:00
|
|
|
}
|