update-walker: remove because unused
This commit is contained in:
parent
09e3b7a570
commit
59c55f4558
11 changed files with 0 additions and 761 deletions
|
@ -1,19 +0,0 @@
|
|||
# Builds a derivation whose $out/attributes directory holds one symlinked
# file per attribute of a.theAttrSet, each file containing that
# attribute's value as text (via a.writeTextFile).
a:
a.stdenv.mkDerivation {
  name = "attribute-set";

  buildCommand =
    let
      # One "ln -s" line per attribute name.
      linkAttr = n: ''
        ln -s "${a.writeTextFile { name = n; text = builtins.getAttr n a.theAttrSet; }}" $out/attributes/${n};
      '';
    in
    ''
      mkdir -p "$out/attributes"
    '' + a.lib.concatStrings (map linkAttr (builtins.attrNames a.theAttrSet));

  meta = {
    description = "Contents of an attribute set";
    maintainers = [ a.lib.maintainers.raskin ];
  };
}
|
|
@ -1,24 +0,0 @@
|
|||
#! /bin/sh
# Bootstrap an update-walker expression for a git-hosted package.
# Usage: $0 expression-basename repo-url branch-name package-base-name

[ -z "$1" ] && {
  echo "Use $0 expression-basename repo-url branch-name package-base-name"
  echo "Like:"
  echo "$0 default http://git.example.com/repo origin/master hello"
  exit 1;
} >&2

# Absolute directory containing this script.
own_dir="$(cd "$(dirname "$0")"; sh -c pwd)"

# Instantiate the git template under the requested basename.
# Abort if the template cannot be copied or rewritten.
cp "$own_dir/../builder-defs/template-bdp-uud.nix" "$1.nix" || exit
sed -e "s@src-for-default.nix@src-for-$1.nix@g;
  s@fetchUrlFromSrcInfo@fetchGitFromSrcInfo@g" -i "$1.nix" || exit

# Start with an empty source description; the update tool fills it in.
echo '{}' > "src-for-$1.nix"

# Record where and how to fetch the upstream source.
# (Redirect target quoted: basenames may contain shell metacharacters.)
cat << EOF > "src-info-for-$1.nix"
{
  repoUrl = "$2";
  rev = "$3";
  baseName = "$4";
  method = "fetchgit";
}
EOF
|
@ -1,20 +0,0 @@
|
|||
#! /bin/sh
# Bootstrap an update-walker expression for a package fetched by URL.
# Usage: $0 expression-basename download-page package-base-name

[ -z "$1" ] && {
  echo "Use $0 expression-basename download-page package-base-name"
  echo "Like:"
  echo "$0 default http://example.com/downloads hello"
  exit 1;
} >&2

# Absolute directory containing this script.
own_dir="$(cd "$(dirname "$0")"; sh -c pwd)"

# Instantiate the auto-callable template under the requested basename.
# Abort if the template cannot be copied or rewritten.
cp "$own_dir/../builder-defs/template-auto-callable.nix" "$1.nix" || exit
sed -e "s@src-for-default.nix@src-for-$1.nix@g" -i "$1.nix" || exit

# Start with an empty source description; the update tool fills it in.
echo '{}' > "src-for-$1.nix"

# Record where to look for new upstream releases.
# (Redirect target quoted: basenames may contain shell metacharacters.)
cat << EOF > "src-info-for-$1.nix"
{
  downloadPage = "$2";
  baseName = "$3";
}
EOF
|
|
@ -1,29 +0,0 @@
|
|||
Next to file.nix we get src-for-file.nix
|
||||
src-for-file.nix should evaluate to a flat attribute set with
|
||||
string values.
|
||||
It is supposed to be imported in the main expression.
|
||||
Ideally, it exports url, hash and version.
|
||||
|
||||
src-for-file.nix generation is directed by
|
||||
src-info-for-file.nix.
|
||||
|
||||
Attributes:
|
||||
|
||||
src-info-for-file.nix:
|
||||
|
||||
downloadPage
|
||||
rev (for repos)
|
||||
baseName (default = unnamed-package)
|
||||
sourceRegexp (default = '.*[.]tar[.].*')
|
||||
choiceCommand (default = 'head -1')
|
||||
versionExtractorSedScript (default = 's/.*-([0-9.]+)[.].*/\1/')
|
||||
versionReferenceCreator (default = 's/-([0-9.]+)[.]/-${version}./')
|
||||
mirrorSedScript (default = none)
|
||||
|
||||
src-for-file.nix:
|
||||
|
||||
advertisedUrl (its match is the check for update presence)
|
||||
url
|
||||
hash
|
||||
version
|
||||
name
|
|
@ -1,14 +0,0 @@
|
|||
# Shared sed scripts used by the update scripts.

# SourceForge redirects download-page links like this:
#http://sourceforge.net/projects/webdruid/files/webdruid/0.6.0-alpha5/webdruid-0.6.0-alpha5.tar.gz/download
#http://downloads.sourceforge.net/webdruid/files/webdruid/0.6.0-alpha5/webdruid-0.6.0-alpha5.tar.gz
# Rewrite a project-page URL into the direct download URL.
skipRedirectSF='s@sourceforge.net/projects@downloads.sourceforge.net/project@; s@/files@@; s@/download$@@;'
# Extract the release directory component of a SourceForge URL.
extractReleaseSF='s@.*/([^/]+)/[^/]+@\1@'
# Extract the version number from an archive file name.
extractVersionSF='s@.*/[^/0-9]*([0-9].*)[.](tar|tgz|tbz2|zip).*@\1@'
# Turn an apache.org dist URL into a mirror:// URL.
apacheMirror='s@http://www.apache.org/dist/@mirror://apache/@'
skipRedirectApache='s@/dyn/closer.cgi[?]path=@/dist@'

# Emit a sed script that replaces every occurrence of the current
# $version with the literal text ${version}.
replaceAllVersionOccurences() {
  printf '%s\n' "s/$version/\${version}/g"
}
# Extract a dash-delimited version: the digits-and-dots run between dashes.
dashDelimitedVersion='s/.*-([0-9.]+)-.*/\1/'
|
|
@ -1,13 +0,0 @@
|
|||
# Test fixture: a flat attribute set exercising the value kinds the
# attrset-to-dir conversion must handle, including ''-string escapes
# for backslash, dollar-curly and quotes.
{
  a=1;
  b="text";
  c=''
    text
  '';
  d=''
    Multi-line text with special characters -
    like \ (backslash) and ''${} (dollar +
    curly braces) and $ (dollar) and ' (quote)
    and " (double quote).
  '';
}
|
|
@ -1,182 +0,0 @@
|
|||
#! /bin/sh
# update-upstream: refresh src-for-<expr>.nix for one package expression.
# Usage: <script> <main-expression.nix> [forced-url]
# Fetch behavior is driven by src-info-for-<expr>.nix.

set -x

# Absolute directory containing this script.
own_dir="$(cd "$(dirname "$0")"; sh -c pwd)"

source "$own_dir/snippets.sh"

[ -z "$1" ] && {
  echo "Specify main expression filename."
  exit 1;
}

main_file="$1"
main_dir="$(cd "$(dirname "$main_file")" ; sh -c pwd)"
file_name="$(basename "$main_file")"
# Input: fetch instructions for this expression.
defs_file="$main_dir"/src-info-for-"$file_name"
# Current source description, read to detect "nothing changed".
src_file="$main_dir"/src-for-"$file_name"
# The new description overwrites the old one in place.
# OK, [vcs] revert is always possible
new_src_file="$main_dir"/src-for-"$file_name"

# Optional second argument overrides the discovered download URL.
forcedUrl="$2"

# Unpack both attribute sets into one-file-per-attribute directories.
defs_dir="$("$own_dir"/attrset-to-dir.sh "$defs_file")"
src_defs_dir="$("$own_dir"/attrset-to-dir.sh "$src_file")"
|
||||
# getAttr NAME DEFAULT
# Print attribute NAME, read from the file $defs_dir/NAME produced by
# attrset-to-dir.sh; fall back to DEFAULT when the file is absent (or
# unreadable).  A diagnostic copy of the value goes to stderr.
getAttr () {
  file="$defs_dir"/"$1"
  data="$( ( [ -f "$file" ] && cat "$file" ) || echo "$2" )"
  echo "attribute $1 obtained as: [[$data]]" >&2
  echo "$data"
}

# Fetch method and package base name, with their documented defaults.
method="$(getAttr method fetchurl)"
baseName="$(getAttr baseName 'unnamed-package')"
# Variables that every fetch method contributes to the generated file.
commonPrefetchVars=" version name hash"

prefetchClause=""
||||
# method = fetchSFdirs: walk a SourceForge-style directory listing,
# pick the freshest versioned subdirectory and derive the archive file
# name from it via versionToFileCommand.
[ fetchSFdirs = "$method" ] && {
  if [ -z "$forcedUrl" ]; then
    # Scrape candidate links, keep version-directory-looking ones,
    # drop blacklisted ones, let choiceCommand pick one, then map the
    # directory to an archive file name.
    freshUrl="$("$own_dir"/urls-from-page.sh "$(getAttr downloadPage)" |
      eval "egrep '$(getAttr sourceRegexp '[-][0-9.]+/$')'" |
      eval "egrep -v '$(getAttr blacklistRegexp '^$')'" |
      eval "$(getAttr choiceCommand 'head -n 1')" |
      eval "$(getAttr versionToFileCommand "sed -re 's@/([^/]*-[0-9.]+)/@/\1/\1$(getAttr fileSuffix .tar.gz)@'")"
      )"

    # A link without a scheme (no ':') is relative to the download page.
    if ! egrep ':' <<< "$freshUrl" ; then
      freshUrl="$(dirname "$(getAttr downloadPage).")/$freshUrl"
    fi

    echo "Found download link: $freshUrl" >&2
  else
    freshUrl="$forcedUrl"
  fi

  # Skip the interactive SourceForge redirect page.
  freshUrl="$(echo "$freshUrl" | sed -re "$skipRedirectSF")"
  echo "Sourceforge-corrected URL: $freshUrl" >&2

  version="$(echo "$freshUrl" |
    sed -re "$(getAttr versionExtractorSedScript "$extractVersionSF")")"
  baseName="$(getAttr baseName "$(echo "$freshUrl" | sed -re 's@.*/project/([^/]+)/.*@\1@')")"
  url="$freshUrl"
  name="$baseName-$version"
  advertisedUrl="$freshUrl"

  # Upstream link unchanged: nothing to do.
  if [ x"$freshUrl" = x"$(cat "$src_defs_dir"/advertisedUrl)" ]; then
    echo "Source link not changed" >&2
    exit
  fi
  hash=$(nix-prefetch-url "$freshUrl")

  prefetchVars="url advertisedUrl";
}
||||
# method = fetchSF: pick the freshest source archive linked from a
# SourceForge download page.
[ fetchSF = "$method" ] && {
  if [ -z "$forcedUrl" ]; then
    # Scrape candidate links, keep archive-looking ones, drop
    # blacklisted ones, let choiceCommand pick one.
    freshUrl="$("$own_dir"/urls-from-page.sh "$(getAttr downloadPage)" |
      eval "egrep \"$(getAttr sourceRegexp '.*[.]tar[.].*|.*[.]tgz$|.*[.]tbz2$')\"" |
      eval "egrep -v \"$(getAttr blacklistRegexp '^$')\"" |
      eval "$(getAttr choiceCommand 'head -1')")"

    # A link without a scheme (no ':') is relative to the download page.
    if ! egrep ':' <<< "$freshUrl" ; then
      freshUrl="$(dirname "$(getAttr downloadPage).")/$freshUrl"
    fi

    echo "Found download link: $freshUrl" >&2
  else
    freshUrl="$forcedUrl"
  fi

  # Skip the interactive SourceForge redirect page.
  freshUrl="$(echo "$freshUrl" | sed -re "$skipRedirectSF")"
  echo "Sourceforge-corrected URL: $freshUrl" >&2

  version="$(echo "$freshUrl" |
    sed -re "$(getAttr versionExtractorSedScript "$extractVersionSF")")"
  baseName="$(getAttr baseName "$(echo "$freshUrl" | sed -re 's@.*/project/([^/]+)/.*@\1@')")"
  url="$freshUrl"
  name="$baseName-$version"
  advertisedUrl="$freshUrl"

  # Upstream link unchanged: nothing to do.
  if [ x"$freshUrl" = x"$(cat "$src_defs_dir"/advertisedUrl)" ]; then
    echo "Source link not changed" >&2
    exit
  fi
  hash=$(nix-prefetch-url "$freshUrl")

  prefetchVars="url advertisedUrl";
}
||||
# method = fetchurl: pick the freshest source archive from the download
# page, substitute ${version} back into the URL and apply the optional
# mirror rewrite, then prefetch.
[ fetchurl = "$method" ] && {
  if [ -z "$forcedUrl" ] ; then
    # Scrape candidate links, keep archive-looking ones, drop
    # blacklisted ones, let choiceCommand pick one.
    freshUrl="$("$own_dir"/urls-from-page.sh "$(getAttr downloadPage)" |
      eval "egrep \"$(getAttr sourceRegexp '.*[.]tar[.].*|.*[.]tgz$|.*[.]tbz2$')\"" |
      eval "egrep -v \"$(getAttr blacklistRegexp '^$')\"" |
      eval "$(getAttr choiceCommand 'head -1')")"

    # A link without a scheme (no ':') is relative to the download page.
    if ! egrep ':' <<< "$freshUrl" ; then
      freshUrl="$(dirname "$(getAttr downloadPage).")/$freshUrl"
    fi

    echo "Found download link: $freshUrl" >&2
  else
    freshUrl="$forcedUrl"
  fi

  version="$(echo "$freshUrl" |
    eval "sed -re \"$(getAttr versionExtractorSedScript \
      's/.*-([0-9.]+)[.].*/\1/')\"")"

  # Replace the concrete version with ${version} and apply the
  # optional mirror rewrite (mirrorSedScript defaults to a no-op).
  mirrorUrl="$(echo "$freshUrl" |
    eval "sed -r -e \"$(getAttr versionReferenceCreator \
      's/-'"${version}"'[.]/-\${version}./')\"" |
    eval "sed -r -e \"$(getAttr mirrorSedScript)\"")"

  name="$baseName-$version"

  advertisedUrl="$freshUrl"
  # (Previously assigned twice; one redundant assignment removed.)
  url="$mirrorUrl"

  # Upstream link unchanged: nothing to do.
  if [ x"$freshUrl" = x"$(cat "$src_defs_dir"/advertisedUrl)" ]; then
    echo "Source link not changed" >&2
    exit
  fi
  hash=$(nix-prefetch-url "$freshUrl")

  prefetchVars="url advertisedUrl";
}
||||
# method = fetchgit: ask nix-prefetch-git for the requested (or latest)
# revision and its hash; the revision doubles as the version.
[ "fetchgit" = "$method" ] && {
  repoUrl="$(getAttr repoUrl)"
  # Print the checked-out HEAD during prefetch so the revision shows up
  # in the command's output.
  export NIX_PREFETCH_GIT_CHECKOUT_HOOK="
    cat .git/HEAD
  "
  export NIX_HASH_ALGO=sha256
  rev="$(getAttr rev '')";
  # The last two output lines are the revision and the hash.
  rev_and_hash="$("$own_dir"/../fetchgit/nix-prefetch-git "$repoUrl" "$rev" | tee /dev/stderr | tail -2)"

  rev="$(echo "$rev_and_hash" | head -1)"
  url="$repoUrl";
  hash="$(echo "$rev_and_hash" | tail -1)"
  version="$rev"
  name="$baseName-$version"

  prefetchVars="rev url";
}
||||
|
||||
# Serialize the collected variables into the new src-for-*.nix.
# Each entry becomes a `name="value";` line in the generated set.
prefetchAssignments="";
for i in $commonPrefetchVars $prefetchVars; do
  prefetchAssignments="$prefetchAssignments $i=\"$(eval echo \"\$$i\")\";$(echo -e '\n  ')"
done;

# Extra attributes requested via extraVars are computed by the
# corresponding eval_<name> snippets and appended the same way.
extraAssignments=""
for i in $(getAttr extraVars ''); do
  eval "$(getAttr "eval_$i" 'i=""')"
  extraAssignments="$extraAssignments $i=\"$(eval echo \"\$$i\")\";$(echo -e '\n  ')"
done

# Overwrite the source description with a recursive attribute set.
cat << EOF > "$new_src_file"
rec {
  $prefetchAssignments
  $extraAssignments
}
EOF
|
|
@ -1,20 +0,0 @@
|
|||
# Service-specific helpers (SourceForge, GitHub) available to
# update-walker config scripts.

# Resolve the SourceForge redirect chain and canonicalize the result
# to a mirror://sourceforge/ URL without query parameters.
SF_redirect () {
  redirect 99
  process 'http://[a-z]+[.]dl[.]sourceforge[.]net/' 'mirror://sourceforge/'
  process '[?].*' ''
}

# Choose the freshest versioned subdirectory; $1 optionally prefixes
# the version digits in the directory name.
SF_version_dir () {
  version_link 'http://sourceforge.net/.+/'"$1"'[0-9.]+/$'
}

# Choose the freshest archive /download link; $1 overrides the
# archive-name regexp (default: names containing ".tar.").
SF_version_tarball () {
  version_link "${1:-[.]tar[.]}.*/download\$"
}

# Track the latest commit of a GitHub project: prefetch with
# nix-prefetch-git and use a date-stamped pseudo-version.
GH_latest () {
  prefetch_command_rel ../fetchgit/nix-prefetch-git
  revision "$("$(dirname "$0")/urls-from-page.sh" "$CURRENT_URL/commits" | grep /commit/ | head -n 1 | xargs basename )"
  version '.*' "git-$(date +%Y-%m-%d)"
  NEED_TO_CHOOSE_URL=
}
|
|
@ -1,320 +0,0 @@
|
|||
#! /bin/sh
# update-walker: semi-declarative package update helper.
# Global state shared by the commands defined below.

own_dir="$(cd "$(dirname "$0")"; pwd)"   # directory of this script

URL_WAS_SET=            # set once a config called `url` explicitly
DL_URL_RE=              # regexp used to recognize download links
CURRENT_URL=            # the link currently being followed
CURRENT_REV=            # VCS revision, when applicable
PREFETCH_COMMAND=       # hash command; `hash` defaults to nix-prefetch-url
NEED_TO_CHOOSE_URL=1    # cleared once a download link has been chosen
|
||||
# Explicitly select the page to scan for download links, and record
# that the config chose it by hand.
url () {
  CURRENT_URL="$1"
  URL_WAS_SET=1
}

# Override the regexp used to recognize download links.
dl_url_re () {
  DL_URL_RE="$1"
}
|
||||
|
||||
# Explode a version string into space-separated numeric fields so that
# a numeric sort orders it correctly; pre-release tags map to negative
# keys (alpha < beta < gamma < rc/pre < plain release).
version_unpack () {
  sed -re '
    s/[.]/ /g;
    s@/@ / @g
    s/-(rc|pre)/ -1 \1 /g;
    s/-(gamma)/ -2 \1 /g;
    s/-(beta)/ -3 \1 /g;
    s/-(alpha)/ -4 \1 /g;
    s/[-]/ - /g;
  '
}

# Inverse of version_unpack: reassemble the dotted version string.
version_repack () {
  sed -re '
    s/ - /-/g;
    s/ -[0-9]+ ([a-z]+) /-\1/g;
    s@ / @/@g
    s/ /./g;
  '
}

# Sort stdin lines by version, freshest first, comparing the first 30
# unpacked fields numerically.
version_sort () {
  version_unpack |
    sort -t ' ' -n $(printf ' -k%sn' $(seq 30)) | tac |
    version_repack
}
|
||||
|
||||
# position_choice [N [M]]
# From stdin keep the last M lines (default 1) of the first N lines
# (default 1) — i.e. with one argument, exactly line N.
position_choice () {
  head -n "${1:-1}" | tail -n "${2:-1}"
}

# List every link on the current page matching extended regexp $1.
matching_links () {
  "$own_dir"/urls-from-page.sh "$CURRENT_URL" | grep -E "$1"
}
|
||||
|
||||
# Make the link matching regexp $1 (position selected by $2/$3, see
# position_choice) the current URL.
link () {
  CURRENT_URL="$(matching_links "$1" | position_choice "$2" "$3")"
  echo "Linked by: $*"
  echo "URL: $CURRENT_URL" >&2
  unset NEED_TO_CHOOSE_URL
}

# Like link, but order the matching links freshest-version-first
# before choosing.
version_link () {
  CURRENT_URL="$(matching_links "$1" | version_sort | position_choice "$2" "$3")"
  echo "Linked version by: $*"
  echo "URL: $CURRENT_URL" >&2
  unset NEED_TO_CHOOSE_URL
}
|
||||
|
||||
# Filter stdin, substituting extended regexp $1 with $2.
# NOTE(review): a space is the sed delimiter here, so the arguments
# must not contain spaces — TODO confirm callers respect this.
replace () {
  sed -re "s $1 $2 g"
}

# Follow up to $1 HTTP redirects (default 99) of the current URL and
# adopt the Location header of the redirect selected by $2/$3
# (default: the last one seen).
redirect () {
  CURRENT_URL="$(curl -I -L --max-redirs "${1:-99}" "$CURRENT_URL" |
    grep -E '^[Ll]ocation: ' | position_choice "${2:-999999}" "$3" |
    sed -e 's/^[Ll]ocation: //; s/\r//')"
  echo "Redirected: $*"
  echo "URL: $CURRENT_URL" >&2
}

# Apply `replace` to the current URL itself.
process () {
  CURRENT_URL="$(echo "$CURRENT_URL" | replace "$1" "$2")"
  echo "Processed: $*"
  echo "URL: $CURRENT_URL" >&2
}
|
||||
|
||||
# Derive the package version from the current URL by substituting
# regexp $1 with $2 (usually a capture-group reference).
version () {
  CURRENT_VERSION="$(echo "$CURRENT_URL" | replace "$1" "$2")"
  echo "Version: $CURRENT_VERSION" >&2
}

# Default extraction: the digits-and-dots run between the last dash
# and a dash/dot/underscore — unless a version was already chosen.
ensure_version () {
  echo "Ensuring version. CURRENT_VERSION: $CURRENT_VERSION" >&2
  [ -z "$CURRENT_VERSION" ] && version '.*-([0-9.]+)[-._].*' '\1'
}
|
||||
|
||||
# Default the rewrite target: the config name with .upstream swapped
# for .nix — unless the config already called `target`.
ensure_target () {
  echo "Ensuring target. CURRENT_TARGET: $CURRENT_TARGET" >&2
  [ -z "$CURRENT_TARGET" ] && target "$(basename "$CONFIG_NAME" .upstream).nix"
}

# Default the derivation name from the config's directory name.
ensure_name () {
  echo "Ensuring name. CURRENT_NAME: $CURRENT_NAME" >&2
  [ -z "$CURRENT_NAME" ] && name "$(basename "$CONFIG_DIR")"
  echo "Resulting name: $CURRENT_NAME"
}

# Default the nixpkgs attribute name from the derivation name.
ensure_attribute_name () {
  echo "Ensuring attribute name. CURRENT_ATTRIBUTE_NAME: $CURRENT_ATTRIBUTE_NAME" >&2
  ensure_name
  [ -z "$CURRENT_ATTRIBUTE_NAME" ] && attribute_name "$CURRENT_NAME"
  echo "Resulting attribute name: $CURRENT_ATTRIBUTE_NAME"
}
|
||||
|
||||
# Default the starting URL from package metadata, trying
# meta.downloadPage, meta.downloadpage, then meta.homepage.
ensure_url () {
  echo "Ensuring starting URL. CURRENT_URL: $CURRENT_URL" >&2
  ensure_attribute_name
  [ -z "$CURRENT_URL" ] && CURRENT_URL="$(retrieve_meta downloadPage)"
  [ -z "$CURRENT_URL" ] && CURRENT_URL="$(retrieve_meta downloadpage)"
  [ -z "$CURRENT_URL" ] && CURRENT_URL="$(retrieve_meta homepage)"
  echo "Resulting URL: $CURRENT_URL"
}

# Make sure a concrete download link was chosen; when the config did
# not choose one, follow the freshest link matching DL_URL_RE
# (defaulting to a tarball suffix).  Abort when no URL was found.
ensure_choice () {
  echo "Ensuring that choice is made." >&2
  echo "NEED_TO_CHOOSE_URL: [$NEED_TO_CHOOSE_URL]." >&2
  echo "CURRENT_URL: $CURRENT_URL" >&2
  [ -z "$URL_WAS_SET" ] && [ -z "$CURRENT_URL" ] && ensure_url
  [ -n "$NEED_TO_CHOOSE_URL" ] && {
    version_link "${DL_URL_RE:-[.]tar[.]([^./])+\$}"
    unset NEED_TO_CHOOSE_URL
  }
  [ -z "$CURRENT_URL" ] && {
    echo "Error: empty CURRENT_URL"
    echo "Error: empty CURRENT_URL" >&2
    exit 1
  }
}
||||
|
||||
# Record the VCS revision to fetch.
revision () {
  CURRENT_REV="$1"
  echo "CURRENT_REV: $CURRENT_REV"
}

# Use $1 as the command that prefetches sources and prints the hash.
prefetch_command () {
  PREFETCH_COMMAND="$1"
}

# Same, with the command path taken relative to this script.
prefetch_command_rel () {
  PREFETCH_COMMAND="$(dirname "$0")/$1"
}
|
||||
|
||||
# Compute the source hash unless the config already provided one.
ensure_hash () {
  echo "Ensuring hash. CURRENT_HASH: $CURRENT_HASH" >&2
  [ -z "$CURRENT_HASH" ] && hash
}

# Prefetch the current URL (plus revision, for VCS prefetchers) and
# record the resulting hash.  Shadows the shell builtin `hash`.
hash () {
  CURRENT_HASH="$(${PREFETCH_COMMAND:-nix-prefetch-url} "$CURRENT_URL" $CURRENT_REV)"
  echo "CURRENT_HASH: $CURRENT_HASH" >&2
}
|
||||
|
||||
# Record the derivation (package) name.
name () {
  CURRENT_NAME="$1"
  echo "CURRENT_NAME: $CURRENT_NAME" >&2
}

# Record the nixpkgs attribute name used for metadata lookups.
attribute_name () {
  CURRENT_ATTRIBUTE_NAME="$1"
  echo "CURRENT_ATTRIBUTE_NAME: $CURRENT_ATTRIBUTE_NAME" >&2
}
|
||||
|
||||
# Evaluate meta.$1 of the tracked attribute in <nixpkgs>; the xargs
# pass strips the quoting from the evaluated Nix string.
retrieve_meta () {
  nix-instantiate --eval-only '<nixpkgs>' -A "$CURRENT_ATTRIBUTE_NAME".meta."$1" | xargs
}

# Cache the currently packaged version for later comparison.
retrieve_version () {
  PACKAGED_VERSION="$(retrieve_meta version)"
}
|
||||
|
||||
# Default DL_URL_RE from meta.downloadURLRegexp when still unset.
ensure_dl_url_re () {
  echo "Ensuring DL_URL_RE. DL_URL_RE: $DL_URL_RE" >&2
  [ -z "$DL_URL_RE" ] && dl_url_re "$(retrieve_meta downloadURLRegexp)"
  echo "DL_URL_RE: $DL_URL_RE" >&2
}

# Absolute directory containing $1.  Changes the working directory, so
# call it inside $(...) as the existing callers do.
directory_of () {
  cd "$(dirname "$1")"; pwd
}

# Absolute path of $1.
full_path () {
  echo "$(directory_of "$1")/$(basename "$1")"
}
|
||||
|
||||
# Select the expression file to rewrite; a relative path is resolved
# against the config's directory.
target () {
  CURRENT_TARGET="$1"
  if [ "$CURRENT_TARGET" = "${CURRENT_TARGET#/}" ]; then
    CURRENT_TARGET="$CONFIG_DIR/$CURRENT_TARGET"
  fi
  echo "Target set to: $CURRENT_TARGET"
}

# Change the marker line after which the generated block is placed.
marker () {
  BEGIN_EXPRESSION="$1"
}

# Succeeds exactly when the upstream version differs from the one
# currently packaged.
update_found () {
  echo "Compare: $CURRENT_VERSION vs $PACKAGED_VERSION"
  [ "$CURRENT_VERSION" != "$PACKAGED_VERSION" ]
}
|
||||
|
||||
# Emit the generated upstream-information block.  $1 is the leading
# indentation of each line; $2 is the original closing line, echoed
# verbatim at the end.  rev= is only written when a revision is known.
do_write_expression () {
  echo "${1}rec {"
  echo "${1}  baseName=\"$CURRENT_NAME\";"
  echo "${1}  version=\"$CURRENT_VERSION\";"
  echo "${1}  name=\"\${baseName}-\${version}\";"
  echo "${1}  hash=\"$CURRENT_HASH\";"
  echo "${1}  url=\"$CURRENT_URL\";"
  if [ -n "$CURRENT_REV" ]; then
    echo "${1}  rev=\"$CURRENT_REV\";"
  fi
  echo "${1}  sha256=\"$CURRENT_HASH\";"
  echo "$2"
}
|
||||
|
||||
# Print the 1-based line number of the $3-rd match (default: first)
# of extended regexp $2 in file $1.
line_position () {
  file="$1"
  regexp="$2"
  count="${3:-1}"
  grep -E "$regexp" -m "$count" -B 999999 "$file" | wc -l
}

# In-place substitute regexp $2 -> $3 on the line holding the $4-th
# match (default: first) in file $1.
# NOTE(review): a space delimits the sed s-command, so $2/$3 must not
# contain spaces — yet set_var_value passes patterns with spaces;
# TODO confirm intended behavior.
replace_once () {
  file="$1"
  regexp="$2"
  replacement="$3"
  instance="${4:-1}"

  echo "Replacing once:"
  echo "file: [[$file]]"
  echo "regexp: [[$regexp]]"
  echo "replacement: [[$replacement]]"
  echo "instance: [[$instance]]"

  position="$(line_position "$file" "$regexp" "$instance")"
  sed -re "${position}s $regexp $replacement " -i "$file"
}
|
||||
|
||||
# set_var_value VAR VALUE [INSTANCE] [FILE] [NO_QUOTES]
# Rewrite the INSTANCE-th (default: first) assignment of VAR in FILE
# (default: the current target) to VALUE, double-quoted unless
# NO_QUOTES evaluates to non-zero.
set_var_value () {
  var="${1}"
  value="${2}"
  instance="${3:-1}"
  file="${4:-$CURRENT_TARGET}"
  no_quotes="${5:-0}"

  quote='"'
  let "$no_quotes" && quote=""

  replace_once "$file" "${var} *= *.*" "${var} = ${quote}${value}${quote};" "$instance"
}
|
||||
|
||||
# Re-emit file $1 with everything between the marker line
# ($BEGIN_EXPRESSION) and the closing "};" line replaced by a fresh
# upstream-information block, reusing the closing line's indentation.
do_regenerate () {
  BEFORE="$(cat "$1" | grep -F "$BEGIN_EXPRESSION" -B 999999;)"
  AFTER_EXPANDED="$(cat "$1" | grep -F "$BEGIN_EXPRESSION" -A 999999 | grep -E '^ *[}] *; *$' -A 999999;)"
  AFTER="$(echo "$AFTER_EXPANDED" | tail -n +2)"
  CLOSE_BRACE="$(echo "$AFTER_EXPANDED" | head -n 1)"
  SPACING="$(echo "$CLOSE_BRACE" | sed -re 's/[^ ].*//')"

  echo "$BEFORE"
  do_write_expression "$SPACING" "$CLOSE_BRACE"
  echo "$AFTER"
}

# Regenerate the target expression via a temporary file, then rename.
do_overwrite () {
  ensure_hash
  do_regenerate "$1" > "$1.new.tmp"
  mv "$1.new.tmp" "$1"
}

# Cheaper rewrite: patch only the version= and sha256= assignments.
do_overwrite_just_version () {
  ensure_hash
  set_var_value version $CURRENT_VERSION
  set_var_value sha256 $CURRENT_HASH
}

# Switch do_overwrite to the minimal version/hash-only rewrite.
minimize_overwrite() {
  do_overwrite(){
    do_overwrite_just_version
  }
}
|
||||
|
||||
# Run one updater config: load it (or synthesize defaults when called
# on a bare .nix file), find the freshest upstream release, and rewrite
# the target expression when the version changed.
process_config () {
  CONFIG_DIR="$(directory_of "$1")"
  CONFIG_NAME="$(basename "$1")"
  BEGIN_EXPRESSION='# Generated upstream information';
  if [ -f "$CONFIG_DIR/$CONFIG_NAME" ] &&
    [ "${CONFIG_NAME}" = "${CONFIG_NAME%.nix}" ]; then
    # A real .upstream config file: execute it as shell commands.
    source "$CONFIG_DIR/$CONFIG_NAME"
  else
    # Called directly on a .nix file: only permitted when the package
    # opts in via meta.updateWalker, or FORCE_UPDATE_WALKER=1 is set.
    CONFIG_NAME="${CONFIG_NAME%.nix}"
    ensure_attribute_name
    [ -n "$(retrieve_meta updateWalker)" ] ||
      [ -n "$FORCE_UPDATE_WALKER" ] || {
      echo "Error: package not marked as safe for update-walker" >&2
      echo "Set FORCE_UPDATE_WALKER=1 to override" >&2
      exit 1;
    }
    # Without meta.fullRegenerate only version= and sha256= get patched.
    [ -z "$(retrieve_meta fullRegenerate)" ] && eval "
      minimize_overwrite
    "
  fi
  # Fill in every default the config left unset, then update if needed.
  ensure_attribute_name
  retrieve_version
  ensure_dl_url_re
  ensure_choice
  ensure_version
  ensure_target
  update_found && do_overwrite "$CURRENT_TARGET"
}
|
||||
|
||||
# Load service-specific helpers (SourceForge/GitHub) for configs to use.
source "$own_dir/update-walker-service-specific.sh"

process_config "$1"
|
|
@ -1,118 +0,0 @@
|
|||
update-walker is an imperative semiautomated update helper.
|
||||
|
||||
It runs the X.upstream file to find the freshest version of the package in
|
||||
the specified upstream source and updates the corresponding X.nix file.
|
||||
|
||||
|
||||
|
||||
The simplest available commands:
|
||||
|
||||
url: set the upstream source list URL equal to $1; the default is
|
||||
meta.downloadPage with meta.homepage fallback
|
||||
|
||||
dl_url_re: set the regular expression used to select download links to $1; the
|
||||
default is meta.downloadURLRegexp or '[.]tar[.]([^./])+\$' if it is not set
|
||||
|
||||
target: specify target expression; default is to replace .upstream extension
|
||||
with .nix extension
|
||||
|
||||
name: specify the derivation name; default is the basename of the dirname
|
||||
of the .upstream file
|
||||
|
||||
attribute_name: specify the attribute name to evaluate for getting the current
|
||||
version from meta.version; default is to use the derivation name
|
||||
|
||||
minimize_overwrite: set config options that mean that only version= and
|
||||
sha256= have to be replaced; the default is to regenerate a full upstream
|
||||
description block with url, name, version, hash etc.
|
||||
|
||||
|
||||
|
||||
A lot of packages can be updated in a pseudo-declarative style using only
|
||||
the commands from the previous paragraph.
|
||||
|
||||
Some packages do not need any non-default settings; in these cases just setting
|
||||
meta.updateWalker to true is enough, you can run update-walker directly on the
|
||||
.nix file afterwards. In this case minimize_overwrite is implied unless
|
||||
meta.fullRegenerate is set.
|
||||
|
||||
|
||||
|
||||
For packages that require more fine-grained control than the described options
allow, you need to take into account the default control flow of the tool.
|
||||
|
||||
First, the definitions from update-walker script and additional definitions
|
||||
from update-walker-service-specific.sh are loaded. Then the config is executed
|
||||
as a shell script. Some of the commands it can use do remember whether they
|
||||
have been used. Afterwards the following steps happen:
|
||||
|
||||
attribute_name is set to name unless it has been already set
|
||||
|
||||
meta.version is read from the NixPkgs package called attribute_name
|
||||
|
||||
download URL regexp is set to default unless it has been already set in the
|
||||
updater script
|
||||
|
||||
the download page URL gets set to default value unless it has been set
|
||||
previously
|
||||
|
||||
if the action of getting the download page and choosing the freshest link by
|
||||
version has not yet been taken, it happens
|
||||
|
||||
if the version has not yet been extracted from the URL, it gets extracted
|
||||
|
||||
target nix expression to update gets set to the default value unless it has
|
||||
been set explicitly
|
||||
|
||||
if the URL version is fresher than the packaged version, the new file gets
|
||||
downloaded and its hash is calculated
|
||||
|
||||
do_overwrite function is called; the default calculates a big upstream data
|
||||
block and puts it after the '# Generated upstream information' marker (the
|
||||
marker can be changed by the command marker)
|
||||
|
||||
|
||||
|
||||
If the update needs some special logic, it is put into the updater script and
|
||||
the corresponding steps are skipped because the needed action has already been
|
||||
performed.
|
||||
|
||||
For example:
|
||||
|
||||
minimize_overwrite is exactly the same as
|
||||
|
||||
do_overwrite() { do_overwrite_just_version; }
|
||||
|
||||
redefinition. You can do a more complex do_overwrite redefinition, if needed.
|
||||
It can probably use ensure_hash to download the source and calculate the hash
|
||||
and set_var_value.
|
||||
|
||||
set_var_value alters the $3-th instance of assigning the $1 name in the
|
||||
expression to the value $2. $3 defaults to 1. It can modify $4 instead of the
|
||||
current target, it can put the value without quotes if $5 is 1.
|
||||
|
||||
|
||||
|
||||
Typical steps include:
|
||||
|
||||
ensure_choice: download current URL and find the freshest version link on the
|
||||
page, it is now the new URL
|
||||
|
||||
ensure_hash: download current URL and calculate the source package hash
|
||||
|
||||
ensure_version: extract version from the URL
|
||||
|
||||
SF_redirect: replace the current URL with a SourceForge.net mirror:// URL
|
||||
|
||||
SF_version_dir: assume SourceForge.net layout and choose the freshest
|
||||
version-named subdirectory in the file catalog; you can optionally specify $1
|
||||
as a directory name regexp (digits and periods will be required after it)
|
||||
|
||||
SF_version_tarball: assume SourceForge.net layout and choose the freshest
|
||||
tarball download link
|
||||
|
||||
version: apply replacement of $1 with $2 (extended regexp format) to extract
|
||||
the version from URL
|
||||
|
||||
version_link: choose the freshest versioned link, $1 is the regexp of
|
||||
acceptable links
|
|
@ -236,7 +236,6 @@ let
|
|||
name = str;
|
||||
version = str;
|
||||
tag = str;
|
||||
updateWalker = bool;
|
||||
executables = listOf str;
|
||||
outputsToInstall = listOf str;
|
||||
position = str;
|
||||
|
@ -244,7 +243,6 @@ let
|
|||
repositories = attrsOf str;
|
||||
isBuildPythonPackage = platforms;
|
||||
schedulingPriority = int;
|
||||
downloadURLRegexp = str;
|
||||
isFcitxEngine = bool;
|
||||
isIbusEngine = bool;
|
||||
isGutenprint = bool;
|
||||
|
|
Loading…
Reference in a new issue