2024-06-01 01:35:13 +02:00
|
|
|
import json
|
|
|
|
import subprocess
|
|
|
|
import itertools
|
|
|
|
import textwrap
|
|
|
|
from pathlib import Path
|
|
|
|
import tempfile
|
|
|
|
import hashlib
|
|
|
|
import datetime
|
2024-06-16 02:35:05 +02:00
|
|
|
|
2024-06-01 01:35:13 +02:00
|
|
|
from . import environment
|
2024-06-07 07:28:49 +02:00
|
|
|
from .environment import RelengEnvironment
|
2024-06-01 01:35:13 +02:00
|
|
|
from . import keys
|
2024-06-07 07:28:49 +02:00
|
|
|
from . import docker
|
2024-07-23 23:25:18 +02:00
|
|
|
from .version import VERSION, RELEASE_NAME, MAJOR, OFFICIAL_RELEASE
|
2024-06-09 09:50:40 +02:00
|
|
|
from .gitutils import verify_are_on_tag, git_preconditions
|
2024-06-16 02:35:05 +02:00
|
|
|
from . import release_notes
|
2024-06-01 01:35:13 +02:00
|
|
|
|
|
|
|
# Abort the release immediately if any subprocess exits nonzero, and show
# full Python tracebacks for easier debugging of a failed release run.
$RAISE_SUBPROC_ERROR = True
$XONSH_SHOW_TRACEBACK = True

# Scratch/staging locations used by the release process (all under ./release).
GCROOTS_DIR = Path('./release/gcroots')              # gc roots created during evaluation
BUILT_GCROOTS_DIR = Path('./release/gcroots-build')  # gc roots created during building
DRVS_TXT = Path('./release/drvs.txt')                # list of built .drv paths to upload
ARTIFACTS = Path('./release/artifacts')              # release artifacts staging directory
MANUAL = Path('./release/manual')                    # built manual staging directory

# Trailer appended to generated commit messages.
RELENG_MSG = "Release created with releng/create_release.xsh"

# Build parallelism passed to nix-store --realise.
BUILD_CORES = 16
MAX_JOBS = 2
|
|
|
|
|
|
|
|
|
2024-06-07 07:28:49 +02:00
|
|
|
def setup_creds(env: RelengEnvironment):
    """Obtain an ephemeral S3 key for *env* and export it via AWS_* env vars.

    Subsequent `aws` / `nix copy` invocations in this process pick the
    credentials up from the environment.
    """
    key = keys.get_ephemeral_key(env)
    $AWS_SECRET_ACCESS_KEY = key.secret_key
    $AWS_ACCESS_KEY_ID = key.id
    # 'garage' is the region name used by the Garage S3-compatible backend.
    $AWS_DEFAULT_REGION = 'garage'
    $AWS_ENDPOINT_URL = environment.S3_ENDPOINT
|
|
|
|
|
|
|
|
|
|
|
|
def official_release_commit_tag(force_tag=False):
    """Create a detached release commit with official_release=true and tag it.

    Returns the name of the branch we started on, so the caller can switch
    back and merge the tag into it (see merge_to_release).
    """
    print('[+] Setting officialRelease in version.json and tagging')
    prev_branch = $(git symbolic-ref --short HEAD).strip()

    # Detach HEAD so the release commit does not advance the original branch.
    git switch --detach

    # Must be done in two parts due to buffering (opening the file immediately
    # would truncate it).
    new_version_json = $(jq --indent 4 '.official_release = true' version.json)
    with open('version.json', 'w') as fh:
        fh.write(new_version_json)
    git add version.json

    message = f'release: {VERSION} "{RELEASE_NAME}"\n\nRelease produced with releng/create_release.xsh'
    git commit -m @(message)
    # -f lets a rerun replace a tag left behind by a previous attempt.
    git tag @(['-f'] if force_tag else []) -a -m @(message) @(VERSION)

    return prev_branch
|
|
|
|
|
|
|
|
|
|
|
|
def merge_to_release(prev_branch):
    """Merge the release tag back into *prev_branch* without changing its tree."""
    git switch @(prev_branch)
    # Create a merge back into the release branch so that git tools understand
    # that the release branch contains the tag, without the release commit
    # actually influencing the tree.
    merge_msg = textwrap.dedent("""\
        release: merge release {VERSION} back to mainline

        This merge commit returns to the previous state prior to the release but leaves the tag in the branch history.

        {RELENG_MSG}
    """).format(VERSION=VERSION, RELENG_MSG=RELENG_MSG)
    # -s ours: keep prev_branch's tree verbatim; only the history gains the tag.
    git merge -m @(merge_msg) -s ours @(VERSION)
|
|
|
|
|
|
|
|
|
|
|
|
def realise(paths: list[str]):
    """Build the given derivation paths, rooting the results in BUILT_GCROOTS_DIR."""
    build_flags = [
        '--realise',
        '--max-jobs', MAX_JOBS,
        '--cores', BUILD_CORES,
        '--log-format', 'bar-with-logs',
        '--add-root', BUILT_GCROOTS_DIR,
    ]
    nix-store @(build_flags) @(paths)
|
|
|
|
|
|
|
|
|
2024-06-13 23:03:27 +02:00
|
|
|
def eval_jobs(build_profile):
    """Evaluate the flake's release jobs for *build_profile* with nix-eval-jobs.

    Returns a list of job dicts; nix-eval-jobs emits one JSON object per line.
    """
    nej_output = $(nix-eval-jobs --workers 4 --gc-roots-dir @(GCROOTS_DIR) --force-recurse --flake f'.#release-jobs.{build_profile}')
    return [json.loads(s) for s in nej_output.strip().split('\n')]
|
2024-06-01 01:35:13 +02:00
|
|
|
|
|
|
|
|
2024-06-07 07:28:49 +02:00
|
|
|
def upload_drv_paths_and_outputs(env: RelengEnvironment, paths: list[str]):
    """Upload the given .drv paths and all of their outputs to env's cache.

    Feeds `nix copy --stdin` the derivation paths plus `drv^*` installables
    (which select every output of each derivation).

    Raises subprocess.CalledProcessError if nix copy exits nonzero.
    """
    proc = subprocess.Popen([
        'nix',
        'copy',
        '-v',
        '--to',
        env.cache_store_uri(),
        '--stdin',
    ],
        stdin=subprocess.PIPE,
        # Pass the current xonsh environment (including the AWS_* credentials
        # exported by setup_creds) down to nix.
        env=__xonsh__.env.detype(),
    )

    # Fix: the generator expression must be parenthesized — it is not the
    # sole argument to itertools.chain, so the bare form is a SyntaxError.
    proc.stdin.write('\n'.join(itertools.chain(paths, (x + '^*' for x in paths))).encode())
    proc.stdin.close()
    rv = proc.wait()
    if rv != 0:
        raise subprocess.CalledProcessError(rv, proc.args)
|
|
|
|
|
|
|
|
|
2024-06-16 22:34:04 +02:00
|
|
|
def make_manifest(builds_by_system):
|
|
|
|
|
2024-06-01 01:35:13 +02:00
|
|
|
def manifest_line(system, out):
|
|
|
|
return f' {system} = "{out}";'
|
|
|
|
|
|
|
|
manifest_text = textwrap.dedent("""\
|
|
|
|
# This file was generated by releng/create_release.xsh in Lix
|
|
|
|
{{
|
2024-06-16 22:34:04 +02:00
|
|
|
{lines}
|
2024-06-01 01:35:13 +02:00
|
|
|
}}
|
2024-06-16 22:34:04 +02:00
|
|
|
""").format(lines='\n'.join(manifest_line(s, p) for (s, p) in builds_by_system.items()))
|
2024-06-01 01:35:13 +02:00
|
|
|
|
|
|
|
return manifest_text
|
|
|
|
|
|
|
|
|
|
|
|
def make_git_tarball(to: Path):
    """Write a source tarball of the VERSION tag to *to*, prefixed lix-VERSION/."""
    git archive --verbose --prefix=lix-@(VERSION)/ --format=tar.gz -o @(to) @(VERSION)
|
|
|
|
|
|
|
|
|
|
|
|
def confirm(prompt, expected):
    """Show *prompt* and raise ValueError unless the operator types *expected* exactly."""
    if input(prompt) != expected:
        raise ValueError('Unconfirmed')
|
|
|
|
|
|
|
|
|
|
|
|
def sha256_file(f: Path):
    """Return the hex SHA-256 digest of file *f*, read in 1 MiB chunks."""
    digest = hashlib.sha256()
    with open(f, 'rb') as src:
        # Sentinel iteration: read() returns b'' at EOF, which stops the loop.
        for chunk in iter(lambda: src.read(1024 * 1024), b''):
            digest.update(chunk)
    return digest.hexdigest()
|
|
|
|
|
|
|
|
|
2024-06-16 22:34:04 +02:00
|
|
|
def extract_builds_by_system(eval_result):
    """Map system name -> 'out' store path for every 'build' job in *eval_result*."""
    builds = {}
    build_jobs = (job for job in eval_result if job['attrPath'][0] == 'build')
    for job in build_jobs:
        system = job['system']
        # Deliberately not a dict comprehension: the assert guarantees we
        # never silently overwrite a duplicate system entry.
        assert system not in builds
        builds[system] = job['outputs']['out']
    return builds
|
|
|
|
|
|
|
|
|
2024-06-01 01:35:13 +02:00
|
|
|
def make_artifacts_dir(eval_result, d: Path):
    """Assemble the release artifacts directory *d* from the evaluated jobs.

    Copies the prebuilt tarballs, writes manifest.nix both at the root and in
    the versioned directory, and creates the source tarball plus its
    .sha256 checksum file.
    """
    d.mkdir(exist_ok=True, parents=True)
    version_dir = d / 'lix' / f'lix-{VERSION}'
    version_dir.mkdir(exist_ok=True, parents=True)

    tarballs_drv = next(p for p in eval_result if p['attr'] == 'tarballs')
    cp --no-preserve=mode -r @(tarballs_drv['outputs']['out'])/* @(version_dir)

    builds_by_system = extract_builds_by_system(eval_result)

    # FIXME: upgrade-nix searches for manifest.nix at root, which is rather annoying
    with open(d / 'manifest.nix', 'w') as h:
        h.write(make_manifest(builds_by_system))

    with open(version_dir / 'manifest.nix', 'w') as h:
        h.write(make_manifest(builds_by_system))

    print('[+] Make sources tarball')

    filename = f'lix-{VERSION}.tar.gz'
    git_tarball = version_dir / filename
    make_git_tarball(git_tarball)

    file_hash = sha256_file(git_tarball)
    print(f'Hash: {file_hash}')
    # Fix: write the checksum next to the tarball as '<tarball>.sha256'
    # (the previous literal filename was a corrupted placeholder).
    with open(version_dir / f'{filename}.sha256', 'w') as h:
        h.write(file_hash)
|
|
|
|
|
|
|
|
|
|
|
|
def prepare_release_notes():
    """Build the release notes file, commit it, and remove the consumed fragments."""
    rl_path = release_notes.build_release_notes_to_file()

    commit_msg = textwrap.dedent("""\
        release: release notes for {VERSION}

        {RELENG_MSG}
    """).format(VERSION=VERSION, RELENG_MSG=RELENG_MSG)

    git add @(rl_path) @(release_notes.SUMMARY)
    # --ignore-unmatch: do not fail when there are no pending rl-next fragments.
    git rm --ignore-unmatch 'doc/manual/rl-next/*.md'

    git commit -m @(commit_msg)
|
|
|
|
|
|
|
|
|
2024-06-07 07:28:49 +02:00
|
|
|
def upload_artifacts(env: RelengEnvironment, noconfirm=False, no_check_git=False, force_push_tag=False):
    """Upload a built release to *env*: binary cache, docker registries,
    release bucket, manual, and the git tag.

    noconfirm      -- skip the interactive confirmation prompt
    no_check_git   -- skip the on-tag / clean-tree git preconditions
    force_push_tag -- pass -f when pushing the version tag
    """
    if not no_check_git:
        verify_are_on_tag()
        git_preconditions()
    # setup_creds must already have exported the ephemeral S3 credentials.
    assert 'AWS_SECRET_ACCESS_KEY' in __xonsh__.env

    # Show the operator exactly what is about to be uploaded.
    tree @(ARTIFACTS)

    env_part = f'environment {env.name}'
    # Short-circuit: the prompt only runs when noconfirm is false.
    not noconfirm and confirm(
        f'Would you like to release {ARTIFACTS} as {VERSION} in {env.colour(env_part)}? Type "I want to release this to {env.name}" to confirm\n',
        f'I want to release this to {env.name}'
    )

    docker_images = list((ARTIFACTS / f'lix/lix-{VERSION}').glob(f'lix-{VERSION}-docker-image-*.tar.gz'))
    assert docker_images

    print('[+] Upload to cache')
    with open(DRVS_TXT) as fh:
        # NOTE(review): `if x` is always true for readlines() output (lines
        # keep their trailing '\n'), so blank lines become '' entries after
        # strip() — presumably DRVS_TXT never contains blank lines; verify.
        upload_drv_paths_and_outputs(env, [x.strip() for x in fh.readlines() if x])

    print('[+] Upload docker images')
    for target in env.docker_targets:
        docker.upload_docker_images(target, docker_images)

    print('[+] Upload to release bucket')
    aws s3 cp --recursive @(ARTIFACTS)/ @(env.releases_bucket)/

    print('[+] Upload manual')
    upload_manual(env)

    print('[+] git push tag')
    git push @(['-f'] if force_push_tag else []) @(env.git_repo) f'{VERSION}:refs/tags/{VERSION}'
|
2024-06-01 01:35:13 +02:00
|
|
|
|
|
|
|
|
|
|
|
def do_tag_merge(force_tag=False, no_check_git=False):
    """Tag the release, merge the tag back into the starting branch, then
    check out the tag for the subsequent build steps."""
    if not no_check_git:
        git_preconditions()
    prev_branch = official_release_commit_tag(force_tag=force_tag)
    merge_to_release(prev_branch)
    # Leave the working tree detached on the release tag.
    git switch --detach @(VERSION)
|
|
|
|
|
|
|
|
|
2024-06-06 23:40:59 +02:00
|
|
|
def build_manual(eval_result):
    """Build the manual from the x86_64-linux job and copy it into MANUAL."""
    # The manual lives in the 'doc' output of the build.x86_64-linux job.
    (drv, manual) = next((x['drvPath'], x['outputs']['doc']) for x in eval_result if x['attr'] == 'build.x86_64-linux')
    print('[+] Building manual')
    realise([drv])

    cp --no-preserve=mode -T -vr @(manual)/share/doc/nix/manual @(MANUAL)
|
2024-06-06 23:40:59 +02:00
|
|
|
|
|
|
|
|
2024-06-07 07:28:49 +02:00
|
|
|
def upload_manual(env: RelengEnvironment):
    """Sync the built manual to env's docs bucket.

    Official releases go under their major version (and also update the
    'stable' alias); everything else goes under 'nightly'.
    """
    if OFFICIAL_RELEASE:
        version = MAJOR
    else:
        version = 'nightly'

    print('[+] aws s3 sync manual')
    aws s3 sync @(MANUAL)/ @(env.docs_bucket)/manual/lix/@(version)/
    if OFFICIAL_RELEASE:
        aws s3 sync @(MANUAL)/ @(env.docs_bucket)/manual/lix/stable/
|
2024-06-06 23:40:59 +02:00
|
|
|
|
|
|
|
|
2024-06-13 23:03:27 +02:00
|
|
|
def build_artifacts(build_profile, no_check_git=False):
    """Evaluate, build, and stage all release artifacts under ./release."""
    # Start from a clean slate so stale artifacts cannot leak into the release.
    rm -rf release/
    if not no_check_git:
        verify_are_on_tag()
        git_preconditions()

    print('[+] Evaluating')
    eval_result = eval_jobs(build_profile)
    drv_paths = [x['drvPath'] for x in eval_result]

    print('[+] Building')
    realise(drv_paths)
    build_manual(eval_result)

    with open(DRVS_TXT, 'w') as fh:
        # don't bother putting the release tarballs themselves because they are duplicate and huge
        fh.write('\n'.join(x['drvPath'] for x in eval_result if x['attr'] != 'lix-release-tarballs'))

    make_artifacts_dir(eval_result, ARTIFACTS)
    print(f'[+] Done! See {ARTIFACTS}')
|