Skip to content

Commit

Permalink
Add a release tag backfill script. (pantsbuild#17806)
Browse files Browse the repository at this point in the history
The new script was used to successfully backfill all Pants release tag mappings
to S3. The release workflow is fixed to scope its sync to just tags, and the
underlying helper deploy_to_s3.py code now skips index.html generation when
there is no data to generate it from.
  • Loading branch information
jsirois authored Dec 14, 2022
1 parent 2d8f1e3 commit a837b35
Show file tree
Hide file tree
Showing 5 changed files with 100 additions and 19 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/release.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ jobs:
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
if: github.event_name == 'push' || github.event_name == 'workflow_dispatch'
name: Deploy to S3
run: ./build-support/bin/deploy_to_s3.py
run: ./build-support/bin/deploy_to_s3.py --scope tags/pantsbuild.pants
name: Record Release Commit
'on':
push:
Expand Down
39 changes: 39 additions & 0 deletions build-support/bin/backfill_s3_release_tag_mappings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

import argparse
import subprocess
from pathlib import Path

from deploy_to_s3 import perform_deploy


def main() -> None:
    """Backfill release-tag -> commit-sha mapping files and sync them to S3.

    Enumerates all `release_*` git tags, writes one file per tag (named after the
    tag and containing the tag's commit sha) under dist/deploy/tags/pantsbuild.pants,
    then deploys just that subtree to S3 via `perform_deploy`.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--aws-cli-symlink-path",
        help=(
            # NB: The trailing space is required; adjacent string literals are
            # concatenated with no separator.
            "The directory (on the $PATH) to symlink the `aws` cli binary into; by default a "
            "standard PATH entry appropriate to the current operating system."
        ),
    )
    options = parser.parse_args()

    # exist_ok=False: fail fast if a prior run left files behind so the mapping
    # set is always built fresh.
    tags_deploy_dir = Path("dist/deploy/tags/pantsbuild.pants")
    tags_deploy_dir.mkdir(parents=True, exist_ok=False)

    release_tags = subprocess.run(
        ["git", "tag", "--list", "release_*"], stdout=subprocess.PIPE, text=True, check=True
    ).stdout.splitlines()
    for release_tag in release_tags:
        tag = release_tag.strip()
        # `<tag>^{commit}` dereferences annotated tags to the underlying commit sha.
        commit = subprocess.run(
            ["git", "rev-parse", f"{tag}^{{commit}}"], stdout=subprocess.PIPE, text=True, check=True
        ).stdout.strip()
        (tags_deploy_dir / tag).write_text(commit)

    perform_deploy(aws_cli_symlink_path=options.aws_cli_symlink_path, scope="tags/pantsbuild.pants")


if __name__ == "__main__":
main()
56 changes: 47 additions & 9 deletions build-support/bin/deploy_to_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import annotations

import argparse
import os
import shutil
import subprocess
Expand All @@ -10,14 +13,40 @@


def main() -> None:
    """Parse CLI options and deploy dist/deploy (or a subdirectory of it) to S3."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--scope",
        help=(
            "The subdirectory of dist/deploy to deploy to S3; by default, everything under that "
            "directory."
        ),
    )
    # This option must be declared: `options.aws_cli_symlink_path` is read below, and
    # argparse's Namespace raises AttributeError for undeclared attributes.
    parser.add_argument(
        "--aws-cli-symlink-path",
        help=(
            "The directory (on the $PATH) to symlink the `aws` cli binary into; by default a "
            "standard PATH entry appropriate to the current operating system."
        ),
    )
    options = parser.parse_args()
    perform_deploy(aws_cli_symlink_path=options.aws_cli_symlink_path, scope=options.scope)


def perform_deploy(*, aws_cli_symlink_path: str | None = None, scope: str | None = None):
    """Sync the local dist/deploy contents up to S3.

    Installs the `aws` CLI first if it is not already discoverable on the $PATH,
    symlinking it into `aws_cli_symlink_path` when given (otherwise into a
    system-standard $PATH directory).

    By default the full dist/deploy tree is synced to the S3 bucket mounted at
    https://binaries.pantsbuild.org; passing `scope` restricts the sync to just that
    subdirectory of dist/deploy and the corresponding "path" in the bucket.
    """
    aws_cli_available = shutil.which("aws") is not None
    if not aws_cli_available:
        install_aws_cli(symlink_path=aws_cli_symlink_path)
    validate_authentication()
    deploy(scope=scope)


def install_aws_cli() -> None:
subprocess.run(["./build-support/bin/install_aws_cli_for_ci.sh"], check=True)
def install_aws_cli(symlink_path: str | None = None) -> None:
    """Run the helper script that installs the `aws` CLI.

    When `symlink_path` is provided it is forwarded to the script via the
    AWS_CLI_SYMLINK_PATH environment variable to control which directory the
    `aws` binary gets symlinked into.
    """
    script_env = dict(os.environ)
    if symlink_path:
        script_env["AWS_CLI_SYMLINK_PATH"] = symlink_path
    subprocess.run(["./build-support/bin/install_aws_cli.sh"], env=script_env, check=True)


def validate_authentication() -> None:
Expand All @@ -29,9 +58,16 @@ def validate_authentication() -> None:
die(f"Must set {secret_access_key}.")


def deploy() -> None:
def deploy(scope: str | None = None) -> None:
# NB: we use the sync command to avoid transferring files that have not changed. See
# https://github.com/pantsbuild/pants/issues/7258.

local_path = "dist/deploy"
s3_dest = "s3://binaries.pantsbuild.org"
if scope:
local_path = f"{local_path}/{scope}"
s3_dest = f"{s3_dest}/{scope}"

subprocess.run(
[
"aws",
Expand All @@ -45,16 +81,18 @@ def deploy() -> None:
"--no-progress",
"--acl",
"public-read",
"dist/deploy",
"s3://binaries.pantsbuild.org",
str(local_path),
s3_dest,
],
check=True,
)

# Create/update the index file in S3. After running on both the MacOS and Linux shards
# the index file will contain the wheels for both.
for sha in os.listdir("dist/deploy/wheels/pantsbuild.pants/"):
subprocess.run(["./build-support/bin/create_s3_index_file.sh", sha])
wheels_dir = "dist/deploy/wheels/pantsbuild.pants"
if os.path.isdir(wheels_dir):
for sha in os.listdir(wheels_dir):
subprocess.run(["./build-support/bin/create_s3_index_file.sh", sha])


if __name__ == "__main__":
Expand Down
10 changes: 7 additions & 3 deletions build-support/bin/generate_github_workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -240,10 +240,13 @@ def install_go() -> Step:
}


def deploy_to_s3(when: str = "github.event_name == 'push'") -> Step:
def deploy_to_s3(when: str = "github.event_name == 'push'", scope: str | None = None) -> Step:
run = "./build-support/bin/deploy_to_s3.py"
if scope:
run = f"{run} --scope {scope}"
return {
"name": "Deploy to S3",
"run": "./build-support/bin/deploy_to_s3.py",
"run": run,
"if": when,
"env": {
"AWS_SECRET_ACCESS_KEY": f"{gha_expr('secrets.AWS_SECRET_ACCESS_KEY')}",
Expand Down Expand Up @@ -851,7 +854,8 @@ def release_jobs_and_inputs() -> tuple[Jobs, dict[str, Any]]:
),
},
deploy_to_s3(
when="github.event_name == 'push' || github.event_name == 'workflow_dispatch'"
when="github.event_name == 'push' || github.event_name == 'workflow_dispatch'",
scope="tags/pantsbuild.pants",
),
],
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

set -euo pipefail

# Install the AWS CLI in CI jobs.
# Install the AWS CLI.

# This is the fastest, most reliable way to install the AWS CLI on Linux and, particularly, MacOS.
# Using pip is broken on some systems, and package managers (e.g., brew) must be updated prior
Expand All @@ -22,17 +22,17 @@ if [[ ! -x "${AWS_CLI_BIN}" ]]; then

curl --fail "https://s3.amazonaws.com/aws-cli/awscli-bundle.zip" -o "awscli-bundle.zip"
unzip awscli-bundle.zip
# NB: We must run this with python3 because it defaults to `python`, which refers to Python 2 in Linux GitHub
# Actions CI job and is no longer supported.
# NB: We must run this with python3 because it defaults to `python`, which refers to Python 2 in
# Linux GitHub Actions CI jobs and is no longer supported.
python3 ./awscli-bundle/install --install-dir "${AWS_CLI_ROOT}"

popd

fi

# We symlink so that `aws` is discoverable on the $PATH. Our Docker image does not have `sudo`, whereas
# we need it for macOS to symlink into /usr/local/bin.
symlink="/usr/local/bin/aws"
# We symlink so that `aws` is discoverable on the $PATH. Our Docker image does not have `sudo`,
# whereas we need it for macOS to symlink into /usr/local/bin.
symlink="${AWS_CLI_SYMLINK_PATH:-/usr/local/bin/}"
if [[ ! -L "${symlink}" ]]; then
case "$(uname)" in
"Darwin")
Expand Down

0 comments on commit a837b35

Please sign in to comment.