Skip to content

Commit

Permalink
[doc][build/2] upload doc build artifacts (ray-project#46749)
Browse files Browse the repository at this point in the history
Upload the artifacts generated by the doc build to S3.

Test:
- CI
- aws s3 cp
s3://ray-ci-results/doc_build/6b33c4752fa83d36b1fb7248177d16bdc3fe22ae.tgz
.

Signed-off-by: can <[email protected]>
  • Loading branch information
can-anyscale authored Jul 23, 2024
1 parent 54e314f commit 19f13e5
Showing 1 changed file with 43 additions and 6 deletions.
49 changes: 43 additions & 6 deletions ci/ray_ci/doc/cmd_build.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,14 @@
import subprocess
import os

import boto3
import click

from ci.ray_ci.utils import logger
from ci.ray_ci.utils import logger, ci_init
from ray_release.util import get_write_state_machine_aws_bucket


AWS_CACHE_KEY = "doc_build"


@click.command()
Expand All @@ -14,11 +20,19 @@ def main(ray_checkout_dir: str) -> None:
"""
This script builds ray doc and upload build artifacts to S3.
"""
ci_init()

logger.info("Building ray doc.")
_build(ray_checkout_dir)

if os.environ.get("BUILDKITE_BRANCH") != "master":
logger.info(
"Not uploading build artifacts to S3 because this is not the master branch."
)
return

logger.info("Uploading build artifacts to S3.")
_upload_build_artifacts()
_upload_build_artifacts(ray_checkout_dir)

return

Expand All @@ -36,13 +50,36 @@ def _build(ray_checkout_dir):
)


def _upload_build_artifacts(ray_checkout_dir):
    """
    Upload the doc build artifacts to S3.

    Tars up the untracked files under doc/ (i.e. the files generated by the
    doc build, which are not checked into git) and uploads the tarball to the
    CI results bucket, keyed by the current Buildkite commit SHA.

    Args:
        ray_checkout_dir: Path to the ray repository checkout whose doc/
            directory contains the freshly built artifacts.

    Raises:
        subprocess.CalledProcessError: If the git or tar commands fail.
        KeyError: If BUILDKITE_COMMIT is not set in the environment.
    """
    # Mark the checkout as a safe git directory; inside CI containers the
    # checkout is often owned by a different user and git refuses to operate
    # on it otherwise.
    subprocess.run(
        ["git", "config", "--global", "--add", "safe.directory", ray_checkout_dir],
        check=True,
    )
    # NUL-separated ("-z") list of untracked files under doc/ -- exactly the
    # files produced by the doc build.
    doc_generated_files = subprocess.check_output(
        ["git", "ls-files", "doc", "--others", "-z"],
        cwd=ray_checkout_dir,
    )

    # Create a tarball of the doc-generated files, named after the commit.
    # Using subprocess.run with check=True (instead of a bare
    # Popen/communicate, whose exit status would go unchecked) ensures a
    # failed tar aborts the upload rather than shipping a broken tarball.
    doc_tarball = f'{os.environ["BUILDKITE_COMMIT"]}.tgz'
    subprocess.run(
        ["tar", "-cvzf", doc_tarball, "--null", "-T", "-"],
        input=doc_generated_files,
        cwd=ray_checkout_dir,
        check=True,
    )

    # Upload the tarball to S3 under the doc_build cache key.
    boto3.client("s3").upload_file(
        os.path.join(ray_checkout_dir, doc_tarball),
        get_write_state_machine_aws_bucket(),
        f"{AWS_CACHE_KEY}/{doc_tarball}",
    )
    logger.info(f"Successfully uploaded {doc_tarball} to S3.")


if __name__ == "__main__":
Expand Down

0 comments on commit 19f13e5

Please sign in to comment.