Test artifacts not uploaded to the correct bucket (kubeflow#4321)
* Fix test artifacts not uploaded to the correct bucket and group tests

* We should be using the kubernetes-jenkins bucket, not a bucket in kubeflow-ci,
  because spyglass and testgrid still read from the kubernetes-jenkins bucket.

* kubeflow/testing#488 needs to be submitted before this will work.

* Related to kubeflow/kfctl#58

* We want all tests run as part of a particular workflow to be grouped together.

  * So we have kfctl_e2e_workflow.py set an environment variable
    which tests can then use to set the junit XML classname attribute
    (see the sketch after this list).

  * run_e2e_workflow.py will pass an argument that sets this to the name
    in the prow_config file

* Address comments.
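
For illustration, here is a minimal sketch (not the actual kubeflow.testing
implementation) of how a helper such as util.set_pytest_junit could consume the
TEST_TARGET_NAME environment variable exported by kfctl_e2e_workflow.py;
record_xml_attribute is pytest's built-in fixture for adding attributes to the
junit <testcase> element:

    import os

    def set_pytest_junit(record_xml_attribute, test_name):
        # testgrid groups results by target, which maps to the junit classname
        # attribute, so every test in one workflow run shares the same classname.
        test_target = os.getenv("TEST_TARGET_NAME", "")
        if test_target:
            record_xml_attribute("classname", test_target)
        # The test name itself still has to be unique across all E2E tests.
        record_xml_attribute("name", test_name)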
jlewi authored and k8s-ci-robot committed Oct 21, 2019
1 parent 32be850 commit 7727e57
Showing 8 changed files with 64 additions and 18 deletions.
11 changes: 10 additions & 1 deletion kubeflow/jupyter/tests/jupyter_test.py
@@ -97,7 +97,16 @@ def send_request(*args, **kwargs):
return r


def test_jupyter(env, namespace):
def test_jupyter(record_xml_attribute, env, namespace):
"""Test the jupyter notebook.
Args:
record_xml_attribute: Test fixture provided by pytest.
env: ksonnet environment.
namespace: namespace to run in.
"""
util.set_pytest_junit(record_xml_attribute, "jupyter_test")

app_credentials = os.getenv("GOOGLE_APPLICATION_CREDENTIALS")
if app_credentials:
logging.info("Activate service account")
43 changes: 33 additions & 10 deletions py/kubeflow/kubeflow/ci/kfctl_e2e_workflow.py
@@ -64,16 +64,18 @@
TEMPLATE_LABEL = "kfctl_e2e"

MAIN_REPO = "kubeflow/kubeflow"

EXTRA_REPOS = ["kubeflow/testing@HEAD", "kubeflow/tf-operator@HEAD"]

class Builder:
def __init__(self, name=None, namespace=None,
config_path=("https://raw.githubusercontent.com/kubeflow"
"/manifests/master/kfdef/kfctl_gcp_iap.yaml"),
bucket="kubeflow-ci_temp",
bucket=None,
test_endpoint=False,
use_basic_auth=False,
build_and_apply=False,
test_target_name=None,
kf_app_name=None, delete_kf=True,
**kwargs):
"""Initialize a builder.
@@ -82,10 +84,12 @@ def __init__(self, name=None, namespace=None,
name: Name for the workflow.
namespace: Namespace for the workflow.
config_path: Path to the KFDef spec file.
bucket: The bucket to upload artifacts to.
bucket: The bucket to upload artifacts to. If not set, use the default determined by prow_artifacts.py.
test_endpoint: Whether to test the endpoint is ready. Should only
be true for IAP.
use_basic_auth: Whether to use basic_auth.
test_target_name: (Optional) Name to use as the test target to group
tests.
kf_app_name: (Optional) Name to use for the Kubeflow deployment.
If not set a unique name is assigned. Only set this if you want to
reuse an existing deployment across runs.
@@ -150,6 +154,20 @@ def __init__(self, name=None, namespace=None,
# files unique names.
self.config_name = os.path.splitext(os.path.basename(config_path))[0]

# The class name to label junit files.
# We want to be able to group related tests in testgrid.
# Testgrid allows grouping by target, which corresponds to the classname
# attribute in junit files.
# So we set an environment variable to the desired class name.
# The pytest modules can then look at this environment variable to
# explicitly override the classname.
# The classname should be unique for each run, so it should take into
# account the different parameters.
if test_target_name:
self.test_target_name = test_target_name
else:
self.test_target_name = self.config_name

# app_name is the name of the Kubeflow deployment.
# This needs to be unique per run since we name GCP resources with it.
self.app_name = kf_app_name
@@ -243,7 +261,9 @@ def _build_task_template(self):
'container': {'command': [],
'env': [
{"name": "GOOGLE_APPLICATION_CREDENTIALS",
"value": "/secret/gcp-credentials/key.json"}
"value": "/secret/gcp-credentials/key.json"},
{"name": "TEST_TARGET_NAME",
"value": self.test_target_name},
],
'image': 'gcr.io/kubeflow-ci/test-worker:latest',
'imagePullPolicy': 'Always',
@@ -302,6 +322,7 @@ def _build_tests_dag(self):

#***************************************************************************
# Test TFJob
job_name = self.config_name.replace("_", "-")
step_name = "tfjob-test"
command = [
"python",
@@ -311,7 +332,7 @@
"--tfjob_version=v1",
# Name is used for the test case name so it should be unique across
# all E2E tests.
"--params=name=smoke-tfjob-" + self.config_name + ",namespace=" +
"--params=name=smoke-tfjob-" + job_name + ",namespace=" +
self.steps_namespace,
"--artifacts_path=" + self.artifacts_dir,
# Skip GPU tests
@@ -436,9 +457,10 @@ def _build_exit_dag(self):
"kubeflow.testing.prow_artifacts",
"--artifacts_dir=" +
self.output_dir,
"copy_artifacts",
"--bucket=" + self.bucket,
"--suffix=fakesuffix",]
"copy_artifacts"]

if self.bucket:
command.append("--bucket=" + self.bucket)

dependences = []
if self.delete_kf:
@@ -595,9 +617,10 @@ def build(self):
"-m",
"kubeflow.testing.prow_artifacts",
"--artifacts_dir=" + self.output_dir,
"create_pr_symlink",
"--bucket=" + self.bucket,
]
"create_pr_symlink"]

if self.bucket:
command.append("--bucket=" + self.bucket)

dependences = [checkout["name"]]
symlink = self._build_step(step_name, self.workflow, E2E_DAG_NAME, task_template,
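
For context, a rough sketch (values are placeholders, not real paths) of the
exit-handler command this change assembles; when no --bucket flag is appended,
prow_artifacts.py falls back to its default bucket (kubernetes-jenkins, which
spyglass and testgrid read from):

    # output_dir and bucket are hypothetical; the workflow supplies real values.
    output_dir = "gs://some-bucket/logs/some-job/1234"
    bucket = None  # default case: let prow_artifacts.py choose the bucket

    command = [
        "python", "-m", "kubeflow.testing.prow_artifacts",
        "--artifacts_dir=" + output_dir,
        "copy_artifacts",
    ]
    if bucket:
        command.append("--bucket=" + bucket)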
4 changes: 3 additions & 1 deletion py/kubeflow/kubeflow/ci/kfctl_go_build_test.py
@@ -5,10 +5,12 @@
from kubeflow.testing import util
import kfctl_go_test_utils as kfctl_util

def test_build_kfctl_go():
def test_build_kfctl_go(record_xml_attribute):
"""Test building of kfctl go.
"""
util.set_pytest_junit(record_xml_attribute, "test_build_kfctl_go")

# Need to activate account for scopes.
if os.getenv("GOOGLE_APPLICATION_CREDENTIALS"):
util.run([
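
As a usage illustration (the target name and output path below are hypothetical),
this test can be driven locally through pytest with a junit report so the
classname override takes effect; in CI the Argo workflow exports TEST_TARGET_NAME
instead:

    import os
    import pytest

    # Normally exported by kfctl_e2e_workflow.py's task template.
    os.environ["TEST_TARGET_NAME"] = "kfctl_gcp_iap"

    # --junitxml makes pytest write the report that record_xml_attribute modifies.
    pytest.main([
        "py/kubeflow/kubeflow/ci/kfctl_go_build_test.py",
        "--junitxml=/tmp/junit_kfctl-build-test.xml",
    ])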
5 changes: 4 additions & 1 deletion py/kubeflow/kubeflow/ci/kfctl_go_deploy_test.py
@@ -6,13 +6,16 @@
import kfctl_go_test_utils as kfctl_util
from kubeflow.testing import util

def test_deploy_kfctl_go(app_path, project, use_basic_auth, use_istio, config_path):
def test_deploy_kfctl_go(record_xml_attribute, app_path, project,
use_basic_auth, use_istio, config_path):
"""Test deploying Kubeflow.
Args:
app_path: The path to the Kubeflow app.
project: The GCP project to use.
"""
util.set_pytest_junit(record_xml_attribute, "test_deploy_kfctl_go")

# Need to activate account for scopes.
if os.getenv("GOOGLE_APPLICATION_CREDENTIALS"):
util.run([
4 changes: 3 additions & 1 deletion testing/kfctl/endpoint_ready_test.py
@@ -19,13 +19,15 @@
# We shouldn't need it to feel confident that kfctl is working.
@pytest.mark.skipif(os.getenv("JOB_TYPE") == "presubmit",
reason="test endpoint doesn't run in presubmits")
def test_endpoint_is_ready(project, app_name):
def test_endpoint_is_ready(record_xml_attribute, project, app_name):
"""Test that Kubeflow was successfully deployed.
Args:
project: The GCP project that Kubeflow was deployed to.
app_name: The name of the Kubeflow deployment.
"""
util.set_pytest_junit(record_xml_attribute, "test_endpoint_is_ready")

# Owned by project kubeflow-ci-deployment.
os.environ["CLIENT_ID"] = "29647740582-7meo6c7a9a76jvg54j0g2lv8lrsb4l8g.apps.googleusercontent.com"
if not gcp_util.endpoint_is_ready(
5 changes: 3 additions & 2 deletions testing/kfctl/kf_is_ready_test.py
@@ -20,14 +20,15 @@ def set_logging():
)
logging.getLogger().setLevel(logging.INFO)

def test_kf_is_ready(namespace, use_basic_auth, use_istio, app_path):
def test_kf_is_ready(record_xml_attribute, namespace, use_basic_auth, use_istio,
app_path):
"""Test that Kubeflow was successfully deployed.
Args:
namespace: The namespace Kubeflow is deployed to.
"""
set_logging()
logging.info("Using namespace %s", namespace)
util.set_pytest_junit(record_xml_attribute, "test_kf_is_ready")

# Need to activate account for scopes.
if os.getenv("GOOGLE_APPLICATION_CREDENTIALS"):
4 changes: 3 additions & 1 deletion testing/kfctl/kfctl_delete_test.py
@@ -41,7 +41,9 @@ def get_endpoints_list(project):
# and more importantly failures block upload of GCS artifacts so for now we mark
# it as expected to fail.
@pytest.mark.xfail
def test_kfctl_delete(kfctl_path, app_path, project, cluster_deletion_script):
def test_kfctl_delete(record_xml_attribute, kfctl_path, app_path, project,
cluster_deletion_script):
util.set_pytest_junit(record_xml_attribute, "test_kfctl_delete")

# TODO(yanniszark): split this into a separate workflow step
if cluster_deletion_script:
6 changes: 5 additions & 1 deletion testing/kfctl/kfctl_go_test.py
@@ -6,7 +6,9 @@
from kubeflow.kubeflow.ci import kfctl_go_test_utils as kfctl_util
from kubeflow.testing import util

def test_build_kfctl_go(app_path, project, use_basic_auth, use_istio, config_path, build_and_apply, cluster_creation_script):
def test_build_kfctl_go(record_xml_attribute, app_path, project, use_basic_auth,
use_istio, config_path, build_and_apply,
cluster_creation_script):
"""Test building and deploying Kubeflow.
Args:
@@ -18,6 +20,8 @@ def test_build_kfctl_go(app_path, project, use_basic_auth, use_istio, config_pat
cluster_creation_script: script invoked to create a new cluster.
build_and_apply: whether to run build and apply, or just apply.
"""
util.set_pytest_junit(record_xml_attribute, "test_build_kfctl_go")

# Need to activate account for scopes.
if os.getenv("GOOGLE_APPLICATION_CREDENTIALS"):
util.run([
