From cc998038b6b5b1b9692ad579f8b9b0b32f1ae658 Mon Sep 17 00:00:00 2001
From: Nikhil Kulkarni
Date: Thu, 24 Jun 2021 02:29:56 -0700
Subject: [PATCH] Remove instances.yaml, address comments and unskip functions

---
 .gitignore                                 | 5 +++++
 docker/Dockerfile.dev                      | 2 +-
 test/benchmark/instances.yaml              | 4 ----
 test/benchmark/tests/conftest.py           | 2 +-
 test/benchmark/tests/utils/__init__.py     | 6 +++---
 test/benchmark/tests/utils/apache_bench.py | 2 +-
 6 files changed, 11 insertions(+), 10 deletions(-)
 delete mode 100644 test/benchmark/instances.yaml

diff --git a/.gitignore b/.gitignore
index c91ff4c82f..550ce2ab59 100644
--- a/.gitignore
+++ b/.gitignore
@@ -20,5 +20,10 @@
 test/model_store/
 test/ts_console.log
 test/config.properties
+
 .vscode
 .scratch/
+
+# Custom benchmark artifacts
+instances.yaml
+instances.yaml.backup
diff --git a/docker/Dockerfile.dev b/docker/Dockerfile.dev
index 5d97752d0c..597e6f4cad 100644
--- a/docker/Dockerfile.dev
+++ b/docker/Dockerfile.dev
@@ -52,7 +52,7 @@ ARG CUDA_VERSION
 RUN if [ "$MACHINE_TYPE" = "gpu" ]; then export USE_CUDA=1; fi \
     && git clone https://github.com/pytorch/serve.git \
     && cd serve \
-    && git checkout --track origin/release_0.4.0 \
+    && git checkout --track ${BRANCH_NAME} \
     && if [ -z "$CUDA_VERSION" ]; then python ts_scripts/install_dependencies.py --environment=dev; else python ts_scripts/install_dependencies.py --environment=dev --cuda $CUDA_VERSION; fi \
     && python ts_scripts/install_from_src.py \
     && useradd -m model-server \
diff --git a/test/benchmark/instances.yaml b/test/benchmark/instances.yaml
deleted file mode 100644
index 47b2ae0b36..0000000000
--- a/test/benchmark/instances.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-test_vgg16_benchmark:
-  inf1.6xlarge:
-    instance_id: i-04b6adea9c066ad0f
-    key_filename: /Users/nikhilsk/nskool/serve/test/benchmark/ec2-key-name-6028.pem
diff --git a/test/benchmark/tests/conftest.py b/test/benchmark/tests/conftest.py
index 85b8fa5481..934af7dfa1 100644
--- a/test/benchmark/tests/conftest.py
+++ b/test/benchmark/tests/conftest.py
@@ -53,7 +53,7 @@ def pytest_addoption(parser):
     )
 
 
-#@pytest.fixture(scope="session", autouse=True)
+@pytest.fixture(scope="session", autouse=True)
 def build_docker_container(request, docker_dev_image_config_path):
     LOGGER.info(f"Setting up docker image to be used")
     docker_config = YamlHandler.load_yaml(docker_dev_image_config_path)
diff --git a/test/benchmark/tests/utils/__init__.py b/test/benchmark/tests/utils/__init__.py
index 80d86335ca..f5a634ba03 100644
--- a/test/benchmark/tests/utils/__init__.py
+++ b/test/benchmark/tests/utils/__init__.py
@@ -18,7 +18,7 @@
 DEFAULT_REGION = "us-west-2"
 IAM_INSTANCE_PROFILE = "EC2Admin"
 
-S3_BUCKET_BENCHMARK_ARTIFACTS = "s3://nikhilsk-model-serving/benchmark_artifacts"
+S3_BUCKET_BENCHMARK_ARTIFACTS = "s3://torchserve-model-serving/benchmark_artifacts"
 
 DEFAULT_DOCKER_DEV_ECR_REPO = "torchserve-benchmark"
 DEFAULT_DOCKER_DEV_ECR_TAG = "dev-image"
@@ -28,9 +28,9 @@
 GPU_INSTANCES = ["p2", "p3", "p4", "g2", "g3", "g4"]
 
 # DLAMI with nVidia Driver ver. 450.119.03 (support upto CUDA 11.2), Ubuntu 18.04
-# AMI_ID = "ami-0ff137c06803a8bb7"
+AMI_ID = "ami-064696901389beb84"
 # AMI_ID = "ami-0198925303105158c", Base DLAMI 37.0 with apache2-utils installed
-AMI_ID = "ami-00c5ebd9076702cbe"#, DLAMI 43.0 with apache2-utils installed
+# AMI_ID = "ami-00c5ebd9076702cbe"#, DLAMI 43.0 with apache2-utils installed
 
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.INFO)
diff --git a/test/benchmark/tests/utils/apache_bench.py b/test/benchmark/tests/utils/apache_bench.py
index 159207b754..2c0c2af47a 100644
--- a/test/benchmark/tests/utils/apache_bench.py
+++ b/test/benchmark/tests/utils/apache_bench.py
@@ -33,7 +33,7 @@ def __init__(self, model_name="benchmark", connection=None):
         self.result_file = os.path.join(TMP_DIR, "benchmark/result.txt")
         self.ts_metric_log_file = os.path.join(TMP_DIR, "benchmark/logs/model_metrics.log")
         self.inference_url = "http://127.0.0.1:8080"
-        #self.install_dependencies()
+        self.install_dependencies()
 
         self.metrics = {
             "predict.txt": "PredictionTime",