Commit

Bug 1767436 - Fail when visual-metrics are 0. r=perftest-reviewers,AlexandruIonescu

Differential Revision: https://phabricator.services.mozilla.com/D145353
Gregory Mierzwinski committed May 4, 2022
1 parent 590c093 commit 51d8406
Showing 1 changed file with 18 additions and 4 deletions.
22 changes: 18 additions & 4 deletions testing/raptor/raptor/results.py
@@ -310,6 +310,7 @@ def __init__(self, config, root_results_dir=None):
         super(BrowsertimeResultsHandler, self).__init__(**config)
         self._root_results_dir = root_results_dir
         self.browsertime_visualmetrics = False
+        self.failed_vismets = []
         if not os.path.exists(self._root_results_dir):
             os.mkdir(self._root_results_dir)

@@ -350,6 +351,7 @@ def parse_browsertime_json(
         measure,
         page_count,
         test_name,
+        accept_zero_vismet,
     ):
         """
         Receive a json blob that contains the results direct from the browsertime tool. Parse
@@ -618,9 +620,14 @@ def _get_raptor_val(mdict, mname, retval=False):
                         if "progress" in metric.lower():
                             # Bug 1665750 - Determine if we should display progress
                             continue
-                        bt_result["measurements"].setdefault(metric, []).append(
-                            cycle[metric]
-                        )
+
+                        val = cycle[metric]
+                        if not accept_zero_vismet:
+                            if val == 0:
+                                self.failed_vismets.append(metric)
+                                continue
+
+                        bt_result["measurements"].setdefault(metric, []).append(val)
                         bt_result["statistics"][metric] = raw_result["statistics"][
                             "visualMetrics"
                         ][metric]
@@ -774,6 +781,7 @@ def summarize_and_output(self, test_config, tests, test_names):
                 test.get("measure"),
                 test_config.get("page_count", []),
                 test["name"],
+                accept_zero_vismet,
             ):

                 def _new_standard_result(new_result, subtest_unit="ms"):
@@ -869,6 +877,12 @@ def _is_supporting_data(res):
         output.summarize(test_names)
         success, out_perfdata = output.output(test_names)

+        if len(self.failed_vismets) > 0:
+            LOG.critical(
+                "TEST-UNEXPECTED-FAIL | Some visual metrics have an erroneous value of 0."
+            )
+            LOG.info("Visual metric tests failed: %s" % str(self.failed_vismets))
+
         validate_success = True
         if not self.gecko_profile:
             validate_success = self._validate_treeherder_data(output, out_perfdata)
@@ -894,7 +908,7 @@ def _is_supporting_data(res):
             with open(jobs_file, "w") as f:
                 f.write(json.dumps(jobs_json))

-        return success and validate_success
+        return (success and validate_success) and len(self.failed_vismets) == 0


 class MissingResultsError(Exception):
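
To illustrate the behaviour this change introduces, here is a minimal, self-contained Python sketch of the same pattern: zero-valued visual metrics are recorded as failures unless accept_zero_vismet is set, and the overall result is gated on there being no failed metrics. The collect_visual_metrics helper, the dict layout, and the sample data are illustrative only and are not part of Raptor's API.

# Sketch of the zero-visual-metric check added by this patch (names are illustrative).
def collect_visual_metrics(cycles, accept_zero_vismet=False):
    """Gather per-metric measurements, recording metrics that report 0."""
    measurements = {}
    failed_vismets = []
    for cycle in cycles:
        for metric, val in cycle.items():
            if "progress" in metric.lower():
                # Progress-style metrics are skipped, mirroring the patched code.
                continue
            if not accept_zero_vismet and val == 0:
                # A value of 0 is treated as erroneous and remembered for reporting.
                failed_vismets.append(metric)
                continue
            measurements.setdefault(metric, []).append(val)
    return measurements, failed_vismets


if __name__ == "__main__":
    cycles = [
        {"SpeedIndex": 1200, "ContentfulSpeedIndex": 0},
        {"SpeedIndex": 1180, "ContentfulSpeedIndex": 950},
    ]
    measurements, failed = collect_visual_metrics(cycles)
    success = True  # stands in for `success and validate_success`
    overall = success and len(failed) == 0
    print(measurements)  # {'SpeedIndex': [1200, 1180], 'ContentfulSpeedIndex': [950]}
    print(failed)        # ['ContentfulSpeedIndex']
    print(overall)       # False

As in the patch, a single zero-valued metric is enough to flip the overall result to failure while the remaining measurements are still collected and reported.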
