Skip to content

Commit

Permalink
Back out changeset c9dbc1119342 (bug 1203085) on suspicion of breaking gaia tests
Browse files Browse the repository at this point in the history
  • Loading branch information
philor committed Sep 19, 2015
1 parent 102da69 commit a94eeac
Show file tree
Hide file tree
Showing 5 changed files with 47 additions and 155 deletions.
32 changes: 6 additions & 26 deletions testing/mozharness/mozharness/base/script.py
Original file line number Diff line number Diff line change
Expand Up @@ -390,14 +390,12 @@ def _download_file(self, url, file_name):
self.warning("Socket error when accessing %s: %s" % (url, str(e)))
raise

def _retry_download(self, url, error_level, file_name=None, retry_config=None):
""" Helper method to retry download methods
def _retry_download_file(self, url, file_name, error_level, retry_config=None):
""" Helper method to retry _download_file().
Split out so we can alter the retry logic in mozharness.mozilla.testing.gaia_test.
This method calls `self.retry` on `self._download_file` using the passed
parameters if a file_name is specified. If no file is specified, we will
instead call `self._urlopen`, which grabs the contents of a url but does
not create a file on disk.
parameters.
Args:
url (str): URL path where the file is located.
Expand All @@ -423,25 +421,12 @@ def _retry_download(self, url, error_level, file_name=None, retry_config=None):
if retry_config:
retry_args.update(retry_config)

download_func = self._urlopen
kwargs = {"url": url}
if file_name:
download_func = self._download_file
kwargs = {"url": url, "file_name": file_name}

return self.retry(
download_func,
kwargs=kwargs,
self._download_file,
args=(url, file_name),
**retry_args
)

def load_json_url(self, url, error_level=None, *args, **kwargs):
""" Returns a json object from a url (it retries). """
contents = self._retry_download(
url=url, error_level=error_level, *args, **kwargs
)
return json.loads(contents.read())

# http://www.techniqal.com/blog/2008/07/31/python-file-read-write-with-urllib2/
# TODO thinking about creating a transfer object.
def download_file(self, url, file_name=None, parent_dir=None,
Expand Down Expand Up @@ -482,12 +467,7 @@ def download_file(self, url, file_name=None, parent_dir=None,
if create_parent_dir:
self.mkdir_p(parent_dir, error_level=error_level)
self.info("Downloading %s to %s" % (url, file_name))
status = self._retry_download(
url=url,
error_level=error_level,
file_name=file_name,
retry_config=retry_config
)
status = self._retry_download_file(url, file_name, error_level, retry_config=retry_config)
if status == file_name:
self.info("Downloaded %d bytes." % os.path.getsize(file_name))
return status
Expand Down
64 changes: 0 additions & 64 deletions testing/mozharness/mozharness/mozilla/taskcluster_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,6 @@
"""
import os
from datetime import datetime, timedelta
from urlparse import urljoin

from mozharness.base.log import LogMixin


Expand Down Expand Up @@ -114,65 +112,3 @@ def get_taskcluster_url(self, filename):
self.task_id,
os.path.basename(filename)
)


# TaskClusterArtifactFinderMixin {{{1
class TaskClusterArtifactFinderMixin(object):
    """Locate build artifacts (installer, test packages, symbols) through
    TaskCluster's queue and scheduler APIs.

    This class depends on the consuming class also extending the base
    script, which provides ``load_json_url`` (url fetch + ``json.loads``).
    """
    QUEUE_URL = 'https://queue.taskcluster.net/v1/task/'
    SCHEDULER_URL = 'https://scheduler.taskcluster.net/v1/task-graph/'

    def get_task(self, task_id):
        """ Get Task Definition """
        # Signature: task(taskId) : result
        return self.load_json_url(urljoin(self.QUEUE_URL, task_id))

    def get_list_latest_artifacts(self, task_id):
        """ Get Artifacts from Latest Run """
        # Signature: listLatestArtifacts(taskId) : result

        # Notice that this grabs the most recent run of a task since we don't
        # know the run_id. This is slightly slower, however, it is more
        # convenient.
        return self.load_json_url(urljoin(self.QUEUE_URL, '{}/artifacts'.format(task_id)))

    def url_to_artifact(self, task_id, full_path):
        """ Return a URL for an artifact. """
        return urljoin(self.QUEUE_URL, '{}/artifacts/{}'.format(task_id, full_path))

    def get_inspect_graph(self, task_group_id):
        """ Inspect Task Graph """
        # Signature: inspect(taskGraphId) : result
        return self.load_json_url(urljoin(self.SCHEDULER_URL, '{}/inspect'.format(task_group_id)))

    def find_parent_task_id(self, task_id):
        """ Returns the task_id of the parent task associated to the given task_id.

        Raises:
            LookupError: if task_id is not part of its own task graph, or
                the task has no dependencies.
        """
        # Find the group id associated to all related tasks
        task_group_id = self.get_task(task_id)['taskGroupId']

        # Find the child task and determine on which task it depends on.
        # Return as soon as we find it; previously a missing match left the
        # result variable unbound and raised a confusing NameError.
        for task in self.get_inspect_graph(task_group_id)['tasks']:
            if task['taskId'] == task_id:
                requires = task['requires']
                if not requires:
                    raise LookupError('Task %s has no parent task.' % task_id)
                return requires[0]

        raise LookupError('Task %s not found in task graph %s.'
                          % (task_id, task_group_id))

    def set_artifacts(self, task_id):
        """ Sets installer, test and symbols URLs from the artifacts of a task.
        In this case we set:
            self.installer_url
            self.test_url (points to test_packages.json)
            self.symbols_url
        """
        # The tasks which represent a buildbot job only upload one artifact:
        # the properties.json file
        p = self.load_json_url(
            self.url_to_artifact(task_id, 'public/properties.json'))

        # Set important artifacts for test jobs; each property, when present,
        # is a single-element list.
        self.installer_url = p['packageUrl'][0] if p.get('packageUrl') else None
        self.test_url = p['testPackagesUrl'][0] if p.get('testPackagesUrl') else None
        self.symbols_url = p['symbolsUrl'][0] if p.get('symbolsUrl') else None

    def set_parent_artifacts(self, child_task_id):
        """ Sets artifact URLs from the parent task of child_task_id. """
        self.set_artifacts(self.find_parent_task_id(child_task_id))
6 changes: 2 additions & 4 deletions testing/mozharness/mozharness/mozilla/testing/gaia_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ def _query_proxxy(self):
self.proxxy = proxxy
return self.proxxy

def _retry_download(self, url, file_name, error_level=FATAL, retry_config=None):
def _retry_download_file(self, url, file_name, error_level=FATAL, retry_config=None):
if self.config.get("bypass_download_cache"):
n = 0
# ignore retry_config in this case
Expand All @@ -196,9 +196,7 @@ def _retry_download(self, url, file_name, error_level=FATAL, retry_config=None):
self.info("Sleeping %s before retrying..." % sleeptime)
time.sleep(sleeptime)
else:
# Since we're overwritting _retry_download() we can't call download_file()
# directly
return super(GaiaTest, self)._retry_download(
return super(GaiaTest, self)._retry_download_file(
url, file_name, error_level, retry_config=retry_config,
)

Expand Down
97 changes: 38 additions & 59 deletions testing/mozharness/mozharness/mozilla/testing/testbase.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
from mozharness.mozilla.buildbot import BuildbotMixin, TBPL_WARNING
from mozharness.mozilla.proxxy import Proxxy
from mozharness.mozilla.structuredlog import StructuredOutputParser
from mozharness.mozilla.taskcluster_helper import TaskClusterArtifactFinderMixin
from mozharness.mozilla.testing.unittest import DesktopUnittestOutputParser
from mozharness.mozilla.testing.try_tools import TryToolsMixin
from mozharness.mozilla.tooltool import TooltoolMixin
Expand Down Expand Up @@ -87,8 +86,8 @@


# TestingMixin {{{1
class TestingMixin(VirtualenvMixin, BuildbotMixin, ResourceMonitoringMixin,
TaskClusterArtifactFinderMixin, TooltoolMixin, TryToolsMixin):
class TestingMixin(VirtualenvMixin, BuildbotMixin, ResourceMonitoringMixin, TooltoolMixin,
TryToolsMixin):
"""
The steps to identify + download the proper bits for [browser] unit
tests and Talos.
Expand Down Expand Up @@ -197,11 +196,10 @@ def _replace_url(url, changes):
return new_url
return url

if c.get("installer_url") is None:
self.exception("You must use --installer-url with developer_config.py")
assert c["installer_url"], "You must use --installer-url with developer_config.py"
if c.get("require_test_zip"):
if not c.get('test_url') and not c.get('test_packages_url'):
self.exception("You must use --test-url or --test-packages-url with developer_config.py")
raise AssertionError("You must use --test-url or --test-packages-url with developer_config.py")

c["installer_url"] = _replace_url(c["installer_url"], c["replace_urls"])
if c.get("test_url"):
Expand Down Expand Up @@ -248,48 +246,6 @@ def _urlopen_basic_auth(url, **kwargs):

# read_buildbot_config is in BuildbotMixin.

def find_artifacts_from_buildbot_changes(self):
    """Set installer/test/symbols URLs from the file list of the most
    recent buildbot sourcestamp change.

    Inspects ``self.buildbot_config['sourcestamp']['changes'][-1]['files']``
    and fills in (when matching files are found):
        self.test_url, self.symbols_url, self.test_packages_url,
        self.installer_url
    Calls self.fatal() when the number of listed files is unexpected.
    """
    c = self.config
    try:
        files = self.buildbot_config['sourcestamp']['changes'][-1]['files']
        buildbot_prop_branch = self.buildbot_config['properties']['branch']

        # Bug 868490 - Only require exactly two files if require_test_zip;
        # otherwise accept either 1 or 2, since we'll be getting a
        # test_zip url that we don't need.
        expected_length = [1, 2, 3]
        if c.get("require_test_zip") and not self.test_url:
            expected_length = [2, 3]
        if buildbot_prop_branch.startswith('gaia-try'):
            # gaia-try pushes can carry an arbitrary number of files.
            expected_length = range(1, 1000)
        actual_length = len(files)
        if actual_length not in expected_length:
            self.fatal("Unexpected number of files in buildbot config %s.\nExpected these number(s) of files: %s, but got: %d" %
                       (c['buildbot_json_path'], str(expected_length), actual_length))
        for f in files:
            if f['name'].endswith('tests.zip'):  # yuk
                if not self.test_url:
                    # str() because of unicode issues on mac
                    self.test_url = str(f['name'])
                    self.info("Found test url %s." % self.test_url)
            elif f['name'].endswith('crashreporter-symbols.zip'):  # yuk
                self.symbols_url = str(f['name'])
                self.info("Found symbols url %s." % self.symbols_url)
            elif f['name'].endswith('test_packages.json'):
                self.test_packages_url = str(f['name'])
                self.info("Found a test packages url %s." % self.test_packages_url)
            elif not any(f['name'].endswith(s) for s in ('code-coverage-gcno.zip',)):
                # Anything else (except coverage data) is assumed to be the
                # installer; keep the first one we see.
                if not self.installer_url:
                    self.installer_url = str(f['name'])
                    self.info("Found installer url %s." % self.installer_url)
    # "except E, e" is Python 2-only syntax; "as" works on py2.6+ and py3.
    except IndexError as e:
        self.error(str(e))

def find_artifacts_from_taskcluster(self):
    """Set installer, test and symbols URLs from the parent task of the
    buildbot job's ``taskId`` property.

    Assumes self.buildbot_config has already been loaded and that
    set_parent_artifacts is available (provided by
    TaskClusterArtifactFinderMixin).
    """
    self.info("Finding installer, test and symbols from parent task. ")
    task_id = self.buildbot_config['properties']['taskId']
    self.set_parent_artifacts(task_id)

def postflight_read_buildbot_config(self):
"""
Determine which files to download from the buildprops.json file
Expand All @@ -304,17 +260,40 @@ def postflight_read_buildbot_config(self):
self.test_url = c['test_url']
if c.get("test_packages_url"):
self.test_packages_url = c['test_packages_url']

if self.buildbot_config['sourcestamp']['changes']:
self.find_artifacts_from_buildbot_changes()
elif 'taskId' in self.buildbot_config['properties']:
self.find_artifacts_from_taskcluster()
else:
self.exception(
"We have not been able to determine which artifacts "
"to use in order to run the tests."
)

try:
files = self.buildbot_config['sourcestamp']['changes'][-1]['files']
buildbot_prop_branch = self.buildbot_config['properties']['branch']

# Bug 868490 - Only require exactly two files if require_test_zip;
# otherwise accept either 1 or 2, since we'll be getting a
# test_zip url that we don't need.
expected_length = [1, 2, 3]
if c.get("require_test_zip") and not self.test_url:
expected_length = [2, 3]
if buildbot_prop_branch.startswith('gaia-try'):
expected_length = range(1, 1000)
actual_length = len(files)
if actual_length not in expected_length:
self.fatal("Unexpected number of files in buildbot config %s.\nExpected these number(s) of files: %s, but got: %d" %
(c['buildbot_json_path'], str(expected_length), actual_length))
for f in files:
if f['name'].endswith('tests.zip'): # yuk
if not self.test_url:
# str() because of unicode issues on mac
self.test_url = str(f['name'])
self.info("Found test url %s." % self.test_url)
elif f['name'].endswith('crashreporter-symbols.zip'): # yuk
self.symbols_url = str(f['name'])
self.info("Found symbols url %s." % self.symbols_url)
elif f['name'].endswith('test_packages.json'):
self.test_packages_url = str(f['name'])
self.info("Found a test packages url %s." % self.test_packages_url)
elif not any(f['name'].endswith(s) for s in ('code-coverage-gcno.zip',)):
if not self.installer_url:
self.installer_url = str(f['name'])
self.info("Found installer url %s." % self.installer_url)
except IndexError, e:
self.error(str(e))
missing = []
if not self.installer_url:
missing.append("installer_url")
Expand Down
3 changes: 1 addition & 2 deletions testing/mozharness/scripts/desktop_l10n.py
Original file line number Diff line number Diff line change
Expand Up @@ -697,8 +697,7 @@ def make_wget_en_US(self):
# has a different version number from the one in the current
# checkout.
self.bootstrap_env['ZIP_IN'] = dst_filename
return self.download_file(url=binary_file, file_name=dst_filename,
error_level=FATAL)
return self._retry_download_file(binary_file, dst_filename, error_level=FATAL)

# binary url is not an installer, use make wget-en-US to download it
return self._make(target=["wget-en-US"], cwd=cwd, env=env)
Expand Down

0 comments on commit a94eeac

Please sign in to comment.