benchmark info during jobs polling
Kevin Normoyle committed Apr 15, 2013
1 parent a753573 commit dc82eb1
Showing 3 changed files with 16 additions and 8 deletions.
7 changes: 6 additions & 1 deletion py/h2o_jobs.py
@@ -4,7 +4,7 @@

 # poll the Jobs queue and wait if not all done. Return matching keys to a pattern for 'destination_key"
 # for a job (model usually)
-def pollWaitJobs(pattern=None, timeoutSecs=30, retryDelaySecs=5):
+def pollWaitJobs(pattern=None, timeoutSecs=30, retryDelaySecs=5, benchmarkLogging=None):
     anyBusy = True
     waitTime = 0
     while (anyBusy):
@@ -37,6 +37,11 @@ def pollWaitJobs(pattern=None, timeoutSecs=30, retryDelaySecs=5):
             sys.stdout.flush()
             time.sleep(retryDelaySecs)
             waitTime += retryDelaySecs
+
+        # any time we're sitting around polling we might want to save logging info (cpu/disk/jstack)
+        # test would pass ['cpu','disk','jstack'] kind of list
+        if benchmarkLogging:
+            h2o.cloudPerfH2O.get_log_save(benchmarkLogging)
     return patternKeys

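For context, a minimal sketch of how a test might call the extended poller once jobs are in flight. It assumes the harness has already built a cloud and initialized h2o.cloudPerfH2O; the pattern value and timeouts are illustrative, not taken from any specific test.

import h2o_jobs

# wait for all matching jobs to finish; on each polling pass, cpu/disk/jstack
# snapshots are appended to the benchmark log via h2o.cloudPerfH2O.get_log_save()
completedKeys = h2o_jobs.pollWaitJobs(
    pattern='benchmark.hex',      # hypothetical destination_key pattern
    timeoutSecs=300,
    retryDelaySecs=5,
    benchmarkLogging=['cpu', 'disk', 'jstack'])
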
4 changes: 2 additions & 2 deletions py/h2o_perf.py
@@ -7,7 +7,7 @@ class PerfH2O(object):
     def change_logfile(self, subtest_name):
         # change to another logfile after we've already been going
         blog = 'benchmark_' + subtest_name + '.log'
-        print "Switch. Now appending to %s." % blog, "Between tests, you may want to delete it if it gets too big"
+        print "\nSwitch. Now appending to %s." % blog, "Between tests, you may want to delete it if it gets too big"

         # http://stackoverflow.com/questions/5296130/restart-logging-to-a-new-file-python
         # manually reassign the handler
@@ -27,7 +27,7 @@ def init_logfile(self, subtest_name):
         # just use local directory? (python_test_name global set below before this)
         blog = 'benchmark_' + subtest_name + '.log'
         self.subtest_name = subtest_name
-        print "Appending to %s." % blog, "Between tests, you may want to delete it if it gets too big"
+        print "\nAppending to %s." % blog, "Between tests, you may want to delete it if it gets too big"
         logging.basicConfig(filename=blog,
             # we use CRITICAL for the benchmark logging to avoid info/warn stuff
             # from other python packages
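The change_logfile path leans on the standard-library trick referenced in that stackoverflow link: detach the old FileHandler from the root logger and attach a fresh one pointed at the new file. Below is a minimal sketch of that idea, outside the PerfH2O class; the function name and file mode are assumptions, not the exact H2O code.

import logging

def switch_benchmark_log(new_path):
    root = logging.getLogger()
    # drop existing file handlers so records stop going to the old file
    for handler in list(root.handlers):
        if isinstance(handler, logging.FileHandler):
            handler.close()
            root.removeHandler(handler)
    # attach a fresh handler; CRITICAL keeps info/warn noise from other
    # python packages out, matching the basicConfig() intent above
    new_handler = logging.FileHandler(new_path, mode='a')
    new_handler.setLevel(logging.CRITICAL)
    root.addHandler(new_handler)
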
13 changes: 8 additions & 5 deletions py/testdir_multi_jvm/test_benchmark_import.py
@@ -99,6 +99,10 @@ def test_benchmark_import(self):
         # rebuild the cloud for each file
         base_port = 54321
         tryHeap = 28
+        # can fire a parse off and go wait on the jobs queue (inspect afterwards is enough?)
+        noPoll = False
+        benchmarkLogging = ['cpu','disk']
+
         for (csvFilepattern, csvFilename, totalBytes, timeoutSecs) in csvFilenameList:
             localhost = h2o.decide_if_localhost()
             if (localhost):
@@ -125,10 +129,9 @@
             h2o.cloudPerfH2O.message("Parse " + csvFilename + " Start--------------------------------")
             start = time.time()
             parseKey = h2i.parseImportFolderFile(None, csvFilepattern, importFolderPath,
-                noPoll=True,
-                key2=csvFilename + ".hex", timeoutSecs=timeoutSecs, retryDelaySecs = 5,
-                # benchmarkLogging=['cpu','disk', 'jstack'])
-                benchmarkLogging=['cpu','disk','jstack'])
+                key2=csvFilename + ".hex", timeoutSecs=timeoutSecs, retryDelaySecs=5,
+                noPoll=noPoll,
+                benchmarkLogging=benchmarkLogging)
             elapsed = time.time() - start
             print "Parse #", trial, "completed in", "%6.2f" % elapsed, "seconds.", \
                 "%d pct. of timeout" % ((elapsed*100)/timeoutSecs)
@@ -143,7 +146,7 @@

             # does it take a little while to show up in Jobs, from where we issued the parse?
             time.sleep(2)
-            h2o_jobs.pollWaitJobs(pattern=csvFilename)
+            h2o_jobs.pollWaitJobs(pattern=csvFilename, benchmarkLogging=benchmarkLogging)
             # We should be able to see the parse result?
             inspect = h2o_cmd.runInspect(key=parseKey['destination_key'])
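Taken together, the flow this commit enables when noPoll is flipped to True looks roughly like the sketch below: fire the parse off without blocking, sit on the Jobs queue while the benchmark logging runs, then inspect the result. Variable names mirror the test above; the sleep and the logging list are illustrative.

noPoll = True
benchmarkLogging = ['cpu', 'disk']

# fire the parse off without blocking...
parseKey = h2i.parseImportFolderFile(None, csvFilepattern, importFolderPath,
    key2=csvFilename + ".hex", timeoutSecs=timeoutSecs, retryDelaySecs=5,
    noPoll=noPoll, benchmarkLogging=benchmarkLogging)

# ...then wait on the Jobs queue, saving cpu/disk stats on every poll
time.sleep(2)
h2o_jobs.pollWaitJobs(pattern=csvFilename, benchmarkLogging=benchmarkLogging)

# the parse result should now be visible
inspect = h2o_cmd.runInspect(key=parseKey['destination_key'])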
