forked from linkedin/dr-elephant
LIHADOOP-19850: Test for AnalyticJob (linkedin#42)
* Test for AnalyticJob

Added a test to perform analysis on an analytic job and compute results. Dependencies of the getAnalysis() method are mocked out using JMockit.

$> ../tools/play "test-only com.linkedin.drelephant.analysis.AnalyticJobTest"
...
[info] com.linkedin.drelephant.analysis.AnalyticJobTest
[info] + testGetAnalysis
[info]
[info]
[info] Total for test com.linkedin.drelephant.analysis.AnalyticJobTest
[info] Finished in 0.01 seconds
[info] 1 tests, 0 failures, 0 errors
[info] Passed: Total 1, Failed 0, Errors 0, Passed 1
[success] Total time: 2 s, completed May 9, 2016 1:50:18 PM

Notes:
1. Jacoco and JMockit don't work well together. This test has to be ignored when generating code coverage with Jacoco. Alternatively, coverage can be generated with JMockit itself.
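Regarding note 1, here is a minimal sketch (not part of this commit) of how the test could be skipped while collecting Jacoco coverage, assuming JUnit's @Ignore annotation is acceptable in that build setup:

import org.junit.Ignore;
import org.junit.Test;

public class AnalyticJobTest {
  // Hypothetical annotation, added only for Jacoco runs: Jacoco and JMockit
  // instrument classes in conflicting ways, so the JMockit-backed test is skipped.
  @Ignore("Conflicts with Jacoco instrumentation; run without coverage or use JMockit coverage")
  @Test
  public void testGetAnalysis() throws Exception {
    // ... test body unchanged, see the diff below ...
  }
}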
Showing 12 changed files with 297 additions and 1 deletion.
test/com/linkedin/drelephant/analysis/AnalyticJobTest.java (198 additions, 0 deletions)
@@ -0,0 +1,198 @@
/*
 * Copyright 2016 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.linkedin.drelephant.analysis;

import com.linkedin.drelephant.ElephantContext;
import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
import com.linkedin.drelephant.mapreduce.MapReduceFetcherHadoop2;
import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
import com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic;
import common.TestUtil;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import mockit.Expectations;
import mockit.Mocked;
import models.AppResult;
import org.junit.Test;

import static common.TestConstants.*;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;


/**
 * Test aims to exercise {@code getAnalysis()} method in {@code AnalyticJob}.<br>
 * Dependencies to {@code ElephantContext, ElephantFetcher and Heuristics} are mocked
 * out with JMockit.
 */
public class AnalyticJobTest {
  @Mocked(stubOutClassInitialization = true)
  ElephantContext elephantContext = null;
  @Mocked
  MapReduceFetcherHadoop2 fetcher;

  @Test
  public void testGetAnalysis()
      throws Exception {
    try {
      // Setup analytic job
      final AnalyticJob analyticJob = new AnalyticJob().
          setAppId(TEST_JOB_ID1).setAppType(new ApplicationType(TEST_APP_TYPE)).
          setFinishTime(1462178403).setStartTime(1462178412).setName(TEST_JOB_NAME).
          setQueueName(TEST_DEFAULT_QUEUE_NAME).setUser(TEST_USERNAME).setTrackingUrl(TEST_TRACKING_URL);

      // Setup job counter data
      String filePath = FILENAME_JOBCOUNTER;
      MapReduceCounterData jobCounter = new MapReduceCounterData();
      setCounterData(jobCounter, filePath);

      // Setup mapper data
      long[][] mapperTasksTime = {{2563, 0, 0}, {2562, 0, 0}, {2567, 0, 0}};
      MapReduceTaskData[] mappers = new MapReduceTaskData[3];
      for (int i = 1; i <= mappers.length; i++) {
        MapReduceCounterData taskCounter = new MapReduceCounterData();
        setCounterData(taskCounter, FILENAME_MAPPERTASK.replaceFirst("\\$", Integer.toString(i)));
        mappers[i - 1] = new MapReduceTaskData(taskCounter, mapperTasksTime[i - 1]);
      }

      // Setup reducer data
      long[][] reducerTasksTime = {{1870, 1665, 14}};
      MapReduceTaskData[] reducers = new MapReduceTaskData[1];
      for (int i = 1; i <= reducers.length; i++) {
        MapReduceCounterData taskCounter = new MapReduceCounterData();
        setCounterData(taskCounter, FILENAME_REDUCERTASK.replaceFirst("\\$", Integer.toString(i)));
        reducers[i - 1] = new MapReduceTaskData(taskCounter, reducerTasksTime[i - 1]);
      }

      // Setup job configuration data
      filePath = FILENAME_JOBCONF;
      Properties jobConf = TestUtil.loadProperties(filePath);

      // Setup application data
      final MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).
          setMapperData(mappers).setReducerData(reducers).setJobConf(jobConf).setSucceeded(true).
          setDiagnosticInfo("").setUsername(TEST_USERNAME).setUrl("").setJobName(TEST_JOB_NAME).
          setStartTime(1462178412).setFinishTime(1462178403).setRetry(false).setAppId(TEST_JOB_ID1);

      // Setup heuristics
      final List<Heuristic> heuristics = loadHeuristics();

      // Setup job type
      final JobType jobType = new JobType(TEST_JOB_TYPE, TEST_JOBCONF_NAME, TEST_JOBCONF_PATTERN);

      // Set expectations in JMockit
      new Expectations() {{
        fetcher.fetchData(analyticJob);
        result = data;

        elephantContext.getHeuristicsForApplicationType(analyticJob.getAppType());
        result = heuristics;

        elephantContext.matchJobType(data);
        result = jobType;
      }};

      // Call the method under test
      AppResult result = analyticJob.getAnalysis();

      // Make assertions on result
      assertTrue("Result is null", result != null);
      assertTrue("Score did not match", result.score == TEST_SCORE);
      assertTrue("Severity did not match", result.severity.toString().equals(TEST_SEVERITY));
      assertTrue("APP ID did not match", result.id.equals(TEST_JOB_ID1));
      assertTrue("Scheduler did not match", result.scheduler.equals(TEST_SCHEDULER));
    } catch (Exception e) {
      e.printStackTrace();
      assertFalse("Test failed with exception", true);
    }
  }

  private void setCounterData(MapReduceCounterData counter, String filePath)
      throws IOException {
    Properties counterData = TestUtil.loadProperties(filePath);

    for (Object groupName : counterData.keySet()) {
      String counterValueString = (String) counterData.get(groupName);
      counterValueString = counterValueString.replaceAll("\\{|\\}", "");

      StringBuilder stringBuilder = new StringBuilder();

      for (String counterKeyValue : counterValueString.split(",")) {
        stringBuilder.append(counterKeyValue.trim()).append('\n');
      }
      ByteArrayInputStream inputStream = new ByteArrayInputStream(stringBuilder.toString().getBytes(DEFAULT_ENCODING));
      Properties counterProperties = new Properties();
      counterProperties.load(inputStream);

      for (Object counterKey : counterProperties.keySet()) {
        long counterValue = Long.parseLong(counterProperties.get(counterKey).toString());
        counter.set(groupName.toString(), counterKey.toString(), counterValue);
      }
    }
  }

  private List<Heuristic> loadHeuristics() {
    List<Heuristic> heuristics = new ArrayList<Heuristic>();
    // dummy hash map
    Map<String, String> paramsMap = new HashMap<String, String>();
    heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Data Skew",
        "com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic",
        "views.html.help.mapreduce.helpMapperDataSkew", new ApplicationType("mapreduce"), paramsMap)));
    heuristics.add(new MapperDataSkewHeuristic(
        new HeuristicConfigurationData("Mapper GC", "com.linkedin.drelephant.mapreduce.heuristics.MapperGCHeuristic",
            "views.html.help.mapreduce.helpGC", new ApplicationType("mapreduce"), paramsMap)));
    heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Time",
        "com.linkedin.drelephant.mapreduce.heuristics.MapperTimeHeuristic", "views.html.help.mapreduce.helpMapperTime",
        new ApplicationType("mapreduce"), paramsMap)));
    heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Speed",
        "com.linkedin.drelephant.mapreduce.heuristics.MapperSpeedHeuristic",
        "views.html.help.mapreduce.helpMapperSpeed", new ApplicationType("mapreduce"), paramsMap)));
    heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Spill",
        "com.linkedin.drelephant.mapreduce.heuristics.MapperSpillHeuristic",
        "views.html.help.mapreduce.helpMapperSpill", new ApplicationType("mapreduce"), paramsMap)));
    heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Memory",
        "com.linkedin.drelephant.mapreduce.heuristics.MapperMemoryHeuristic",
        "views.html.help.mapreduce.helpMapperMemory", new ApplicationType("mapreduce"), paramsMap)));
    heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Reducer Data Skew",
        "com.linkedin.drelephant.mapreduce.heuristics.ReducerDataSkewHeuristic",
        "views.html.help.mapreduce.helpReducerDataSkew", new ApplicationType("mapreduce"), paramsMap)));
    heuristics.add(new MapperDataSkewHeuristic(
        new HeuristicConfigurationData("Reducer GC", "com.linkedin.drelephant.mapreduce.heuristics.ReducerGCHeuristic",
            "views.html.help.mapreduce.helpGC", new ApplicationType("mapreduce"), paramsMap)));
    heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Reducer Time",
        "com.linkedin.drelephant.mapreduce.heuristics.ReducerTimeHeuristic",
        "views.html.help.mapreduce.helpReducerTime", new ApplicationType("mapreduce"), paramsMap)));
    heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Reducer Memory",
        "com.linkedin.drelephant.mapreduce.heuristics.ReducerMemoryHeuristic",
        "views.html.help.mapreduce.helpReducerMemory", new ApplicationType("mapreduce"), paramsMap)));
    heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Shuffle & Sort",
        "com.linkedin.drelephant.mapreduce.heuristics.ShuffleSortHeuristic",
        "views.html.help.mapreduce.helpShuffleSort", new ApplicationType("mapreduce"), paramsMap)));
    heuristics.add(new MapperDataSkewHeuristic(
        new HeuristicConfigurationData("Exception", "com.linkedin.drelephant.mapreduce.heuristics.ExceptionHeuristic",
            "views.html.help.mapreduce.helpException", new ApplicationType("mapreduce"), paramsMap)));

    return heuristics;
  }
}
@@ -0,0 +1,46 @@
/*
 * Copyright 2016 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package common;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


public class TestUtil {

  private static final Logger logger = LoggerFactory.getLogger(TestUtil.class);

  // private on purpose
  private TestUtil() {}

  public static Properties loadProperties(String filePath)
      throws IOException {
    Properties properties = new Properties();
    InputStream inputStream = TestUtil.class.getClassLoader().getResourceAsStream(filePath);
    if (inputStream == null) {
      logger.info("Configuration file not present in classpath. File: " + filePath);
      throw new RuntimeException("Unable to read " + filePath);
    }
    properties.load(inputStream);
    logger.info("Configuration file loaded. File: " + filePath);
    return properties;
  }

}
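For reference, a minimal sketch (not part of the commit) of how the counter fixture files below are read through TestUtil; it assumes FILENAME_JOBCOUNTER from common.TestConstants resolves to the job counter fixture on the test classpath:

package common;

import java.io.IOException;
import java.util.Properties;

import static common.TestConstants.FILENAME_JOBCOUNTER;

public class FixtureLoadSketch {
  public static void main(String[] args) throws IOException {
    // Each property is keyed by a counter group name; its value looks like
    // "{FILE_BYTES_READ=0, HDFS_BYTES_READ=268, ...}". setCounterData() in the
    // test above strips the braces and re-parses the pairs into MapReduceCounterData.
    Properties jobCounter = TestUtil.loadProperties(FILENAME_JOBCOUNTER);
    System.out.println(jobCounter.getProperty("org.apache.hadoop.mapreduce.TaskCounter"));
  }
}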
@@ -0,0 +1,3 @@
org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=4, HDFS_BYTES_READ=268, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=117858, FILE_BYTES_READ=0, HDFS_WRITE_OPS=0, HDFS_BYTES_WRITTEN=0}
org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter={BYTES_READ=118}
org.apache.hadoop.mapreduce.TaskCounter={MAP_OUTPUT_MATERIALIZED_BYTES=28, SPILLED_RECORDS=2, MERGED_MAP_OUTPUTS=0, VIRTUAL_MEMORY_BYTES=0, MAP_INPUT_RECORDS=1, SPLIT_RAW_BYTES=150, FAILED_SHUFFLE=0, MAP_OUTPUT_BYTES=18, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=46, MAP_OUTPUT_RECORDS=2, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=201326592}
@@ -0,0 +1,3 @@
org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=4, HDFS_BYTES_READ=268, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=117858, FILE_BYTES_READ=0, HDFS_WRITE_OPS=0, HDFS_BYTES_WRITTEN=0}
org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter={BYTES_READ=118}
org.apache.hadoop.mapreduce.TaskCounter={MAP_OUTPUT_MATERIALIZED_BYTES=28, SPILLED_RECORDS=2, MERGED_MAP_OUTPUTS=0, VIRTUAL_MEMORY_BYTES=0, MAP_INPUT_RECORDS=1, SPLIT_RAW_BYTES=150, FAILED_SHUFFLE=0, MAP_OUTPUT_BYTES=18, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=46, MAP_OUTPUT_RECORDS=2, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=201326592}
@@ -0,0 +1,3 @@
org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=4, HDFS_BYTES_READ=268, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=117858, FILE_BYTES_READ=0, HDFS_WRITE_OPS=0, HDFS_BYTES_WRITTEN=0}
org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter={BYTES_READ=118}
org.apache.hadoop.mapreduce.TaskCounter={MAP_OUTPUT_MATERIALIZED_BYTES=28, SPILLED_RECORDS=2, MERGED_MAP_OUTPUTS=0, VIRTUAL_MEMORY_BYTES=0, MAP_INPUT_RECORDS=1, SPLIT_RAW_BYTES=150, FAILED_SHUFFLE=0, MAP_OUTPUT_BYTES=18, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=43, MAP_OUTPUT_RECORDS=2, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=201326592}
@@ -0,0 +1,4 @@
org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=3, HDFS_BYTES_READ=0, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=117847, FILE_BYTES_READ=72, HDFS_WRITE_OPS=3, HDFS_BYTES_WRITTEN=215}
org.apache.hadoop.mapreduce.lib.output.FileOutputFormatCounter={BYTES_WRITTEN=97}
org.apache.hadoop.mapreduce.TaskCounter={REDUCE_INPUT_RECORDS=6, SPILLED_RECORDS=6, MERGED_MAP_OUTPUTS=3, VIRTUAL_MEMORY_BYTES=0, FAILED_SHUFFLE=0, REDUCE_SHUFFLE_BYTES=84, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=38, REDUCE_INPUT_GROUPS=2, COMBINE_OUTPUT_RECORDS=0, SHUFFLED_MAPS=3, REDUCE_OUTPUT_RECORDS=0, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=147849216}
Shuffle Errors={CONNECTION=0, WRONG_LENGTH=0, BAD_ID=0, WRONG_REDUCE=0, IO_ERROR=0, WRONG_MAP=0}
@@ -0,0 +1,4 @@
azkaban.link.job.url=https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder&job=overwriter-reminder2
azkaban.link.attempt.url=https://elephant.linkedin.com:8443/executor?execid=1654676&job=overwriter-reminder2&attempt=0
azkaban.link.workflow.url=https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder
azkaban.link.execution.url=https://elephant.linkedin.com:8443/executor?execid=1654676
@@ -0,0 +1,6 @@
org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=15, HDFS_BYTES_READ=804, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=471421, FILE_BYTES_READ=72, HDFS_WRITE_OPS=3, HDFS_BYTES_WRITTEN=215}
org.apache.hadoop.mapreduce.JobCounter={TOTAL_LAUNCHED_MAPS=3, VCORES_MILLIS_REDUCES=1870, MB_MILLIS_MAPS=7876608, TOTAL_LAUNCHED_REDUCES=1, SLOTS_MILLIS_REDUCES=1870, VCORES_MILLIS_MAPS=7692, MB_MILLIS_REDUCES=1914880, SLOTS_MILLIS_MAPS=7692, MILLIS_REDUCES=1870, MILLIS_MAPS=7692, DATA_LOCAL_MAPS=3}
org.apache.hadoop.mapreduce.lib.output.FileOutputFormatCounter={BYTES_WRITTEN=97}
org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter={BYTES_READ=354}
org.apache.hadoop.mapreduce.TaskCounter={MAP_OUTPUT_MATERIALIZED_BYTES=84, REDUCE_INPUT_RECORDS=6, SPILLED_RECORDS=12, MERGED_MAP_OUTPUTS=3, VIRTUAL_MEMORY_BYTES=0, MAP_INPUT_RECORDS=3, SPLIT_RAW_BYTES=450, FAILED_SHUFFLE=0, MAP_OUTPUT_BYTES=54, REDUCE_SHUFFLE_BYTES=84, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=173, REDUCE_INPUT_GROUPS=2, COMBINE_OUTPUT_RECORDS=0, SHUFFLED_MAPS=3, REDUCE_OUTPUT_RECORDS=0, MAP_OUTPUT_RECORDS=6, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=751828992}
Shuffle Errors={CONNECTION=0, WRONG_LENGTH=0, BAD_ID=0, WRONG_REDUCE=0, IO_ERROR=0, WRONG_MAP=0}