LIHADOOP-19850: Test for AnalyticJob (linkedin#42)
* Test for AnalyticJob

Added a test to perform analysis on an analytic job and compute results.
Dependencies in the getAnalysis() method are mocked out using JMockit.

$>../tools/play "test-only com.linkedin.drelephant.analysis.AnalyticJobTest"
...
[info] com.linkedin.drelephant.analysis.AnalyticJobTest
[info] + testGetAnalysis
[info]
[info]
[info] Total for test com.linkedin.drelephant.analysis.AnalyticJobTest
[info] Finished in 0.01 seconds
[info] 1 tests, 0 failures, 0 errors
[info] Passed: Total 1, Failed 0, Errors 0, Passed 1
[success] Total time: 2 s, completed May 9, 2016 1:50:18 PM

Notes:
1. JaCoCo and JMockit don't work well together. This test has to be ignored when generating
code coverage with JaCoCo. Alternatively, coverage can be generated with JMockit itself.
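
For reference, one way to handle the note above during a JaCoCo coverage build (not part of this commit) is JUnit 4's @Ignore annotation on the test method; the annotation and reason string below are illustrative only, and the method body stays exactly as in the diff further down:

// Hypothetical: applied only when generating coverage with JaCoCo,
// since JMockit's bytecode instrumentation conflicts with JaCoCo's.
@org.junit.Ignore("JMockit instrumentation conflicts with JaCoCo; use JMockit coverage instead")
@Test
public void testGetAnalysis() throws Exception {
  // ... test body unchanged ...
}
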
rajagopr authored and akshayrai committed May 10, 2016
1 parent af70800 commit 97014ac
Showing 12 changed files with 297 additions and 1 deletion.
8 changes: 8 additions & 0 deletions NOTICE
@@ -59,6 +59,14 @@ This product requires the following play dependencies in addition to the core Pl
javaEbean, the Ebean plugin for Java.
javaJdbc, the Java database API.

This product includes/uses JMockit (http://jmockit.org/)
Notice: https://github.com/jmockit/jmockit1/blob/master/NOTICE.txt
License: The MIT License (https://github.com/jmockit/jmockit1/blob/master/LICENSE.txt)

This product includes/uses Mockito (http://mockito.org)
Copyright (c) 2007 Mockito contributors
License: The MIT License (https://github.com/mockito/mockito/blob/master/LICENSE)

------------------------------------------------------------------------------
Attribution for JavaScript Libraries
------------------------------------------------------------------------------
2 changes: 2 additions & 0 deletions app/com/linkedin/drelephant/analysis/AnalyticJob.java
@@ -284,6 +284,8 @@ public AppResult getAnalysis() throws Exception {
AppHeuristicResultDetails.VALUE_LIMIT, getAppId());
heuristicDetail.details = Utils.truncateField(heuristicResultDetails.getDetails(),
AppHeuristicResultDetails.DETAILS_LIMIT, getAppId());
// Populate the in-memory details list so the test can verify heuristic result details
detail.yarnAppHeuristicResultDetails = new ArrayList<AppHeuristicResultDetails>();
detail.yarnAppHeuristicResultDetails.add(heuristicDetail);
}
result.yarnAppHeuristicResults.add(detail);
3 changes: 2 additions & 1 deletion project/Dependencies.scala
@@ -70,7 +70,8 @@ object Dependencies {
"org.apache.hadoop" % "hadoop-hdfs" % hadoopVersion % Test,
"org.codehaus.jackson" % "jackson-mapper-asl" % jacksonMapperAslVersion,
"org.jsoup" % "jsoup" % jsoupVersion,
"org.mockito" % "mockito-core" % "1.10.19"
"org.mockito" % "mockito-core" % "1.10.19",
"org.jmockit" % "jmockit" % "1.23" % Test
) :+ sparkExclusion

var dependencies = Seq(javaJdbc, javaEbean, cache)
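
A hedged note on the new JMockit test dependency (not part of this commit): JMockit 1.x generally requires either its jar ahead of JUnit on the test runtime classpath or the JMockit Java agent attached to the forked test JVM. A minimal sbt 0.13-style sketch, assuming the artifact resolves into the default Ivy cache path:

// Hypothetical sbt settings, not included in this change.
fork in Test := true
javaOptions in Test += "-javaagent:" + sys.props("user.home") +
  "/.ivy2/cache/org.jmockit/jmockit/jars/jmockit-1.23.jar"
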
198 changes: 198 additions & 0 deletions test/com/linkedin/drelephant/analysis/AnalyticJobTest.java
@@ -0,0 +1,198 @@
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

package com.linkedin.drelephant.analysis;

import com.linkedin.drelephant.ElephantContext;
import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
import com.linkedin.drelephant.mapreduce.MapReduceFetcherHadoop2;
import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
import com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic;
import common.TestUtil;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import mockit.Expectations;
import mockit.Mocked;
import models.AppResult;
import org.junit.Test;

import static common.TestConstants.*;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;


/**
* Test aims to exercise {@code getAnalysis()} method in {@code AnalyticJob}.<br>
* Dependencies to {@code ElephantContext, ElephantFetcher and Heuristics} are mocked
* out with JMockit.
*/
public class AnalyticJobTest {
@Mocked(stubOutClassInitialization = true)
ElephantContext elephantContext = null;
@Mocked
MapReduceFetcherHadoop2 fetcher;

@Test
public void testGetAnalysis()
throws Exception {
try {
// Setup analytic job
final AnalyticJob analyticJob = new AnalyticJob().
setAppId(TEST_JOB_ID1).setAppType(new ApplicationType(TEST_APP_TYPE)).
setFinishTime(1462178403).setStartTime(1462178412).setName(TEST_JOB_NAME).
setQueueName(TEST_DEFAULT_QUEUE_NAME).setUser(TEST_USERNAME).setTrackingUrl(TEST_TRACKING_URL);

// Setup job counter data
String filePath = FILENAME_JOBCOUNTER;
MapReduceCounterData jobCounter = new MapReduceCounterData();
setCounterData(jobCounter, filePath);

// Setup mapper data
long[][] mapperTasksTime = {{2563, 0, 0}, {2562, 0, 0}, {2567, 0, 0}};
MapReduceTaskData[] mappers = new MapReduceTaskData[3];
for (int i = 1; i <= mappers.length; i++) {
MapReduceCounterData taskCounter = new MapReduceCounterData();
setCounterData(taskCounter, FILENAME_MAPPERTASK.replaceFirst("\\$", Integer.toString(i)));
mappers[i - 1] = new MapReduceTaskData(taskCounter, mapperTasksTime[i - 1]);
}

// Setup reducer data
long[][] reducerTasksTime = {{1870, 1665, 14}};
MapReduceTaskData[] reducers = new MapReduceTaskData[1];
for (int i = 1; i <= reducers.length; i++) {
MapReduceCounterData taskCounter = new MapReduceCounterData();
setCounterData(taskCounter, FILENAME_REDUCERTASK.replaceFirst("\\$", Integer.toString(i)));
reducers[i - 1] = new MapReduceTaskData(taskCounter, reducerTasksTime[i - 1]);
}

// Setup job configuration data
filePath = FILENAME_JOBCONF;
Properties jobConf = TestUtil.loadProperties(filePath);

// Setup application data
final MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).
setMapperData(mappers).setReducerData(reducers).setJobConf(jobConf).setSucceeded(true).
setDiagnosticInfo("").setUsername(TEST_USERNAME).setUrl("").setJobName(TEST_JOB_NAME).
setStartTime(1462178412).setFinishTime(1462178403).setRetry(false).setAppId(TEST_JOB_ID1);

// Setup heuristics
final List<Heuristic> heuristics = loadHeuristics();

// Setup job type
final JobType jobType = new JobType(TEST_JOB_TYPE, TEST_JOBCONF_NAME, TEST_JOBCONF_PATTERN);

// Set expectations in JMockit
new Expectations() {{
fetcher.fetchData(analyticJob);
result = data;

elephantContext.getHeuristicsForApplicationType(analyticJob.getAppType());
result = heuristics;

elephantContext.matchJobType(data);
result = jobType;
}};

// Call the method under test
AppResult result = analyticJob.getAnalysis();

// Make assertions on result
assertTrue("Result is null", result != null);
assertTrue("Score did not match", result.score == TEST_SCORE);
assertTrue("Severity did not match", result.severity.toString().equals(TEST_SEVERITY));
assertTrue("APP ID did not match", result.id.equals(TEST_JOB_ID1));
assertTrue("Scheduler did not match", result.scheduler.equals(TEST_SCHEDULER));
} catch (Exception e) {
e.printStackTrace();
assertFalse("Test failed with exception", true);
}
}

private void setCounterData(MapReduceCounterData counter, String filePath)
throws IOException {
Properties counterData = TestUtil.loadProperties(filePath);

for (Object groupName : counterData.keySet()) {
String counterValueString = (String) counterData.get(groupName);
counterValueString = counterValueString.replaceAll("\\{|\\}", "");

StringBuilder stringBuilder = new StringBuilder();

for (String counterKeyValue : counterValueString.split(",")) {
stringBuilder.append(counterKeyValue.trim()).append('\n');
}
ByteArrayInputStream inputStream = new ByteArrayInputStream(stringBuilder.toString().getBytes(DEFAULT_ENCODING));
Properties counterProperties = new Properties();
counterProperties.load(inputStream);

for (Object counterKey : counterProperties.keySet()) {
long counterValue = Long.parseLong(counterProperties.get(counterKey).toString());
counter.set(groupName.toString(), counterKey.toString(), counterValue);
}
}
}

private List<Heuristic> loadHeuristics() {
List<Heuristic> heuristics = new ArrayList<Heuristic>();
// dummy hash map
Map<String, String> paramsMap = new HashMap<String, String>();
heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Data Skew",
"com.linkedin.drelephant.mapreduce.heuristics.MapperDataSkewHeuristic",
"views.html.help.mapreduce.helpMapperDataSkew", new ApplicationType("mapreduce"), paramsMap)));
heuristics.add(new MapperDataSkewHeuristic(
new HeuristicConfigurationData("Mapper GC", "com.linkedin.drelephant.mapreduce.heuristics.MapperGCHeuristic",
"views.html.help.mapreduce.helpGC", new ApplicationType("mapreduce"), paramsMap)));
heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Time",
"com.linkedin.drelephant.mapreduce.heuristics.MapperTimeHeuristic", "views.html.help.mapreduce.helpMapperTime",
new ApplicationType("mapreduce"), paramsMap)));
heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Speed",
"com.linkedin.drelephant.mapreduce.heuristics.MapperSpeedHeuristic",
"views.html.help.mapreduce.helpMapperSpeed", new ApplicationType("mapreduce"), paramsMap)));
heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Spill",
"com.linkedin.drelephant.mapreduce.heuristics.MapperSpillHeuristic",
"views.html.help.mapreduce.helpMapperSpill", new ApplicationType("mapreduce"), paramsMap)));
heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Mapper Memory",
"com.linkedin.drelephant.mapreduce.heuristics.MapperMemoryHeuristic",
"views.html.help.mapreduce.helpMapperMemory", new ApplicationType("mapreduce"), paramsMap)));
heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Reducer Data Skew",
"com.linkedin.drelephant.mapreduce.heuristics.ReducerDataSkewHeuristic",
"views.html.help.mapreduce.helpReducerDataSkew", new ApplicationType("mapreduce"), paramsMap)));
heuristics.add(new MapperDataSkewHeuristic(
new HeuristicConfigurationData("Reducer GC", "com.linkedin.drelephant.mapreduce.heuristics.ReducerGCHeuristic",
"views.html.help.mapreduce.helpGC", new ApplicationType("mapreduce"), paramsMap)));
heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Reducer Time",
"com.linkedin.drelephant.mapreduce.heuristics.ReducerTimeHeuristic",
"views.html.help.mapreduce.helpReducerTime", new ApplicationType("mapreduce"), paramsMap)));
heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Reducer Memory",
"com.linkedin.drelephant.mapreduce.heuristics.ReducerMemoryHeuristic",
"views.html.help.mapreduce.helpReducerMemory", new ApplicationType("mapreduce"), paramsMap)));
heuristics.add(new MapperDataSkewHeuristic(new HeuristicConfigurationData("Shuffle &#38; Sort",
"com.linkedin.drelephant.mapreduce.heuristics.ShuffleSortHeuristic",
"views.html.help.mapreduce.helpShuffleSort", new ApplicationType("mapreduce"), paramsMap)));
heuristics.add(new MapperDataSkewHeuristic(
new HeuristicConfigurationData("Exception", "com.linkedin.drelephant.mapreduce.heuristics.ExceptionHeuristic",
"views.html.help.mapreduce.helpException", new ApplicationType("mapreduce"), paramsMap)));

return heuristics;
}
}
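
For readers unfamiliar with JMockit: the @Mocked fields above are replaced with mock instances, and the anonymous Expectations block records the calls (and canned results) the code under test is expected to make, so getAnalysis() never touches Hadoop or the database. A minimal, self-contained sketch of the same record/replay pattern follows; Service and Client are invented for illustration and are not part of this commit:

import mockit.Expectations;
import mockit.Mocked;
import org.junit.Test;
import static org.junit.Assert.assertEquals;

public class RecordReplayExampleTest {
  // Invented collaborator and consumer, purely for illustration.
  static class Service { String fetch(String key) { return "real:" + key; } }
  static class Client {
    private final Service service;
    Client(Service service) { this.service = service; }
    String lookup(String key) { return service.fetch(key); }
  }

  @Mocked
  Service service; // JMockit substitutes mock behaviour for Service instances

  @Test
  public void returnsRecordedValue() {
    new Expectations() {{
      service.fetch("key");
      result = "canned"; // record phase: the expected call and its result
    }};
    // Replay phase: the mocked Service returns the recorded result.
    assertEquals("canned", new Client(service).lookup("key"));
  }
}
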
18 changes: 18 additions & 0 deletions test/common/TestConstants.java
@@ -28,7 +28,9 @@ public class TestConstants {
public static final String TEST_JOB_ID2 = "application_1458194917883_1453362";
public static final String TEST_JOB_NAME = "Email Overwriter";
public static final String TEST_JOB_TYPE = "HadoopJava";
public static final String TEST_APP_TYPE = "HadoopJava";
public static final String TEST_USERNAME = "growth";
public static final String TEST_DEFAULT_QUEUE_NAME = "default";

public static final String TEST_JOB_EXEC_ID1 =
"https://elephant.linkedin.com:8443/executor?execid=1654676&job=overwriter-reminder2&attempt=0";
@@ -62,4 +64,20 @@ public class TestConstants {
public static final String REST_COMPARE_PATH = "/rest/compare";
public static final String REST_FLOW_GRAPH_DATA_PATH = "/rest/flowgraphdata";
public static final String REST_JOB_GRAPH_DATA_PATH = "/rest/jobgraphdata";

public static final String DEFAULT_ENCODING = "UTF-8";

// Sample mapreduce constants
public static final String FILENAME_JOBCOUNTER = "mrdata/sampleJobCounter.properties";
public static final String FILENAME_MAPPERTASK = "mrdata/mapperTaskCounter$.properties";
public static final String FILENAME_REDUCERTASK = "mrdata/reducerTaskCounter$.properties";
public static final String FILENAME_JOBCONF = "mrdata/sampleJobConf.properties";

public static final String TEST_TRACKING_URL = "http://hostname/jobhistory/job/job_1460381439677_0001";
public static final String TEST_JOBCONF_NAME = "mapred.child.java.opts";
public static final String TEST_JOBCONF_PATTERN = ".*.";
public static final String TEST_SCHEDULER = "azkaban";
public static final long TEST_SCORE = 0;
public static final String TEST_SEVERITY = "NONE";

}
46 changes: 46 additions & 0 deletions test/common/TestUtil.java
@@ -0,0 +1,46 @@
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

package common;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


public class TestUtil {

private static final Logger logger = LoggerFactory.getLogger(TestUtil.class);

// private on purpose
private TestUtil() {}

public static Properties loadProperties(String filePath)
throws IOException {
Properties properties = new Properties();
InputStream inputStream = TestUtil.class.getClassLoader().getResourceAsStream(filePath);
if (inputStream == null) {
logger.info("Configuation file not present in classpath. File: " + filePath);
throw new RuntimeException("Unable to read " + filePath);
}
properties.load(inputStream);
logger.info("Configuation file loaded. File: " + filePath);
return properties;
}

}
3 changes: 3 additions & 0 deletions test/resources/mrdata/mapperTaskCounter1.properties
@@ -0,0 +1,3 @@
org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=4, HDFS_BYTES_READ=268, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=117858, FILE_BYTES_READ=0, HDFS_WRITE_OPS=0, HDFS_BYTES_WRITTEN=0}
org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter={BYTES_READ=118}
org.apache.hadoop.mapreduce.TaskCounter={MAP_OUTPUT_MATERIALIZED_BYTES=28, SPILLED_RECORDS=2, MERGED_MAP_OUTPUTS=0, VIRTUAL_MEMORY_BYTES=0, MAP_INPUT_RECORDS=1, SPLIT_RAW_BYTES=150, FAILED_SHUFFLE=0, MAP_OUTPUT_BYTES=18, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=46, MAP_OUTPUT_RECORDS=2, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=201326592}
3 changes: 3 additions & 0 deletions test/resources/mrdata/mapperTaskCounter2.properties
@@ -0,0 +1,3 @@
org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=4, HDFS_BYTES_READ=268, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=117858, FILE_BYTES_READ=0, HDFS_WRITE_OPS=0, HDFS_BYTES_WRITTEN=0}
org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter={BYTES_READ=118}
org.apache.hadoop.mapreduce.TaskCounter={MAP_OUTPUT_MATERIALIZED_BYTES=28, SPILLED_RECORDS=2, MERGED_MAP_OUTPUTS=0, VIRTUAL_MEMORY_BYTES=0, MAP_INPUT_RECORDS=1, SPLIT_RAW_BYTES=150, FAILED_SHUFFLE=0, MAP_OUTPUT_BYTES=18, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=46, MAP_OUTPUT_RECORDS=2, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=201326592}
3 changes: 3 additions & 0 deletions test/resources/mrdata/mapperTaskCounter3.properties
@@ -0,0 +1,3 @@
org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=4, HDFS_BYTES_READ=268, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=117858, FILE_BYTES_READ=0, HDFS_WRITE_OPS=0, HDFS_BYTES_WRITTEN=0}
org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter={BYTES_READ=118}
org.apache.hadoop.mapreduce.TaskCounter={MAP_OUTPUT_MATERIALIZED_BYTES=28, SPILLED_RECORDS=2, MERGED_MAP_OUTPUTS=0, VIRTUAL_MEMORY_BYTES=0, MAP_INPUT_RECORDS=1, SPLIT_RAW_BYTES=150, FAILED_SHUFFLE=0, MAP_OUTPUT_BYTES=18, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=43, MAP_OUTPUT_RECORDS=2, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=201326592}
4 changes: 4 additions & 0 deletions test/resources/mrdata/reducerTaskCounter1.properties
@@ -0,0 +1,4 @@
org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=3, HDFS_BYTES_READ=0, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=117847, FILE_BYTES_READ=72, HDFS_WRITE_OPS=3, HDFS_BYTES_WRITTEN=215}
org.apache.hadoop.mapreduce.lib.output.FileOutputFormatCounter={BYTES_WRITTEN=97}
org.apache.hadoop.mapreduce.TaskCounter={REDUCE_INPUT_RECORDS=6, SPILLED_RECORDS=6, MERGED_MAP_OUTPUTS=3, VIRTUAL_MEMORY_BYTES=0, FAILED_SHUFFLE=0, REDUCE_SHUFFLE_BYTES=84, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=38, REDUCE_INPUT_GROUPS=2, COMBINE_OUTPUT_RECORDS=0, SHUFFLED_MAPS=3, REDUCE_OUTPUT_RECORDS=0, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=147849216}
Shuffle&nbsp;Errors={CONNECTION=0, WRONG_LENGTH=0, BAD_ID=0, WRONG_REDUCE=0, IO_ERROR=0, WRONG_MAP=0}
4 changes: 4 additions & 0 deletions test/resources/mrdata/sampleJobConf.properties
@@ -0,0 +1,4 @@
azkaban.link.job.url=https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder&job=overwriter-reminder2
azkaban.link.attempt.url=https://elephant.linkedin.com:8443/executor?execid=1654676&job=overwriter-reminder2&attempt=0
azkaban.link.workflow.url=https://elephant.linkedin.com:8443/manager?project=b2-confirm-email-reminder&flow=reminder
azkaban.link.execution.url=https://elephant.linkedin.com:8443/executor?execid=1654676
6 changes: 6 additions & 0 deletions test/resources/mrdata/sampleJobCounter.properties
@@ -0,0 +1,6 @@
org.apache.hadoop.mapreduce.FileSystemCounter={FILE_LARGE_READ_OPS=0, FILE_WRITE_OPS=0, HDFS_READ_OPS=15, HDFS_BYTES_READ=804, HDFS_LARGE_READ_OPS=0, FILE_READ_OPS=0, FILE_BYTES_WRITTEN=471421, FILE_BYTES_READ=72, HDFS_WRITE_OPS=3, HDFS_BYTES_WRITTEN=215}
org.apache.hadoop.mapreduce.JobCounter={TOTAL_LAUNCHED_MAPS=3, VCORES_MILLIS_REDUCES=1870, MB_MILLIS_MAPS=7876608, TOTAL_LAUNCHED_REDUCES=1, SLOTS_MILLIS_REDUCES=1870, VCORES_MILLIS_MAPS=7692, MB_MILLIS_REDUCES=1914880, SLOTS_MILLIS_MAPS=7692, MILLIS_REDUCES=1870, MILLIS_MAPS=7692, DATA_LOCAL_MAPS=3}
org.apache.hadoop.mapreduce.lib.output.FileOutputFormatCounter={BYTES_WRITTEN=97}
org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter={BYTES_READ=354}
org.apache.hadoop.mapreduce.TaskCounter={MAP_OUTPUT_MATERIALIZED_BYTES=84, REDUCE_INPUT_RECORDS=6, SPILLED_RECORDS=12, MERGED_MAP_OUTPUTS=3, VIRTUAL_MEMORY_BYTES=0, MAP_INPUT_RECORDS=3, SPLIT_RAW_BYTES=450, FAILED_SHUFFLE=0, MAP_OUTPUT_BYTES=54, REDUCE_SHUFFLE_BYTES=84, PHYSICAL_MEMORY_BYTES=0, GC_TIME_MILLIS=173, REDUCE_INPUT_GROUPS=2, COMBINE_OUTPUT_RECORDS=0, SHUFFLED_MAPS=3, REDUCE_OUTPUT_RECORDS=0, MAP_OUTPUT_RECORDS=6, COMBINE_INPUT_RECORDS=0, CPU_MILLISECONDS=0, COMMITTED_HEAP_BYTES=751828992}
Shuffle&nbspErrors={CONNECTION=0, WRONG_LENGTH=0, BAD_ID=0, WRONG_REDUCE=0, IO_ERROR=0, WRONG_MAP=0}
