[FLINK-7042] [yarn] Fix jar file discovery in flink-yarn-tests
Add dependencies for the batch and streaming WordCount programs and copy
the jar files into a new target/programs directory. The integration
tests now reference the program jar files directly rather than relying
on the prior brittle search.

This removes the flink-yarn-tests build-time dependency on the examples
modules (there remains a build-time dependency on flink-dist).

This closes apache#4264
greghogan committed Jul 7, 2017
1 parent a76421e commit 709f23e
Showing 2 changed files with 65 additions and 8 deletions.
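For illustration, a minimal sketch of the direct-reference pattern described in the commit message (hypothetical, not part of this commit), assuming JUnit 4 and that the test JVM's working directory is the flink-yarn-tests module root, as it is for the existing integration tests; the class and test names are invented:

import java.io.File;

import org.junit.Assert;
import org.junit.Test;

// Hypothetical sketch: reference a program jar at the fixed, module-relative path
// where the maven-dependency-plugin (configured below) copies it before the test phase.
public class ProgramJarReferenceSketch {

	@Test
	public void referencesCopiedBatchWordCountJar() {
		// Fixed path under the module's build directory; no search of sibling modules needed.
		File exampleJarLocation = new File("target/programs/BatchWordCount.jar");

		Assert.assertTrue(
			"Jar should have been copied during process-test-resources",
			exampleJarLocation.exists());
	}
}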
60 changes: 60 additions & 0 deletions flink-yarn-tests/pom.xml
@@ -79,6 +79,24 @@ under the License.
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-examples-batch_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<type>jar</type>
<classifier>WordCount</classifier>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-examples-streaming_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<type>jar</type>
<classifier>WordCount</classifier>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-shaded-hadoop2</artifactId>
@@ -351,6 +369,48 @@ under the License.
</execution>
</executions>
</plugin>

<!--
Copy the batch and streaming example programs into the flink-yarn-tests/target/programs
directory to be run during the YARN integration tests.
-->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>3.0.1</version>
<executions>
<execution>
<id>copy</id>
<phase>process-test-resources</phase>
<goals>
<goal>copy</goal>
</goals>
</execution>
</executions>
<configuration>
<artifactItems>
<artifactItem>
<groupId>org.apache.flink</groupId>
<artifactId>flink-examples-batch_${scala.binary.version}</artifactId>
<type>jar</type>
<classifier>WordCount</classifier>
<overWrite>true</overWrite>
<destFileName>BatchWordCount.jar</destFileName>
</artifactItem>
<artifactItem>
<groupId>org.apache.flink</groupId>
<artifactId>flink-examples-streaming_${scala.binary.version}</artifactId>
<type>jar</type>
<classifier>WordCount</classifier>
<overWrite>true</overWrite>
<destFileName>StreamingWordCount.jar</destFileName>
</artifactItem>
</artifactItems>
<outputDirectory>${project.build.directory}/programs</outputDirectory>
<overWriteReleases>false</overWriteReleases>
<overWriteSnapshots>true</overWriteSnapshots>
</configuration>
</plugin>
</plugins>
</build>
</project>
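To confirm that the copy execution above ran as expected, a small standalone check could look like the following (a hypothetical sketch, not part of the commit; it assumes it is run from the flink-yarn-tests module directory after mvn process-test-resources):

import java.io.File;

// Hypothetical sanity check: verifies that both renamed example jars were copied
// into target/programs by the maven-dependency-plugin execution above.
public class CopiedProgramsCheck {

	public static void main(String[] args) {
		File programsDir = new File("target/programs");
		String[] expected = {"BatchWordCount.jar", "StreamingWordCount.jar"};

		for (String name : expected) {
			File jar = new File(programsDir, name);
			if (!jar.isFile() || jar.length() == 0) {
				throw new IllegalStateException("Missing or empty program jar: " + jar.getAbsolutePath());
			}
			System.out.println("Found " + jar.getPath() + " (" + jar.length() + " bytes)");
		}
	}
}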
@@ -115,7 +115,7 @@ public void testClientStartup() {
public void perJobYarnCluster() {
LOG.info("Starting perJobYarnCluster()");
addTestAppender(JobClient.class, Level.INFO);
- File exampleJarLocation = YarnTestBase.findFile("..", new ContainsName(new String[] {"-WordCount.jar"} , "streaming")); // exclude streaming wordcount here.
+ File exampleJarLocation = new File("target/programs/BatchWordCount.jar");
Assert.assertNotNull("Could not find wordcount jar", exampleJarLocation);
runWithArgs(new String[]{"run", "-m", "yarn-cluster",
"-yj", flinkUberjar.getAbsolutePath(), "-yt", flinkLibFolder.getAbsolutePath(),
@@ -339,7 +339,7 @@ public void perJobYarnClusterWithParallelism() {
// write log messages to stdout as well, so that the runWithArgs() method
// is catching the log output
addTestAppender(JobClient.class, Level.INFO);
- File exampleJarLocation = YarnTestBase.findFile("..", new ContainsName(new String[] {"-WordCount.jar"}, "streaming")); // exclude streaming wordcount here.
+ File exampleJarLocation = new File("target/programs/BatchWordCount.jar");
Assert.assertNotNull("Could not find wordcount jar", exampleJarLocation);
runWithArgs(new String[]{"run",
"-p", "2", //test that the job is executed with a DOP of 2
@@ -364,9 +364,7 @@ public void perJobYarnClusterWithParallelism() {
public void testDetachedPerJobYarnCluster() {
LOG.info("Starting testDetachedPerJobYarnCluster()");

- File exampleJarLocation = YarnTestBase.findFile(
- ".." + File.separator + "flink-examples" + File.separator + "flink-examples-batch",
- new ContainsName(new String[] {"-WordCount.jar"}));
+ File exampleJarLocation = new File("target/programs/BatchWordCount.jar");

Assert.assertNotNull("Could not find batch wordcount jar", exampleJarLocation);

@@ -382,9 +380,8 @@ public void testDetachedPerJobYarnCluster() {
public void testDetachedPerJobYarnClusterWithStreamingJob() {
LOG.info("Starting testDetachedPerJobYarnClusterWithStreamingJob()");

- File exampleJarLocation = YarnTestBase.findFile(
- ".." + File.separator + "flink-examples" + File.separator + "flink-examples-streaming",
- new ContainsName(new String[] {"-WordCount.jar"}));
+ File exampleJarLocation = new File("target/programs/StreamingWordCount.jar");

Assert.assertNotNull("Could not find streaming wordcount jar", exampleJarLocation);

testDetachedPerJobYarnClusterInternal(exampleJarLocation.getAbsolutePath());
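Since the tests above now repeat the same fixed-path lookup, a shared helper could centralize the path and the existence check. This is only a hypothetical refactoring sketch, not something the commit introduces; the class and method names are invented:

import java.io.File;

import org.junit.Assert;

// Hypothetical helper: one place for the target/programs lookup used by the tests above.
final class ExamplePrograms {

	private ExamplePrograms() {
	}

	static File getProgramJar(String fileName) {
		File jar = new File("target/programs", fileName);
		Assert.assertTrue("Could not find program jar " + jar.getAbsolutePath(), jar.isFile());
		return jar;
	}
}

Usage in a test would then be, for example: File exampleJarLocation = ExamplePrograms.getProgramJar("StreamingWordCount.jar");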
