Add JDK 11 to CI build, disable Spark 2.4 with JDK 11 (apache#1168)
Co-authored-by: Fokko Driesprong <[email protected]>
Authored by rdblue and Fokko on Jul 6, 2020
Parent: 92ae3d3 · Commit: 6bfa47d
Showing 4 changed files with 136 additions and 113 deletions.
13 changes: 12 additions & 1 deletion .travis.yml
@@ -22,9 +22,20 @@ dist: bionic

matrix:
include:
- language: java
jdk: openjdk11
name: java11
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
- rm -fr $HOME/.gradle/caches/*/plugin-resolution/
cache:
directories:
- $HOME/.gradle/caches/
- $HOME/.gradle/wrapper/

- language: java
jdk: openjdk8
name: java
name: java8
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
- rm -fr $HOME/.gradle/caches/*/plugin-resolution/
221 changes: 113 additions & 108 deletions build.gradle
@@ -41,12 +41,14 @@ plugins {
id 'nebula.dependency-lock' version '9.0.0'
}

if (JavaVersion.current() != JavaVersion.VERSION_1_8) {
throw new GradleException("This build must be run with Java 8")
if (JavaVersion.current() == JavaVersion.VERSION_1_8) {
project.ext.jdkVersion = '8'
} else if (JavaVersion.current() == JavaVersion.VERSION_11) {
project.ext.jdkVersion = '11'
} else {
throw new GradleException("This build must be run with JDK 8 or 11")
}

apply from: 'jmh.gradle'

dependencyRecommendations {
propertiesFile file: file('versions.props')
}
@@ -136,7 +138,7 @@ project(':iceberg-bundled-guava') {

tasks.jar.dependsOn tasks.shadowJar

dependencies {
dependencies {
compileOnly('com.google.guava:guava') {
exclude group: 'com.google.code.findbugs'
// may be LGPL - use ALv2 findbugs-annotations instead
@@ -185,7 +187,7 @@ project(':iceberg-common') {
compile project(path: ':iceberg-bundled-guava', configuration: 'shadow')
}
}

project(':iceberg-core') {
dependencies {
compile project(':iceberg-api')
@@ -462,56 +464,122 @@ project(':iceberg-spark') {
}
}

project(':iceberg-spark2') {
if (jdkVersion == '8') {
apply from: 'jmh.gradle'

configurations.all {
resolutionStrategy {
// Spark 2.4.4 can only use the below datanucleus version, the versions introduced
// by Hive 2.3.6 will meet lots of unexpected issues, so here force to use the versions
// introduced by Hive 1.2.1.
force 'org.datanucleus:datanucleus-api-jdo:3.2.6'
force 'org.datanucleus:datanucleus-core:3.2.10'
force 'org.datanucleus:datanucleus-rdbms:3.2.9'
project(':iceberg-spark2') {
configurations.all {
resolutionStrategy {
// Spark 2.4.4 can only use the below datanucleus version, the versions introduced
// by Hive 2.3.6 will meet lots of unexpected issues, so here force to use the versions
// introduced by Hive 1.2.1.
force 'org.datanucleus:datanucleus-api-jdo:3.2.6'
force 'org.datanucleus:datanucleus-core:3.2.10'
force 'org.datanucleus:datanucleus-rdbms:3.2.9'
}
}
}

dependencies {
compile project(':iceberg-api')
compile project(':iceberg-common')
compile project(':iceberg-core')
compile project(':iceberg-data')
compile project(':iceberg-orc')
compile project(':iceberg-parquet')
compile project(':iceberg-arrow')
compile project(':iceberg-hive')
compile project(':iceberg-spark')
dependencies {
compile project(':iceberg-api')
compile project(':iceberg-common')
compile project(':iceberg-core')
compile project(':iceberg-data')
compile project(':iceberg-orc')
compile project(':iceberg-parquet')
compile project(':iceberg-arrow')
compile project(':iceberg-hive')
compile project(':iceberg-spark')

compileOnly "org.apache.avro:avro"
compileOnly("org.apache.spark:spark-hive_2.11") {
exclude group: 'org.apache.avro', module: 'avro'
}

compileOnly "org.apache.avro:avro"
compileOnly("org.apache.spark:spark-hive_2.11") {
exclude group: 'org.apache.avro', module: 'avro'
testCompile project(path: ':iceberg-spark', configuration: 'testArtifacts')

testCompile "org.apache.hadoop:hadoop-hdfs::tests"
testCompile "org.apache.hadoop:hadoop-common::tests"
testCompile("org.apache.hadoop:hadoop-minicluster") {
exclude group: 'org.apache.avro', module: 'avro'
}
testCompile project(path: ':iceberg-hive', configuration: 'testArtifacts')
testCompile project(path: ':iceberg-api', configuration: 'testArtifacts')
}

testCompile project(path: ':iceberg-spark', configuration: 'testArtifacts')
test {
// For vectorized reads
// Allow unsafe memory access to avoid the costly check arrow does to check if index is within bounds
systemProperty("arrow.enable_unsafe_memory_access", "true")
// Disable expensive null check for every get(index) call.
// Iceberg manages nullability checks itself instead of relying on arrow.
systemProperty("arrow.enable_null_check_for_get", "false")

testCompile "org.apache.hadoop:hadoop-hdfs::tests"
testCompile "org.apache.hadoop:hadoop-common::tests"
testCompile("org.apache.hadoop:hadoop-minicluster") {
exclude group: 'org.apache.avro', module: 'avro'
// Vectorized reads need more memory
maxHeapSize '2500m'
}
testCompile project(path: ':iceberg-hive', configuration: 'testArtifacts')
testCompile project(path: ':iceberg-api', configuration: 'testArtifacts')
}

test {
// For vectorized reads
// Allow unsafe memory access to avoid the costly check arrow does to check if index is within bounds
systemProperty("arrow.enable_unsafe_memory_access", "true")
// Disable expensive null check for every get(index) call.
// Iceberg manages nullability checks itself instead of relying on arrow.
systemProperty("arrow.enable_null_check_for_get", "false")
// the runtime jar is a self-contained artifact for testing in a notebook
project(':iceberg-spark-runtime') {
apply plugin: 'com.github.johnrengelman.shadow'

// Vectorized reads need more memory
maxHeapSize '2500m'
tasks.jar.dependsOn tasks.shadowJar

configurations {
compile {
exclude group: 'org.apache.spark'
// included in Spark
exclude group: 'org.slf4j'
exclude group: 'org.apache.commons'
exclude group: 'commons-pool'
exclude group: 'commons-codec'
exclude group: 'org.xerial.snappy'
exclude group: 'javax.xml.bind'
exclude group: 'javax.annotation'
}
}

dependencies {
compile project(':iceberg-spark2')
compile 'org.apache.spark:spark-hive_2.11'
}

shadowJar {
configurations = [project.configurations.compile]

zip64 true

// include the LICENSE and NOTICE files for the shaded Jar
from(projectDir) {
include 'LICENSE'
include 'NOTICE'
}

// Relocate dependencies to avoid conflicts
relocate 'com.google', 'org.apache.iceberg.shaded.com.google'
relocate 'com.fasterxml', 'org.apache.iceberg.shaded.com.fasterxml'
relocate 'com.github.benmanes', 'org.apache.iceberg.shaded.com.github.benmanes'
relocate 'org.checkerframework', 'org.apache.iceberg.shaded.org.checkerframework'
relocate 'org.apache.avro', 'org.apache.iceberg.shaded.org.apache.avro'
relocate 'avro.shaded', 'org.apache.iceberg.shaded.org.apache.avro.shaded'
relocate 'com.thoughtworks.paranamer', 'org.apache.iceberg.shaded.com.thoughtworks.paranamer'
relocate 'org.apache.parquet', 'org.apache.iceberg.shaded.org.apache.parquet'
relocate 'shaded.parquet', 'org.apache.iceberg.shaded.org.apache.parquet.shaded'
// relocate Avro's jackson dependency to share parquet-jackson locations
relocate 'org.codehaus.jackson', 'org.apache.iceberg.shaded.org.apache.parquet.shaded.org.codehaus.jackson'
relocate 'org.apache.orc', 'org.apache.iceberg.shaded.org.apache.orc'
relocate 'io.airlift', 'org.apache.iceberg.shaded.io.airlift'
// relocate Arrow and related deps to shade Iceberg specific version
relocate 'io.netty.buffer', 'org.apache.iceberg.shaded.io.netty.buffer'
relocate 'org.apache.arrow', 'org.apache.iceberg.shaded.org.apache.arrow'
relocate 'com.carrotsearch', 'org.apache.iceberg.shaded.com.carrotsearch'

classifier null
}

jar {
classifier = 'empty'
}
}
}

@@ -582,69 +650,6 @@ project(':iceberg-pig') {
}
}

// the runtime jar is a self-contained artifact for testing in a notebook
project(':iceberg-spark-runtime') {
apply plugin: 'com.github.johnrengelman.shadow'

tasks.jar.dependsOn tasks.shadowJar

configurations {
compile {
exclude group: 'org.apache.spark'
// included in Spark
exclude group: 'org.slf4j'
exclude group: 'org.apache.commons'
exclude group: 'commons-pool'
exclude group: 'commons-codec'
exclude group: 'org.xerial.snappy'
exclude group: 'javax.xml.bind'
exclude group: 'javax.annotation'
}
}

dependencies {
compile project(':iceberg-spark2')
compile 'org.apache.spark:spark-hive_2.11'
}

shadowJar {
configurations = [project.configurations.compile]

zip64 true

// include the LICENSE and NOTICE files for the shaded Jar
from(projectDir) {
include 'LICENSE'
include 'NOTICE'
}

// Relocate dependencies to avoid conflicts
relocate 'com.google', 'org.apache.iceberg.shaded.com.google'
relocate 'com.fasterxml', 'org.apache.iceberg.shaded.com.fasterxml'
relocate 'com.github.benmanes', 'org.apache.iceberg.shaded.com.github.benmanes'
relocate 'org.checkerframework', 'org.apache.iceberg.shaded.org.checkerframework'
relocate 'org.apache.avro', 'org.apache.iceberg.shaded.org.apache.avro'
relocate 'avro.shaded', 'org.apache.iceberg.shaded.org.apache.avro.shaded'
relocate 'com.thoughtworks.paranamer', 'org.apache.iceberg.shaded.com.thoughtworks.paranamer'
relocate 'org.apache.parquet', 'org.apache.iceberg.shaded.org.apache.parquet'
relocate 'shaded.parquet', 'org.apache.iceberg.shaded.org.apache.parquet.shaded'
// relocate Avro's jackson dependency to share parquet-jackson locations
relocate 'org.codehaus.jackson', 'org.apache.iceberg.shaded.org.apache.parquet.shaded.org.codehaus.jackson'
relocate 'org.apache.orc', 'org.apache.iceberg.shaded.org.apache.orc'
relocate 'io.airlift', 'org.apache.iceberg.shaded.io.airlift'
// relocate Arrow and related deps to shade Iceberg specific version
relocate 'io.netty.buffer', 'org.apache.iceberg.shaded.io.netty.buffer'
relocate 'org.apache.arrow', 'org.apache.iceberg.shaded.org.apache.arrow'
relocate 'com.carrotsearch', 'org.apache.iceberg.shaded.com.carrotsearch'

classifier null
}

jar {
classifier = 'empty'
}
}

@Memoized
boolean isVersionFileExists() {
return file('version.txt').exists()
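The build script above records the running JDK in project.ext.jdkVersion and branches on it to decide whether the Spark 2 projects are configured. Below is a minimal diagnostic sketch (not part of the commit; the task name printJdkInfo is made up) that prints both values so you can confirm which branch a local build will take:

// Illustrative sketch only, not part of this commit; the task name is hypothetical.
task printJdkInfo {
    doLast {
        println "Running on JDK: ${JavaVersion.current()}"
        println "Build jdkVersion property: ${project.ext.jdkVersion}"
    }
}

Running such a task with ./gradlew printJdkInfo would print '8' or '11'; on JDK 11 the iceberg-spark2 and iceberg-spark-runtime projects are simply not configured.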
4 changes: 4 additions & 0 deletions deploy.gradle
@@ -17,6 +17,10 @@
* under the License.
*/

if (project.hasProperty('release') && jdkVersion != JavaVersion.VERSION_1_8) {
throw new GradleException("Releases must be built with Java 8")
}

subprojects {
apply plugin: 'maven' // make pom files for deployment
apply plugin: 'maven-publish'
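The new check in deploy.gradle is gated on project.hasProperty('release'), so it only runs when the build is invoked with the release property (for example ./gradlew -Prelease publish, shown here as an assumed invocation). Below is a minimal sketch of the same kind of guard, using the Gradle JavaVersion API directly:

// Illustrative sketch only, not part of this commit.
// The guard is a no-op unless -Prelease is passed on the command line.
if (project.hasProperty('release') && JavaVersion.current() != JavaVersion.VERSION_1_8) {
    throw new GradleException('Releases must be built with Java 8')
}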
11 changes: 7 additions & 4 deletions settings.gradle
@@ -29,10 +29,8 @@ include 'arrow'
include 'parquet'
include 'bundled-guava'
include 'spark'
include 'spark2'
include 'spark3'
include 'spark3-runtime'
include 'spark-runtime'
include 'pig'
include 'hive'

@@ -47,10 +45,15 @@ project(':arrow').name = 'iceberg-arrow'
project(':parquet').name = 'iceberg-parquet'
project(':bundled-guava').name = 'iceberg-bundled-guava'
project(':spark').name = 'iceberg-spark'
project(':spark2').name = 'iceberg-spark2'
project(':spark-runtime').name = 'iceberg-spark-runtime'
project(':spark3').name = 'iceberg-spark3'
project(':spark3-runtime').name = 'iceberg-spark3-runtime'
project(':pig').name = 'iceberg-pig'
project(':hive').name = 'iceberg-hive'

if (JavaVersion.current() == JavaVersion.VERSION_1_8) {
include 'spark2'
include 'spark-runtime'

project(':spark2').name = 'iceberg-spark2'
project(':spark-runtime').name = 'iceberg-spark-runtime'
}
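
Taken together, settings.gradle and build.gradle apply a single pattern: modules that require Java 8 are only included, and only configured, when Gradle itself is running on JDK 8. Below is a condensed, illustrative sketch of that pattern using a hypothetical module name, 'legacy-module':

// settings.gradle: only register the module when running on JDK 8
if (JavaVersion.current() == JavaVersion.VERSION_1_8) {
    include 'legacy-module'
}

// build.gradle: only configure the module when it exists, i.e. on JDK 8
if (JavaVersion.current() == JavaVersion.VERSION_1_8) {
    project(':legacy-module') {
        apply plugin: 'java'
        // JDK 8-only dependencies and tasks go here
    }
}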
