[FLINK-36181] Use Java 17 by default (#25898)

* [FLINK-36181] Set CI to use Java 17 by default and drop Java 8

* [FLINK-36181] Update main POM to use Java 17

* [FLINK-36181] Bump Scala to 2.12.20

This is a breaking change for Flink 2.0, and allows us to subsequently remove the version-specific Scala overrides for both the Java 17 and Java 21 profiles

* [FLINK-36181] Build Dockerfiles with Java 17 by default

* [FLINK-36181] Remove Scala version bump for Java11 and Java17 profiles

Since we've bumped Scala globally, we no longer need to bump it separately for these profiles

* [FLINK-36181] Add Java11 target profile

* [FLINK-36181] Set CI to have a dedicated Java11 cron

* [FLINK-36181] Update README

* [FLINK-36181] Make sure that JDK11 APIs can be used

* [FLINK-36181] Improve README

* [FLINK-36181] Address review comment on configurable Java version

* [FLINK-36181] Remove no longer required specific Scala parameters
pull/25991/head
Martijn Visser 1 week ago committed by GitHub
parent 8af0259247
commit 997b48340d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

@ -29,11 +29,11 @@ jobs:
name: "Pre-compile Checks"
uses: ./.github/workflows/template.pre-compile-checks.yml
ci:
name: "Default (Java 11)"
name: "Default (Java 17)"
uses: ./.github/workflows/template.flink-ci.yml
with:
environment: 'PROFILE="-Dinclude_hadoop_aws"'
jdk_version: 11
environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk17 -Pjava17-target"'
jdk_version: 17
secrets:
s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}

@ -39,23 +39,12 @@ jobs:
s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
java17:
name: "Java 17"
uses: ./.github/workflows/template.flink-ci.yml
with:
workflow-caller-id: java17
environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk11 -Djdk17 -Pjava17-target"'
jdk_version: 17
secrets:
s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
java21:
name: "Java 21"
uses: ./.github/workflows/template.flink-ci.yml
with:
workflow-caller-id: java21
environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk11 -Djdk17 -Djdk21 -Pjava21-target"'
environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk21 -Pjava21-target"'
jdk_version: 21
secrets:
s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
@ -66,8 +55,8 @@ jobs:
uses: ./.github/workflows/template.flink-ci.yml
with:
workflow-caller-id: hadoop313
environment: 'PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3"'
jdk_version: 11
environment: 'PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3 -Djdk17 -Pjava17-target"'
jdk_version: 17
secrets:
s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
@ -77,8 +66,8 @@ jobs:
uses: ./.github/workflows/template.flink-ci.yml
with:
workflow-caller-id: adaptive-scheduler
environment: 'PROFILE="-Penable-adaptive-scheduler"'
jdk_version: 11
environment: 'PROFILE="-Penable-adaptive-scheduler -Djdk17 -Pjava17-target"'
jdk_version: 17
secrets:
s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}

@ -30,7 +30,7 @@ on:
type: string
jdk_version:
description: "The Java version to use."
default: 11
default: 17
type: number
secrets:
s3_bucket:

@ -23,7 +23,7 @@ on:
inputs:
jdk_version:
description: "The JDK version that shall be used as a default within the Flink CI Docker container."
default: "11"
default: "17"
type: choice
options: ["11", "17", "21"]
@ -31,7 +31,7 @@ on:
inputs:
jdk_version:
description: "The JDK version that shall be used as a default within the Flink CI Docker container."
default: 11
default: 17
type: number
permissions: read-all

@ -104,15 +104,46 @@ Prerequisites for building Flink:
* Unix-like environment (we use Linux, Mac OS X, Cygwin, WSL)
* Git
* Maven (we require version 3.8.6)
* Java 11
* Java (version 11, 17, or 21)
### Basic Build Instructions
First, clone the repository:
```
git clone https://github.com/apache/flink.git
cd flink
./mvnw clean package -DskipTests # this will take up to 10 minutes
```
Flink is now installed in `build-target`.
Then, choose one of the following commands based on your preferred Java version:
**For Java 11**
```
./mvnw clean package -DskipTests -Djdk11 -Pjava11-target
```
**For Java 17 (Default)**
```
./mvnw clean package -DskipTests -Djdk17 -Pjava17-target
```
**For Java 21**
```
./mvnw clean package -DskipTests -Djdk21 -Pjava21-target
```
The build process will take approximately 10 minutes to complete.
Flink will be installed in `build-target`.
### Notes
* Make sure your JAVA_HOME environment variable points to the correct JDK version
* The build command uses Maven wrapper (mvnw) which ensures the correct Maven version is used
* The -DskipTests flag skips running tests to speed up the build process
* Each Java version requires its corresponding profile (-Pjava<version>-target) and JDK flag (-Djdk<version>)
## Developing Flink

@ -76,10 +76,10 @@ stages:
vmImage: 'ubuntu-22.04'
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2"
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
run_end_to_end: false
container: flink-build-container
jdk: 11
jdk: 17
- job: docs_404_check # run on a MSFT provided machine
pool:
vmImage: 'ubuntu-22.04'

@ -10,7 +10,7 @@ This project bundles the following dependencies under the Apache Software Licens
The following dependencies all share the same BSD license which you find under licenses/LICENSE.scala.
- org.scala-lang:scala-compiler:2.12.7
- org.scala-lang:scala-library:2.12.7
- org.scala-lang:scala-reflect:2.12.7
- org.scala-lang.modules:scala-xml_2.12:1.0.6
- org.scala-lang:scala-compiler:2.12.20
- org.scala-lang:scala-library:2.12.20
- org.scala-lang:scala-reflect:2.12.20
- org.scala-lang.modules:scala-xml_2.12:2.3.0

@ -46,10 +46,10 @@ function build_image() {
start_file_server
local server_pid=$!
echo "Preparing Dockeriles"
echo "Preparing Dockerfiles"
retry_times_with_exponential_backoff 5 git clone https://github.com/apache/flink-docker.git --branch dev-master --single-branch
local java_version=11
local java_version=17
if [[ ${PROFILE} == *"jdk17"* ]]; then
java_version=17
fi

@ -38,8 +38,6 @@ under the License.
<properties>
<pekko.version>1.1.2</pekko.version>
<scala.binary.version>2.12</scala.binary.version>
<scala.version>2.12.16</scala.version>
</properties>
<dependencies>

@ -6,6 +6,6 @@ The Apache Software Foundation (http://www.apache.org/).
The following dependencies all share the same BSD license which you find under licenses/LICENSE.scala.
- org.scala-lang:scala-compiler:2.12.7
- org.scala-lang:scala-library:2.12.7
- org.scala-lang:scala-reflect:2.12.7
- org.scala-lang:scala-compiler:2.12.20
- org.scala-lang:scala-library:2.12.20
- org.scala-lang:scala-reflect:2.12.20

@ -124,7 +124,8 @@ under the License.
<flink.shaded.jackson.version>2.15.3</flink.shaded.jackson.version>
<flink.shaded.jsonpath.version>2.7.0</flink.shaded.jsonpath.version>
<flink.markBundledAsOptional>true</flink.markBundledAsOptional>
<target.java.version>11</target.java.version>
<source.java.version>11</source.java.version>
<target.java.version>17</target.java.version>
<slf4j.version>1.7.36</slf4j.version>
<log4j.version>2.24.1</log4j.version>
<!-- Overwrite default values from parent pom.
@ -133,9 +134,7 @@ under the License.
<maven.compiler.source>${target.java.version}</maven.compiler.source>
<maven.compiler.target>${target.java.version}</maven.compiler.target>
<scala.macros.version>2.1.1</scala.macros.version>
<!-- Default scala versions, must be overwritten by build profiles, so we set something
invalid here -->
<scala.version>2.12.7</scala.version>
<scala.version>2.12.20</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<chill.version>0.7.6</chill.version>
<!-- keep FlinkTestcontainersConfigurator.configureZookeeperContainer in sync -->
@ -944,7 +943,7 @@ under the License.
<profile>
<id>scala-2.12</id>
<properties>
<scala.version>2.12.7</scala.version>
<scala.version>2.12.20</scala.version>
<scala.binary.version>2.12</scala.binary.version>
</properties>
<activation>
@ -1064,6 +1063,27 @@ under the License.
</build>
</profile>
<profile>
<id>java11-target</id>
<properties>
<target.java.version>11</target.java.version>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>11</source>
<target>11</target>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>java17</id>
<activation>
@ -1071,8 +1091,6 @@ under the License.
</activation>
<properties>
<!-- Bump Scala because 2.12.7 doesn't compile on Java 17. -->
<scala.version>2.12.15</scala.version>
<surefire.excludedGroups.jdk>org.apache.flink.testutils.junit.FailsOnJava17</surefire.excludedGroups.jdk>
</properties>
@ -1128,11 +1146,6 @@ under the License.
<jdk>[21,)</jdk>
</activation>
<properties>
<!-- Bump Scala because before 2.12.18 doesn't compile on Java 21. -->
<scala.version>2.12.18</scala.version>
</properties>
<build>
<pluginManagement>
<plugins>
@ -1386,7 +1399,7 @@ under the License.
</property>
</activation>
<properties>
<target.java.version>11</target.java.version>
<target.java.version>17</target.java.version>
</properties>
<build>
<plugins>
@ -1418,7 +1431,7 @@ under the License.
<!-- versions for certain build tools are enforced to match the CI setup -->
<!-- the rules below should stay in sync with Flink Release wiki documentation and the CI scripts -->
<requireJavaVersion>
<version>[11.0.0,11.1.0)</version>
<version>[${target.java.version}.*)</version>
</requireJavaVersion>
</rules>
</configuration>
@ -2075,7 +2088,8 @@ under the License.
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
<configuration>
<source>${target.java.version}</source>
<!-- Make sure that we only use Java 11 compatible APIs -->
<source>${source.java.version}</source>
<target>${target.java.version}</target>
<!-- The semantics of this option are reversed, see MCOMPILER-209. -->
<useIncrementalCompilation>false</useIncrementalCompilation>

@ -69,10 +69,10 @@ stages:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Pjava11-target"
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
run_end_to_end: false
container: flink-build-container
jdk: 11
jdk: 17
- job: docs_404_check # run on a MSFT provided machine
pool:
vmImage: 'ubuntu-22.04'
@ -103,9 +103,9 @@ stages:
- template: build-nightly-dist.yml
parameters:
stage_name: cron_snapshot_deployment
environment: PROFILE="-Djdk11 -Pjava11-target"
environment: PROFILE="-Djdk17 -Pjava17-target"
container: flink-build-container
jdk: 11
jdk: 17
- template: jobs-template.yml
parameters:
stage_name: cron_azure
@ -113,10 +113,10 @@ stages:
vmImage: 'ubuntu-22.04'
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Pjava11-target"
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
run_end_to_end: true
container: flink-build-container
jdk: 11
jdk: 17
- template: jobs-template.yml
parameters:
stage_name: cron_hadoop313
@ -124,21 +124,21 @@ stages:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3 -Djdk11 -Pjava11-target"
environment: PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3 -Djdk17 -Pjava17-target"
run_end_to_end: true
container: flink-build-container
jdk: 11
jdk: 17
- template: jobs-template.yml
parameters:
stage_name: cron_jdk17
stage_name: cron_jdk11
test_pool_definition:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Djdk17 -Pjava17-target"
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Pjava11-target"
run_end_to_end: true
container: flink-build-container
jdk: 17
jdk: 11
- template: jobs-template.yml
parameters:
stage_name: cron_jdk21
@ -146,7 +146,7 @@ stages:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Djdk17 -Djdk21 -Pjava21-target"
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk21 -Pjava21-target"
run_end_to_end: true
container: flink-build-container
jdk: 21
@ -157,10 +157,10 @@ stages:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Penable-adaptive-scheduler -Djdk11 -Pjava11-target"
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Penable-adaptive-scheduler -Djdk17 -Pjava17-target"
run_end_to_end: true
container: flink-build-container
jdk: 11
jdk: 17
- job: docs_404_check # run on a MSFT provided machine
pool:
vmImage: 'ubuntu-22.04'
@ -172,5 +172,5 @@ stages:
- template: build-python-wheels.yml
parameters:
stage_name: cron_python_wheels
environment: PROFILE="-Dflink.hadoop.version=2.10.2"
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
container: flink-build-container

Loading…
Cancel
Save