diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b0de9840bf5..d8834a4fe46 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -29,11 +29,11 @@ jobs:
     name: "Pre-compile Checks"
     uses: ./.github/workflows/template.pre-compile-checks.yml
   ci:
-    name: "Default (Java 11)"
+    name: "Default (Java 17)"
     uses: ./.github/workflows/template.flink-ci.yml
     with:
-      environment: 'PROFILE="-Dinclude_hadoop_aws"'
-      jdk_version: 11
+      environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk17 -Pjava17-target"'
+      jdk_version: 17
     secrets:
       s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
       s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
       s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index 3686a29c820..08d27bce06f 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -39,23 +39,12 @@ jobs:
       s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
       s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
       s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
-  java17:
-    name: "Java 17"
-    uses: ./.github/workflows/template.flink-ci.yml
-    with:
-      workflow-caller-id: java17
-      environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk11 -Djdk17 -Pjava17-target"'
-      jdk_version: 17
-    secrets:
-      s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
-      s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
-      s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
   java21:
     name: "Java 21"
     uses: ./.github/workflows/template.flink-ci.yml
     with:
       workflow-caller-id: java21
-      environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk11 -Djdk17 -Djdk21 -Pjava21-target"'
+      environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk21 -Pjava21-target"'
       jdk_version: 21
     secrets:
       s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
@@ -66,8 +55,8 @@ jobs:
     uses: ./.github/workflows/template.flink-ci.yml
     with:
       workflow-caller-id: hadoop313
-      environment: 'PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3"'
-      jdk_version: 11
+      environment: 'PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3 -Djdk17 -Pjava17-target"'
+      jdk_version: 17
     secrets:
       s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
       s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
@@ -77,8 +66,8 @@ jobs:
     uses: ./.github/workflows/template.flink-ci.yml
     with:
       workflow-caller-id: adaptive-scheduler
-      environment: 'PROFILE="-Penable-adaptive-scheduler"'
-      jdk_version: 11
+      environment: 'PROFILE="-Penable-adaptive-scheduler -Djdk17 -Pjava17-target"'
+      jdk_version: 17
     secrets:
       s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
       s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
diff --git a/.github/workflows/template.flink-ci.yml b/.github/workflows/template.flink-ci.yml
index 85cc4843125..40e7758e7cc 100644
--- a/.github/workflows/template.flink-ci.yml
+++ b/.github/workflows/template.flink-ci.yml
@@ -30,7 +30,7 @@ on:
         type: string
       jdk_version:
         description: "The Java version to use."
-        default: 11
+        default: 17
         type: number
     secrets:
       s3_bucket:
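Note: the workflow changes above only switch which PROFILE string and jdk_version the reusable CI template receives. As a rough local approximation of what the new default (Java 17) job builds — the actual CI entry point and its extra flags are not part of this diff, so the goals below are an assumption and PROFILE is simply passed straight to the Maven wrapper — one could run, from the repository root on a machine with JDK 17:

```
# Sketch only: mirror the new default CI profile locally (assumes JDK 17 is installed).
export PROFILE="-Dinclude_hadoop_aws -Djdk17 -Pjava17-target"
./mvnw clean package -DskipTests $PROFILE
```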
diff --git a/.github/workflows/template.pre-compile-checks.yml b/.github/workflows/template.pre-compile-checks.yml
index 430d7763d23..3eac3819d4d 100644
--- a/.github/workflows/template.pre-compile-checks.yml
+++ b/.github/workflows/template.pre-compile-checks.yml
@@ -23,7 +23,7 @@ on:
     inputs:
       jdk_version:
         description: "The JDK version that shall be used as a default within the Flink CI Docker container."
-        default: "11"
+        default: "17"
         type: choice
         options: ["11", "17", "21"]
 
@@ -31,7 +31,7 @@ on:
     inputs:
       jdk_version:
         description: "The JDK version that shall be used as a default within the Flink CI Docker container."
-        default: 11
+        default: 17
         type: number
 
 permissions: read-all
diff --git a/README.md b/README.md
index 7f6d767101c..de7257f2a35 100644
--- a/README.md
+++ b/README.md
@@ -104,15 +104,46 @@
 Prerequisites for building Flink:
 * Unix-like environment (we use Linux, Mac OS X, Cygwin, WSL)
 * Git
 * Maven (we require version 3.8.6)
-* Java 11
+* Java (version 11, 17, or 21)
+
+### Basic Build Instructions
+
+First, clone the repository:
 
 ```
 git clone https://github.com/apache/flink.git
 cd flink
-./mvnw clean package -DskipTests # this will take up to 10 minutes
 ```
 
-Flink is now installed in `build-target`.
+Then, choose one of the following commands based on your preferred Java version:
+
+**For Java 11**
+
+```
+./mvnw clean package -DskipTests -Djdk11 -Pjava11-target
+```
+
+**For Java 17 (Default)**
+
+```
+./mvnw clean package -DskipTests -Djdk17 -Pjava17-target
+```
+
+**For Java 21**
+
+```
+./mvnw clean package -DskipTests -Djdk21 -Pjava21-target
+```
+
+The build takes up to 10 minutes to complete.
+Flink will be installed in `build-target`.
+
+### Notes
+
+* Make sure your `JAVA_HOME` environment variable points to the JDK version you are building with
+* The build command uses the Maven wrapper (`mvnw`), which ensures the correct Maven version is used
+* The `-DskipTests` flag skips running the tests to speed up the build
+* Each Java version requires its matching target profile (`-Pjava<version>-target`) and JDK flag (`-Djdk<version>`), as shown in the commands above
 
 ## Developing Flink
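The README notes above hinge on JAVA_HOME pointing at a matching JDK. A quick, generic sanity check before running any of the build commands — nothing here is Flink-specific:

```
# Confirm which JDK the Maven wrapper will pick up before building.
echo "JAVA_HOME=${JAVA_HOME}"
"${JAVA_HOME}/bin/java" -version
./mvnw -version   # also prints the Java version Maven resolved
```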
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 9a97855a70f..ebc7e5289d5 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -76,10 +76,10 @@ stages:
         vmImage: 'ubuntu-22.04'
       e2e_pool_definition:
         vmImage: 'ubuntu-22.04'
-      environment: PROFILE="-Dflink.hadoop.version=2.10.2"
+      environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
       run_end_to_end: false
       container: flink-build-container
-      jdk: 11
+      jdk: 17
   - job: docs_404_check # run on a MSFT provided machine
     pool:
       vmImage: 'ubuntu-22.04'
diff --git a/flink-dist-scala/src/main/resources/META-INF/NOTICE b/flink-dist-scala/src/main/resources/META-INF/NOTICE
index df24d4019e1..563a4034d3c 100644
--- a/flink-dist-scala/src/main/resources/META-INF/NOTICE
+++ b/flink-dist-scala/src/main/resources/META-INF/NOTICE
@@ -10,7 +10,7 @@ This project bundles the following dependencies under the Apache Software Licens
 
 The following dependencies all share the same BSD license which you find under licenses/LICENSE.scala.
 
-- org.scala-lang:scala-compiler:2.12.7
-- org.scala-lang:scala-library:2.12.7
-- org.scala-lang:scala-reflect:2.12.7
-- org.scala-lang.modules:scala-xml_2.12:1.0.6
+- org.scala-lang:scala-compiler:2.12.20
+- org.scala-lang:scala-library:2.12.20
+- org.scala-lang:scala-reflect:2.12.20
+- org.scala-lang.modules:scala-xml_2.12:2.3.0
diff --git a/flink-end-to-end-tests/test-scripts/common_docker.sh b/flink-end-to-end-tests/test-scripts/common_docker.sh
index 49a0abad354..d4ddbec669b 100644
--- a/flink-end-to-end-tests/test-scripts/common_docker.sh
+++ b/flink-end-to-end-tests/test-scripts/common_docker.sh
@@ -46,10 +46,10 @@ function build_image() {
     start_file_server
     local server_pid=$!
 
-    echo "Preparing Dockeriles"
+    echo "Preparing Dockerfiles"
     retry_times_with_exponential_backoff 5 git clone https://github.com/apache/flink-docker.git --branch dev-master --single-branch
 
-    local java_version=11
+    local java_version=17
     if [[ ${PROFILE} == *"jdk17"* ]]; then
         java_version=17
     fi
diff --git a/flink-rpc/flink-rpc-akka/pom.xml b/flink-rpc/flink-rpc-akka/pom.xml
index 2f2f3ef4a65..9660b6ad995 100644
--- a/flink-rpc/flink-rpc-akka/pom.xml
+++ b/flink-rpc/flink-rpc-akka/pom.xml
@@ -38,8 +38,6 @@ under the License.
 		1.1.2
-		2.12
-		2.12.16
diff --git a/flink-table/flink-table-planner-loader-bundle/src/main/resources/META-INF/NOTICE b/flink-table/flink-table-planner-loader-bundle/src/main/resources/META-INF/NOTICE
index 5e8b1f4c0e5..35ac48aecea 100644
--- a/flink-table/flink-table-planner-loader-bundle/src/main/resources/META-INF/NOTICE
+++ b/flink-table/flink-table-planner-loader-bundle/src/main/resources/META-INF/NOTICE
@@ -6,6 +6,6 @@ The Apache Software Foundation (http://www.apache.org/).
 
 The following dependencies all share the same BSD license which you find under licenses/LICENSE.scala.
 
-- org.scala-lang:scala-compiler:2.12.7
-- org.scala-lang:scala-library:2.12.7
-- org.scala-lang:scala-reflect:2.12.7
+- org.scala-lang:scala-compiler:2.12.20
+- org.scala-lang:scala-library:2.12.20
+- org.scala-lang:scala-reflect:2.12.20
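The root pom.xml changes that follow switch the default compile target to Java 17 and add a java11-target profile so the older bytecode level can still be produced. One way to confirm which values a given set of flags resolves to is Maven's help plugin; this assumes the standard maven-help-plugin goal `help:evaluate` is available, and the property names are taken from the diff:

```
# Print the resolved source/target levels for the default build...
./mvnw -q -DforceStdout help:evaluate -Dexpression=source.java.version; echo
./mvnw -q -DforceStdout help:evaluate -Dexpression=target.java.version; echo

# ...and when explicitly selecting the Java 11 target profile.
./mvnw -q -DforceStdout help:evaluate -Dexpression=target.java.version -Djdk11 -Pjava11-target; echo
```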
diff --git a/pom.xml b/pom.xml
index b04a3aefc03..2d6c9933d1b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -124,7 +124,8 @@ under the License.
 		2.15.3
 		2.7.0
 		true
-		11
+		11
+		17
 		1.7.36
 		2.24.1
-		2.12.7
+		2.12.20
 		2.12
 		0.7.6
@@ -944,7 +943,7 @@ under the License.
 			scala-2.12
-				2.12.7
+				2.12.20
 				2.12
@@ -1064,6 +1063,27 @@ under the License.
+
+			java11-target
+
+
+					11
+
+
+
+
+
+						org.apache.maven.plugins
+						maven-compiler-plugin
+
+							11
+							11
+
+
+
+
+
+
 			java17
@@ -1071,8 +1091,6 @@ under the License.
-
-				2.12.15
 				org.apache.flink.testutils.junit.FailsOnJava17
@@ -1128,11 +1146,6 @@ under the License.
 				[21,)
-
-
-					2.12.18
-
-
@@ -1386,7 +1399,7 @@ under the License.
-				11
+				17
@@ -1418,7 +1431,7 @@ under the License.
-				[11.0.0,11.1.0)
+				[${target.java.version}.*)
@@ -2075,7 +2088,8 @@ under the License.
 					maven-compiler-plugin
 					3.8.0
-						${target.java.version}
+
+						${source.java.version}
 						${target.java.version}
 						false
diff --git a/tools/azure-pipelines/build-apache-repo.yml b/tools/azure-pipelines/build-apache-repo.yml
index 5df8ced01c8..9c0c880c453 100644
--- a/tools/azure-pipelines/build-apache-repo.yml
+++ b/tools/azure-pipelines/build-apache-repo.yml
@@ -69,10 +69,10 @@ stages:
         name: Default
       e2e_pool_definition:
         vmImage: 'ubuntu-22.04'
-      environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Pjava11-target"
+      environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
       run_end_to_end: false
       container: flink-build-container
-      jdk: 11
+      jdk: 17
   - job: docs_404_check # run on a MSFT provided machine
     pool:
       vmImage: 'ubuntu-22.04'
@@ -103,9 +103,9 @@ stages:
   - template: build-nightly-dist.yml
     parameters:
       stage_name: cron_snapshot_deployment
-      environment: PROFILE="-Djdk11 -Pjava11-target"
+      environment: PROFILE="-Djdk17 -Pjava17-target"
       container: flink-build-container
-      jdk: 11
+      jdk: 17
   - template: jobs-template.yml
     parameters:
       stage_name: cron_azure
@@ -113,10 +113,10 @@ stages:
        vmImage: 'ubuntu-22.04'
      e2e_pool_definition:
        vmImage: 'ubuntu-22.04'
-      environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Pjava11-target"
+      environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
      run_end_to_end: true
      container: flink-build-container
-      jdk: 11
+      jdk: 17
   - template: jobs-template.yml
     parameters:
       stage_name: cron_hadoop313
@@ -124,21 +124,21 @@ stages:
        name: Default
      e2e_pool_definition:
        vmImage: 'ubuntu-22.04'
-      environment: PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3 -Djdk11 -Pjava11-target"
+      environment: PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3 -Djdk17 -Pjava17-target"
      run_end_to_end: true
      container: flink-build-container
-      jdk: 11
+      jdk: 17
   - template: jobs-template.yml
     parameters:
-      stage_name: cron_jdk17
+      stage_name: cron_jdk11
      test_pool_definition:
        name: Default
      e2e_pool_definition:
        vmImage: 'ubuntu-22.04'
-      environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Djdk17 -Pjava17-target"
+      environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Pjava11-target"
      run_end_to_end: true
      container: flink-build-container
-      jdk: 17
+      jdk: 11
   - template: jobs-template.yml
     parameters:
       stage_name: cron_jdk21
@@ -146,7 +146,7 @@ stages:
        name: Default
      e2e_pool_definition:
        vmImage: 'ubuntu-22.04'
-      environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Djdk17 -Djdk21 -Pjava21-target"
+      environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk21 -Pjava21-target"
      run_end_to_end: true
      container: flink-build-container
      jdk: 21
@@ -157,10 +157,10 @@ stages:
        name: Default
      e2e_pool_definition:
        vmImage: 'ubuntu-22.04'
-      environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Penable-adaptive-scheduler -Djdk11 -Pjava11-target"
+      environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Penable-adaptive-scheduler -Djdk17 -Pjava17-target"
      run_end_to_end: true
      container: flink-build-container
-      jdk: 11
+      jdk: 17
   - job: docs_404_check # run on a MSFT provided machine
     pool:
       vmImage: 'ubuntu-22.04'
@@ -172,5 +172,5 @@ stages:
   - template: build-python-wheels.yml
     parameters:
       stage_name: cron_python_wheels
-      environment: PROFILE="-Dflink.hadoop.version=2.10.2"
+      environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
       container: flink-build-container
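After switching the default target, one way to double-check which bytecode level a local build actually produced is to inspect a compiled class's major version (55 = Java 11, 61 = Java 17, 65 = Java 21). This is a generic javap check rather than part of the Flink tooling, and the module path and class name below are only placeholders — any compiled class under a module's target/classes directory works:

```
# Placeholder path/class: inspect the class-file version of a compiled class.
javap -verbose flink-core/target/classes/org/apache/flink/util/Preconditions.class | grep "major version"
```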