# Mirror of https://github.com/apache/flink.git
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#
# This file defines an Azure Pipeline build for testing Flink. It is intended to be used
# with a free Azure Pipelines account.
# It has the following features:
#  - default builds for pushes / pull requests
#  - end-to-end tests
#
#
# For the "apache/flink" repository, we are using the pipeline definition located in
# tools/azure-pipelines/build-apache-repo.yml
# That file points to custom, self-hosted build agents for faster pull request build processing and
# integration with Flinkbot.
# The custom pipeline definition file is configured in the "Pipeline settings" screen
# of the Azure Pipelines web ui.
#
# Build every pushed branch; PR triggers are implied by the Azure Pipelines defaults.
trigger:
  branches:
    include:
      - '*' # must quote since "*" is a YAML reserved character; we want a string
resources:
  containers:
    # Container with SSL to have the same environment everywhere.
    # see https://github.com/apache/flink-connector-shared-utils/tree/ci_utils
    - container: flink-build-container
      image: chesnay/flink-ci:java_8_11_17_21_maven_386_jammy
      # On AZP provided machines, set this flag to allow writing coredumps in docker
      options: --privileged
# Define variables:
# - See tools/azure-pipelines/jobs-template.yml for a short summary of the caching
# - See https://stackoverflow.com/questions/60742105/how-can-i-access-a-secret-value-from-an-azure-pipelines-expression
#   to understand why the secrets are handled like this
variables:
  MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository
  E2E_CACHE_FOLDER: $(Pipeline.Workspace)/e2e_cache
  E2E_TARBALL_CACHE: $(Pipeline.Workspace)/e2e_artifact_cache
  MAVEN_ARGS: '-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
  # Runtime expression: evaluated when the run starts (used e.g. for yearly cache rotation).
  PIPELINE_START_YEAR: $[format('{0:yyyy}', pipeline.startTime)]
  # Cache task key syntax: "part | part | ..." — the "|" characters are literal key
  # separators consumed by the Cache@2 task, not YAML block scalars.
  CACHE_KEY: maven | $(Agent.OS) | **/pom.xml, !**/target/**
  CACHE_FALLBACK_KEY: maven | $(Agent.OS)
  DOCKER_IMAGES_CACHE_KEY: docker-images-cache | $(Agent.OS) | **/cache_docker_images.sh | flink-test-utils-parent/**/DockerImageVersions.java
  DOCKER_IMAGES_CACHE_FOLDER: $(Pipeline.Workspace)/.docker-cache
  FLINK_ARTIFACT_DIR: $(Pipeline.Workspace)/flink_artifact
  # Secrets are mapped through runtime expressions so that they can be referenced
  # from templates (secret variables are not exposed to expressions directly).
  SECRET_S3_BUCKET: $[variables.IT_CASE_S3_BUCKET]
  SECRET_S3_ACCESS_KEY: $[variables.IT_CASE_S3_ACCESS_KEY]
  SECRET_S3_SECRET_KEY: $[variables.IT_CASE_S3_SECRET_KEY]
stages:
  # CI / PR triggered stage:
  - stage: ci
    displayName: "CI build (custom builders)"
    # Runs for every build except manual "release" mode builds (see ci_release below).
    condition: not(eq(variables['MODE'], 'release'))
    jobs:
      - template: tools/azure-pipelines/jobs-template.yml
        parameters: # see template file for a definition of the parameters.
          stage_name: ci_build
          test_pool_definition:
            vmImage: 'ubuntu-22.04'
          e2e_pool_definition:
            vmImage: 'ubuntu-22.04'
          environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
          run_end_to_end: false
          container: flink-build-container
          jdk: 17
      - job: docs_404_check # run on a MSFT provided machine
        pool:
          vmImage: 'ubuntu-22.04'
        steps:
          - task: GoTool@0
            inputs:
              version: '1.18.1'
          - script: ./tools/ci/docs.sh
  # CI / Special stage for release, e.g. building PyFlink wheel packages, etc:
  - stage: ci_release
    displayName: "CI build (release)"
    # Only runs when a build is started manually with the MODE variable set to "release".
    condition: and(eq(variables['Build.Reason'], 'Manual'), eq(variables['MODE'], 'release'))
    jobs:
      - template: tools/azure-pipelines/build-python-wheels.yml
        parameters:
          stage_name: cron_python_wheels
          environment: PROFILE="-Dflink.hadoop.version=2.10.2"
          container: flink-build-container