Merge branch 'master' of https://github.com/apache/beam into users/damccorm/periodicImpulseDrain
damccorm committed Nov 4, 2022
2 parents 950d8a6 + 7da182a commit a6381d6
Showing 376 changed files with 28,210 additions and 3,297 deletions.
1 change: 1 addition & 0 deletions .asf.yaml
@@ -38,6 +38,7 @@ github:
collaborators:
- pcoet
- olehborysevych
- rshamunov

enabled_merge_buttons:
squash: true
1 change: 0 additions & 1 deletion .github/REVIEWERS.yml
@@ -33,7 +33,6 @@ labels:
- ryanthompson591
- tvalentyn
- pabloem
- y1chi
exclusionList: []
- name: Java
reviewers:
1 change: 1 addition & 0 deletions .github/codecov.yml
@@ -64,6 +64,7 @@ ignore:
- "**/*_test_py3*.py"
- "**/*_microbenchmark.py"
- "sdks/go/pkg/beam/register/register.go"
- "sdks/python/apache_beam/testing/benchmarks/nexmark/**"

# See https://docs.codecov.com/docs/flags for options.
flag_management:
2 changes: 1 addition & 1 deletion .github/workflows/build_playground_backend.yml
@@ -42,7 +42,7 @@ jobs:
steps:
- name: Check out the repo
uses: actions/checkout@v3
- uses: actions/setup-java@v3
- uses: actions/setup-java@v3.6.0
with:
distribution: 'zulu'
java-version: '8'
2 changes: 1 addition & 1 deletion .github/workflows/build_playground_frontend.yml
@@ -43,7 +43,7 @@ jobs:
steps:
- name: Check out the repo
uses: actions/checkout@v3
- uses: actions/setup-java@v3
- uses: actions/setup-java@v3.6.0
with:
distribution: 'zulu'
java-version: '8'
2 changes: 1 addition & 1 deletion .github/workflows/java_tests.yml
@@ -172,7 +172,7 @@ jobs:
project_id: ${{ secrets.GCP_PROJECT_ID }}
export_default_credentials: true
- name: Set Java Version
uses: actions/setup-java@v3
uses: actions/setup-java@v3.6.0
with:
distribution: 'zulu'
java-version: 8
2 changes: 1 addition & 1 deletion .github/workflows/playground_deploy_examples.yml
@@ -78,7 +78,7 @@ jobs:
- uses: actions/setup-python@v4
with:
python-version: '3.8'
- uses: actions/setup-java@v3
- uses: actions/setup-java@v3.6.0
with:
distribution: 'zulu'
java-version: '8'
2 changes: 1 addition & 1 deletion .github/workflows/playground_examples_ci_reusable.yml
@@ -100,7 +100,7 @@ jobs:
- uses: actions/setup-python@v4
with:
python-version: '3.8'
- uses: actions/setup-java@v3
- uses: actions/setup-java@v3.6.0
with:
distribution: 'zulu'
java-version: '8'
7 changes: 3 additions & 4 deletions .github/workflows/run_rc_validation.yml
@@ -88,8 +88,7 @@ jobs:
git config user.name $GITHUB_ACTOR
git config user.email actions@"$RUNNER_NAME".local
- name: Verify working branch name
run:
- sh ./ci_check_git_branch.sh $WORKING_BRANCH
run: ./scripts/ci/ci_check_git_branch.sh $WORKING_BRANCH
- name: Create Pull Request
run: |
git checkout -b ${{env.WORKING_BRANCH}} ${{ env.RC_TAG }} --quiet
@@ -121,7 +120,7 @@ jobs:


- name: Setup Java JDK
uses: actions/setup-java@v3.5.1
uses: actions/setup-java@v3.6.0
with:
distribution: 'temurin'
java-version: 11
@@ -188,7 +187,7 @@ jobs:
uses: azure/setup-kubectl@v3

- name: Setup Java JDK
uses: actions/setup-java@v3.5.1
uses: actions/setup-java@v3.6.0
with:
distribution: 'temurin'
java-version: 11
54 changes: 36 additions & 18 deletions .github/workflows/tour_of_beam_backend_integration.yml
@@ -23,11 +23,15 @@ on:
push:
branches: ['master', 'release-*']
tags: 'v*'
paths: ['learning/tour-of-beam/backend/**']
paths:
- 'learning/tour-of-beam/backend/**'
- 'playground/backend/**'
pull_request:
branches: ['master', 'release-*']
tags: 'v*'
paths: ['learning/tour-of-beam/backend/**']
paths:
- 'learning/tour-of-beam/backend/**'
- 'playground/backend/**'

# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
@@ -36,12 +40,23 @@ concurrency:

env:
TOB_LEARNING_ROOT: ./samples/learning-content
DATASTORE_PROJECT_ID: test-proj
# firebase
GOOGLE_CLOUD_PROJECT: demo-test-proj
FIREBASE_AUTH_EMULATOR_HOST: localhost:9099
# datastore
DATASTORE_PROJECT_ID: demo-test-proj
DATASTORE_EMULATOR_HOST: localhost:8081
DATASTORE_EMULATOR_DATADIR: ./datadir
# playground API
PLAYGROUND_ROUTER_HOST: localhost:8000

# GCF
PORT_SDK_LIST: 8801
PORT_GET_CONTENT_TREE: 8802
PORT_GET_UNIT_CONTENT: 8803
PORT_GET_USER_PROGRESS: 8804
PORT_POST_UNIT_COMPLETE: 8805
PORT_POST_USER_CODE: 8806


jobs:
@@ -56,20 +71,12 @@ jobs:
with:
# pin to the biggest Go version supported by Cloud Functions runtime
go-version: '1.16'

# 1. Datastore emulator
- name: 'Set up Cloud SDK'
uses: 'google-github-actions/setup-gcloud@v0'
with:
version: 397.0.0
project_id: ${{ env.DATASTORE_PROJECT_ID }}
install_components: 'beta,cloud-datastore-emulator'
- name: 'Start datastore emulator'
run: |
gcloud beta emulators datastore start \
--data-dir=${{ env.DATASTORE_EMULATOR_DATADIR }} \
--host-port=${{ env.DATASTORE_EMULATOR_HOST }} \
--consistency=1 &
- name: Build Playground router image
run: ./gradlew playground:backend:containers:router:docker
working-directory: ${{ env.GITHUB_WORKSPACE }}
# 1. Start emulators
- name: Start emulators
run: docker-compose up -d

# 2. start function-framework processes in BG
- name: Compile CF
@@ -80,14 +87,25 @@
run: PORT=${{ env.PORT_GET_CONTENT_TREE }} FUNCTION_TARGET=getContentTree ./tob_function &
- name: Run getUnitContent in background
run: PORT=${{ env.PORT_GET_UNIT_CONTENT }} FUNCTION_TARGET=getUnitContent ./tob_function &
- name: Run getUserProgress in background
run: PORT=${{ env.PORT_GET_USER_PROGRESS }} FUNCTION_TARGET=getUserProgress ./tob_function &
- name: Run postUnitComplete in background
run: PORT=${{ env.PORT_POST_UNIT_COMPLETE }} FUNCTION_TARGET=postUnitComplete ./tob_function &
- name: Run postUserCode in background
run: PORT=${{ env.PORT_POST_USER_CODE }} FUNCTION_TARGET=postUserCode ./tob_function &

# 3. Load data in datastore: run CD step on samples/learning-content
- name: Run CI/CD to populate datastore
run: go run cmd/ci_cd/ci_cd.go

# 4. Check sdkList, getContentTree, getUnitContent: run integration tests
# 4. run integration tests
- name: Go integration tests
run: go test -v --tags integration ./integration_tests/...

- name: Stop emulators
if: always()
run: docker-compose down

# 5. Compare storage/datastore/index.yml VS generated
- name: Check index.yaml
run: |
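The "Run … in background" steps above all launch the same compiled binary (./tob_function) and rely on the Functions Framework for Go convention: FUNCTION_TARGET names the registered function to serve and PORT chooses the local port. The sketch below is only an illustration of that pattern, not the actual Tour of Beam source — the handler bodies and response payloads are placeholders, while the import paths and the FUNCTION_TARGET/PORT behaviour follow the public functions-framework-go API.

```go
// Minimal sketch: one binary that can back each background step above,
// with the served function selected at start-up via FUNCTION_TARGET.
package main

import (
	"log"
	"net/http"
	"os"

	"github.com/GoogleCloudPlatform/functions-framework-go/funcframework"
	"github.com/GoogleCloudPlatform/functions-framework-go/functions"
)

func init() {
	// Register handlers under the names used as FUNCTION_TARGET in the workflow.
	// The bodies here are placeholders.
	functions.HTTP("getUserProgress", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte(`{"units":[]}`))
	})
	functions.HTTP("postUnitComplete", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	})
}

func main() {
	port := os.Getenv("PORT") // e.g. 8804 when FUNCTION_TARGET=getUserProgress
	if port == "" {
		port = "8080"
	}
	// Serves the function named by the FUNCTION_TARGET environment variable.
	if err := funcframework.Start(port); err != nil {
		log.Fatalf("funcframework.Start: %v", err)
	}
}
```

The later steps then exercise these endpoints: `go test -v --tags integration ./integration_tests/...` pulls in test files guarded by an `integration` build tag (e.g. `//go:build integration`) that a plain `go test` would skip, and both the tests and `cmd/ci_cd/ci_cd.go` can presumably reach the emulated backends because DATASTORE_EMULATOR_HOST and FIREBASE_AUTH_EMULATOR_HOST are exported at the workflow level.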
5 changes: 2 additions & 3 deletions .github/workflows/verify_release_build.yml
@@ -39,9 +39,8 @@ jobs:
RELEASE_VER: ${{ github.event.inputs.RELEASE_VER }}
steps:
- name: Verify branch name
run:
- sh ./ci_check_git_branch.sh $WORKING_BRANCH
working-directory: 'scripts/ci'
run: ./scripts/ci/ci_check_git_branch.sh $WORKING_BRANCH

- name: Set RELEASE_BRANCH env variable
run: |
RELEASE_BRANCH=release-${{env.RELEASE_VER}}
4 changes: 2 additions & 2 deletions .test-infra/jenkins/Flink.groovy
@@ -17,7 +17,7 @@
*/

class Flink {
private static final String flinkDownloadUrl = 'https://archive.apache.org/dist/flink/flink-1.12.3/flink-1.12.3-bin-scala_2.11.tgz'
private static final String flinkDownloadUrl = 'https://archive.apache.org/dist/flink/flink-1.13.6/flink-1.13.6-bin-scala_2.12.tgz'
private static final String hadoopDownloadUrl = 'https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-10.0/flink-shaded-hadoop-2-uber-2.8.3-10.0.jar'
private static final String FLINK_DIR = '"$WORKSPACE/src/.test-infra/dataproc"'
private static final String FLINK_SCRIPT = 'flink_cluster.sh'
@@ -75,7 +75,7 @@ class Flink {
}

/**
* Updates the number of worker nodes in a cluster.
* Updates the number of worker nodes in a cluster.
*
* @param workerCount - the new number of worker nodes in the cluster
*/
7 changes: 4 additions & 3 deletions .test-infra/jenkins/README.md
@@ -140,9 +140,9 @@ Beam Jenkins overview page: [link](https://ci-beam.apache.org/)
| beam_PerformanceTests_AvroIOIT | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_AvroIOIT/), [hdfs_cron](https://ci-beam.apache.org/job/beam_PerformanceTests_AvroIOIT_HDFS/) | `Run Java AvroIO Performance Test` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_AvroIOIT/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_AvroIOIT) [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_AvroIOIT_HDFS/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_AvroIOIT_HDFS) |
| beam_PerformanceTests_BiqQueryIO_Read_Python | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Read_Python/), [phrase](https://ci-beam.apache.org/view/PerformanceTests/job/beam_PerformanceTests_BiqQueryIO_Read_Python_PR/) | `Run BigQueryIO Read Performance Test Python` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Read_Python/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Read_Python) |
| beam_PerformanceTests_BiqQueryIO_Write_Python_Batch | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch/), [phrase](https://ci-beam.apache.org/view/PerformanceTests/job/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch_PR/) | `Run BigQueryIO Write Performance Test Python Batch` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch) |
| beam_BiqQueryIO_Batch_Performance_Test_Java_Avro | [cron](https://ci-beam.apache.org/job/beam_BiqQueryIO_Batch_Performance_Test_Java_Avro/) | `Run BigQueryIO Batch Performance Test Java Avro` | [![Build Status](https://ci-beam.apache.org/job/beam_BiqQueryIO_Batch_Performance_Test_Java_Avro/badge/icon)](https://ci-beam.apache.org/job/beam_BiqQueryIO_Batch_Performance_Test_Java_Avro/) |
| beam_BiqQueryIO_Batch_Performance_Test_Java_Json | [cron](https://ci-beam.apache.org/job/beam_BiqQueryIO_Batch_Performance_Test_Java_Json/) | `Run BigQueryIO Batch Performance Test Java Json` | [![Build Status](https://ci-beam.apache.org/job/beam_BiqQueryIO_Batch_Performance_Test_Java_Json/badge/icon)](https://ci-beam.apache.org/job/beam_BiqQueryIO_Batch_Performance_Test_Java_Json/) |
| beam_BiqQueryIO_Streaming_Performance_Test_Java | [cron](https://ci-beam.apache.org/job/beam_BiqQueryIO_Streaming_Performance_Test_Java/) | `Run BigQueryIO Streaming Performance Test Java` | [![Build Status](https://ci-beam.apache.org/job/beam_BiqQueryIO_Streaming_Performance_Test_Java/badge/icon)](https://ci-beam.apache.org/job/beam_BiqQueryIO_Streaming_Performance_Test_Java/) |
| beam_PerformanceTests_BiqQueryIO_Batch_Java_Avro | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Batch_Java_Avro/) | `Run BigQueryIO Batch Performance Test Java Avro` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Batch_Java_Avro/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Batch_Java_Avro/) |
| beam_PerformanceTests_BiqQueryIO_Batch_Java_Json | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Batch_Java_Json/) | `Run BigQueryIO Batch Performance Test Java Json` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Batch_Java_Json/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Batch_Java_Json/) |
| beam_PerformanceTests_BiqQueryIO_Streaming_Java | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Streaming_Java/) | `Run BigQueryIO Streaming Performance Test Java` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Streaming_Java/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_BiqQueryIO_Streaming_Java/) |
| beam_PerformanceTests_Cdap | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_Cdap/) | `Run Java CdapIO Performance Test` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_Cdap/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_Cdap) |
| beam_PerformanceTests_Compressed_TextIOIT | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT/), [hdfs_cron](https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/) | `Run Java CompressedTextIO Performance Test` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT) [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS) |
| beam_PerformanceTests_HadoopFormat | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_HadoopFormat/) | `Run Java HadoopFormatIO Performance Test` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_HadoopFormat/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_HadoopFormat) |
@@ -155,6 +155,7 @@ Beam Jenkins overview page: [link](https://ci-beam.apache.org/)
| beam_PerformanceTests_PubsubIOIT_Python_Streaming | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_PubsubIOIT_Python_Streaming/), [phrase](https://ci-beam.apache.org/view/PerformanceTests/job/beam_PerformanceTests_PubsubIOIT_Python_Streaming_PR/) | `Run PubsubIO Performance Test Python` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_PubsubIOIT_Python_Streaming/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_PubsubIOIT_Python_Streaming) |
| beam_PerformanceTests_SpannerIO_Read_2GB_Python | [cron](https://ci-beam.apache.org/view/PerformanceTests/job/beam_PerformanceTests_SpannerIO_Read_2GB_Python/), [phrase](https://ci-beam.apache.org/view/PerformanceTests/job/beam_PerformanceTests_SpannerIO_Read_2GB_Python_PR/) | `Run SpannerIO Read 2GB Performance Test Python Batch` | [![Build Status](https://ci-beam.apache.org/view/PerformanceTests/job/beam_PerformanceTests_SpannerIO_Read_2GB_Python/badge/icon)](https://ci-beam.apache.org/view/PerformanceTests/job/beam_PerformanceTests_SpannerIO_Read_2GB_Python/) |
| beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch/), [phrase](https://ci-beam.apache.org/view/PerformanceTests/job/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch_PR/) | `Run SpannerIO Write 2GB Performance Test Python Batch` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch) |
| beam_PerformanceTests_SparkReceiverIOIT | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_SparkReceiverIOIT/) | `Run Java SparkReceiverIO Performance Test` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_SparkReceiverIO/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_SparkReceiverIO) |
| beam_PerformanceTests_TFRecordIOIT | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_TFRecordIOIT/) | `Run Java TFRecordIO Performance Test` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_TFRecordIOIT/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_TFRecordIOIT) |
| beam_PerformanceTests_TextIOIT | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_TextIOIT/), [hdfs_cron](https://ci-beam.apache.org/job/beam_PerformanceTests_TextIOIT_HDFS/) | `Run Java TextIO Performance Test` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_TextIOIT/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_TextIOIT) [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_TextIOIT_HDFS/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_TextIOIT_HDFS) |
| beam_PerformanceTests_WordCountIT_Py37 | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/) | `Run Python37 WordCountIT Performance Test` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_WordCountIT_Py37) |
@@ -132,7 +132,7 @@ def loadTestJob = { scope, triggeringContext, mode ->
"${DOCKER_CONTAINER_REGISTRY}/${DOCKER_BEAM_SDK_IMAGE}"
],
initialParallelism,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.13_job_server:latest")

// Execute all scenarios connected with initial parallelism.
loadTestsBuilder.loadTests(scope, CommonTestProperties.SDK.PYTHON, initialScenarios, 'Combine', mode)
2 changes: 1 addition & 1 deletion .test-infra/jenkins/job_LoadTests_GBK_Flink_Python.groovy
@@ -146,7 +146,7 @@ def loadTest = { scope, triggeringContext ->
"${DOCKER_CONTAINER_REGISTRY}/${DOCKER_BEAM_SDK_IMAGE}"
],
numberOfWorkers,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.13_job_server:latest")

def configurations = testScenarios.findAll { it.pipelineOptions?.parallelism?.value == numberOfWorkers }
loadTestsBuilder.loadTests(scope, sdk, configurations, "GBK", "batch")
@@ -320,7 +320,7 @@ def loadTestJob = { scope, triggeringContext, mode ->
"${DOCKER_CONTAINER_REGISTRY}/${DOCKER_BEAM_SDK_IMAGE}"
],
numberOfWorkers,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.13_job_server:latest")

loadTestsBuilder.loadTests(scope, CommonTestProperties.SDK.PYTHON, testScenarios, 'ParDo', mode)
}
@@ -137,7 +137,7 @@ def loadTest = { scope, triggeringContext ->
"${DOCKER_CONTAINER_REGISTRY}/${DOCKER_BEAM_SDK_IMAGE}"
],
numberOfWorkers,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.13_job_server:latest")

loadTestsBuilder.loadTests(scope, CommonTestProperties.SDK.PYTHON, testScenarios, 'CoGBK', 'batch')
}