diff --git a/.github/workflows/playground_examples_ci_reusable.yml b/.github/workflows/playground_examples_ci_reusable.yml index 1b1e4efd8384..7c5f039c3488 100644 --- a/.github/workflows/playground_examples_ci_reusable.yml +++ b/.github/workflows/playground_examples_ci_reusable.yml @@ -91,6 +91,7 @@ jobs: working-directory: playground/infrastructure env: BEAM_ROOT_DIR: "../.." + BEAM_EXAMPLE_CATEGORIES: "../categories.yaml" ci_cd: name: ${{ inputs.step }} ${{ inputs.sdk }} ${{ inputs.origin }} diff --git a/.gitignore b/.gitignore index 73c9e05b4eec..5c1068399458 100644 --- a/.gitignore +++ b/.gitignore @@ -127,6 +127,8 @@ website/www/yarn-error.log **/.packages **/generated_plugin_registrant.dart playground/frontend/playground_components/pubspec.lock +playground/frontend/playground_components/test/tools/extract_symbols_java/dependencies +playground/frontend/playground_components_dev/pubspec.lock # Ignore Beam Playground Terraform **/.terraform @@ -136,4 +138,4 @@ playground/frontend/playground_components/pubspec.lock **/*.tfvars # Ignore Katas auto-generated files -**/*-remote-info.yaml \ No newline at end of file +**/*-remote-info.yaml diff --git a/.test-infra/jenkins/job_CloudMLBenchmarkTests_Python.groovy b/.test-infra/jenkins/job_CloudMLBenchmarkTests_Python.groovy new file mode 100644 index 000000000000..770d8d7367d0 --- /dev/null +++ b/.test-infra/jenkins/job_CloudMLBenchmarkTests_Python.groovy @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import CommonJobProperties as commonJobProperties +import PhraseTriggeringPostCommitBuilder +import CronJobBuilder + +def cloudMLJob = { scope -> + scope.description('Runs the TFT Criteo Examples on the Dataflow runner.') + + // Set common parameters. + commonJobProperties.setTopLevelMainJobProperties(scope, 'master', 360) + + // Gradle goals for this job. 
+  scope.steps {
+    gradle {
+      rootBuildScriptDir(commonJobProperties.checkoutDir)
+      commonJobProperties.setGradleSwitches(delegate)
+      tasks(':sdks:python:test-suites:dataflow:tftTests')
+    }
+  }
+}
+
+PhraseTriggeringPostCommitBuilder.postCommitJob(
+    'beam_CloudML_Benchmarks_Dataflow',
+    'Run TFT Criteo Benchmarks',
+    'TFT Criteo benchmarks on Dataflow(\"Run TFT Criteo Benchmarks\")',
+    this
+    ) {
+      cloudMLJob(delegate)
+    }
+
+CronJobBuilder.cronJob(
+    'beam_CloudML_Benchmarks_Dataflow',
+    'H 14 * * *',
+    this
+    ) {
+      cloudMLJob(delegate)
+    }
diff --git a/.test-infra/jenkins/job_LoadTests_Combine_Python.groovy b/.test-infra/jenkins/job_LoadTests_Combine_Python.groovy
index 7c4f1ead0cf7..99fd10e3bdde 100644
--- a/.test-infra/jenkins/job_LoadTests_Combine_Python.groovy
+++ b/.test-infra/jenkins/job_LoadTests_Combine_Python.groovy
@@ -100,8 +100,7 @@ def loadTestConfigurations = { datasetName, mode ->
 def addStreamingOptions(test){
   test.pipelineOptions << [streaming: null,
-    // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved.
-    experiments: "use_runner_v2,shuffle_mode=appliance"
+    experiments: "use_runner_v2"
   ]
 }
diff --git a/.test-infra/jenkins/job_LoadTests_GBK_Python.groovy b/.test-infra/jenkins/job_LoadTests_GBK_Python.groovy
index 9a38af439bde..1a772704ed7b 100644
--- a/.test-infra/jenkins/job_LoadTests_GBK_Python.groovy
+++ b/.test-infra/jenkins/job_LoadTests_GBK_Python.groovy
@@ -156,8 +156,7 @@ def addStreamingOptions(test) {
     // Use the new Dataflow runner, which offers improved efficiency of Dataflow jobs.
     // See https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline#dataflow-runner-v2
     // for more details.
-    // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved.
-    experiments: 'use_runner_v2,shuffle_mode=appliance',
+    experiments: 'use_runner_v2',
   ]
 }
diff --git a/.test-infra/jenkins/job_LoadTests_GBK_Python_reiterate.groovy b/.test-infra/jenkins/job_LoadTests_GBK_Python_reiterate.groovy
index 3fa262ab5912..d1960abce170 100644
--- a/.test-infra/jenkins/job_LoadTests_GBK_Python_reiterate.groovy
+++ b/.test-infra/jenkins/job_LoadTests_GBK_Python_reiterate.groovy
@@ -86,8 +86,7 @@ def addStreamingOptions(test) {
     // Use the new Dataflow runner, which offers improved efficiency of Dataflow jobs.
     // See https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline#dataflow-runner-v2
     // for more details.
-    // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved.
-    experiments: 'use_runner_v2,shuffle_mode=appliance',
+    experiments: 'use_runner_v2',
   ]
 }
diff --git a/.test-infra/jenkins/job_LoadTests_ParDo_Python.groovy b/.test-infra/jenkins/job_LoadTests_ParDo_Python.groovy
index 44e9497dac91..090361a21a5e 100644
--- a/.test-infra/jenkins/job_LoadTests_ParDo_Python.groovy
+++ b/.test-infra/jenkins/job_LoadTests_ParDo_Python.groovy
@@ -131,8 +131,7 @@ def addStreamingOptions(test) {
     // Use the new Dataflow runner, which offers improved efficiency of Dataflow jobs.
     // See https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline#dataflow-runner-v2
     // for more details.
-    // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved.
- experiments: 'use_runner_v2,shuffle_mode=appliance', + experiments: 'use_runner_v2', ] } diff --git a/.test-infra/jenkins/job_LoadTests_SideInput_Python.groovy b/.test-infra/jenkins/job_LoadTests_SideInput_Python.groovy index 404d74c41ad7..5ed7cc6381df 100644 --- a/.test-infra/jenkins/job_LoadTests_SideInput_Python.groovy +++ b/.test-infra/jenkins/job_LoadTests_SideInput_Python.groovy @@ -39,8 +39,7 @@ def fromTemplate = { mode, name, id, datasetName, testSpecificOptions -> influx_measurement : "python_${mode}_sideinput_${id}", num_workers : 10, autoscaling_algorithm: 'NONE', - // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved. - experiments : 'use_runner_v2,shuffle_mode=appliance', + experiments : 'use_runner_v2', ] << testSpecificOptions ] } diff --git a/.test-infra/jenkins/job_PerformanceTests_KafkaIO_IT.groovy b/.test-infra/jenkins/job_PerformanceTests_KafkaIO_IT.groovy index 4403e8525bf3..d513dd96a7e2 100644 --- a/.test-infra/jenkins/job_PerformanceTests_KafkaIO_IT.groovy +++ b/.test-infra/jenkins/job_PerformanceTests_KafkaIO_IT.groovy @@ -106,8 +106,7 @@ job(jobName) { readTimeout : '1500', bigQueryTable : 'kafkaioit_results_runner_v2', influxMeasurement : 'kafkaioit_results_runner_v2', - // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved. - experiments : 'use_runner_v2,shuffle_mode=appliance,use_unified_worker', + experiments : 'use_runner_v2,use_unified_worker', ] steps { diff --git a/.test-infra/metrics/sync/github/sync_workflows.py b/.test-infra/metrics/sync/github/sync_workflows.py new file mode 100644 index 000000000000..646bd78cf61b --- /dev/null +++ b/.test-infra/metrics/sync/github/sync_workflows.py @@ -0,0 +1,187 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +''' +This module queries GitHub to collect Beam-related workflows metrics and put them in +PostgreSQL. +This Script is running every 3 hours in a cloud function in apache-beam-testing project. +This cloud function is triggered by a pubsub topic. 
+You can find the Cloud Function at the following link:
+https://console.cloud.google.com/functions/details/us-central1/github_actions_workflows_dashboard_sync?env=gen1&project=apache-beam-testing
+Pub/Sub topic: https://console.cloud.google.com/cloudpubsub/topic/detail/github_actions_workflows_sync?project=apache-beam-testing
+Cron job: https://console.cloud.google.com/cloudscheduler/jobs/edit/us-central1/github_actions_workflows_dashboard_sync?project=apache-beam-testing
+It writes the latest 10 runs of every post-commit workflow on the master branch to the beammetrics database.
+'''
+
+import os
+import sys
+import time
+import re
+import requests
+import psycopg2
+
+from datetime import datetime
+from github import GithubIntegration
+
+DB_HOST = os.environ['DB_HOST']
+DB_PORT = os.environ['DB_PORT']
+DB_NAME = os.environ['DB_NAME']
+DB_USER_NAME = os.environ['DB_USER']
+DB_PASSWORD = os.environ['DB_PASS']
+GH_WORKFLOWS_TABLE_NAME = "github_workflows"
+# Number of workflows to request per page from the GitHub API
+GH_NUMBER_OF_WORKFLOWS = 100
+GH_WORKFLOWS_NUMBER_EXECUTIONS = 100
+WORKFLOWS_OBJECT_LIST = []
+
+
+class Workflow:
+    def __init__(self,id,name,filename):
+        self.id = id
+        self.name = name
+        self.filename = filename
+        self.listOfRuns = []
+        self.runUrl = []
+
+# The table will save the latest ten runs of every workflow
+GH_WORKFLOWS_CREATE_TABLE_QUERY = f"""
+CREATE TABLE IF NOT EXISTS {GH_WORKFLOWS_TABLE_NAME} (
+    job_name text PRIMARY KEY,
+    job_yml_filename text"""
+for i in range(0,GH_WORKFLOWS_NUMBER_EXECUTIONS):
+    i = i + 1
+    GH_WORKFLOWS_CREATE_TABLE_QUERY += """,\n    run{} text,
+    run{}Id text""".format(str(i),str(i))
+GH_WORKFLOWS_CREATE_TABLE_QUERY += ")\n"
+
+def githubWorkflowsGrafanaSync(data,context):
+    print('Started')
+    print('Updating table with recent workflow runs')
+    databaseOperations(initDbConnection(),fetchWorkflowData())
+    print('Done')
+    return "Completed"
+
+def initDbConnection():
+    '''Init connection with the database'''
+    connection = None
+    maxRetries = 3
+    i = 0
+    while connection == None and i < maxRetries:
+        try:
+            connection = psycopg2.connect(
+                f"dbname='{DB_NAME}' user='{DB_USER_NAME}' host='{DB_HOST}'"
+                f" port='{DB_PORT}' password='{DB_PASSWORD}'")
+        except Exception as e:
+            print('Failed to connect to DB; retrying in 1 minute')
+            print(e)
+            time.sleep(60)
+            i = i + 1
+            if i >= maxRetries:
+                print("Number of retries exceeded")
+                sys.exit(1)
+    return connection
+
+def getToken():
+    git_integration = GithubIntegration(
+        os.environ["GH_APP_ID"],
+        os.environ["GH_PEM_KEY"])
+    token = git_integration.get_access_token(
+        os.environ["GH_APP_INSTALLATION_ID"]
+        ).token
+    return token
+
+def retriesRequest(request):
+    requestSucceeded = False
+    retryFactor = 1
+    while not requestSucceeded:
+        retryTime = 60 * retryFactor
+        if request.status_code != 200:
+            print('Failed to get the request with code {}'.format(request.status_code))
+            time.sleep(retryTime)
+            retryFactor = retryFactor + retryFactor
+            if retryFactor * 60 >= 3600:
+                print("Error: the request took more than an hour")
+                sys.exit(1)
+        else:
+            requestSucceeded = True
+def fetchWorkflowData():
+    '''Return a json with all the workflows and their latest
+    ten executions'''
+    completed = False
+    page = 1
+    workflows = []
+    try:
+        while not completed:
+            url = "https://api.github.com/repos/apache/beam/actions/workflows"
+            queryOptions = { 'branch' : 'master', 'page': page, 'per_page' : GH_NUMBER_OF_WORKFLOWS }
+            response = requests.get(url = url, params = queryOptions)
+            retriesRequest(response)
+            jsonResponse = response.json()
+            if 
jsonResponse['total_count'] >= GH_NUMBER_OF_WORKFLOWS: + page = page + 1 + workflowsPage = jsonResponse['workflows'] + workflows.append(workflowsPage) + else: + completed = True + workflowsPage = jsonResponse['workflows'] + workflows.append(workflowsPage) + for pageItem in workflows: + for item in pageItem: + path =item['path'] + isPostCommit = re.search('(.*)postcommit(.*)',path) + if isPostCommit: + result = re.search('/(.*).yml', path) + path =(result.group(1)) + ".yml" + workflowObject = Workflow(item['id'],item['name'],path) + WORKFLOWS_OBJECT_LIST.append(workflowObject) + url = "https://api.github.com/repos/apache/beam/actions/workflows/" + queryOptions = { 'branch' : 'master', 'per_page' : GH_WORKFLOWS_NUMBER_EXECUTIONS, + 'page' :'1', 'exclude_pull_request':True } + for workflow in WORKFLOWS_OBJECT_LIST: + response = requests.get(url = "{}{}/runs".format(url,workflow.id), + params=queryOptions) + retriesRequest(response) + responseJson = response.json() + workflowsRuns = responseJson['workflow_runs'] + for item in workflowsRuns: + if item['status'] == 'completed': + workflow.runUrl.append(item['html_url']) + workflow.listOfRuns.append(item['conclusion']) + else: + workflow.listOfRuns.append(item['status']) + workflow.runUrl.append(item['html_url']) + for i in range(0,GH_WORKFLOWS_NUMBER_EXECUTIONS): + if i >= len(workflow.listOfRuns): + workflow.listOfRuns.append('None') + workflow.runUrl.append('None') + except Exception as e: + print('Failed to get GHA workflows') + print(e) + +def databaseOperations(connection,fetchWorkflows): + '''Create the table if not exist and update the table with the latest runs + of the workflows ''' + queryInsert = "INSERT INTO {} VALUES ".format(GH_WORKFLOWS_TABLE_NAME) + cursor = connection.cursor() + cursor.execute(GH_WORKFLOWS_CREATE_TABLE_QUERY) + cursor.execute("DELETE FROM {};".format(GH_WORKFLOWS_TABLE_NAME)) + query = "" + for workflow in WORKFLOWS_OBJECT_LIST: + rowInsert = "(\'{}\',\'{}\'".format(workflow.name,workflow.filename) + for run, runUrl in zip(workflow.listOfRuns,workflow.runUrl): + rowInsert += ",\'{}\',\'{}\'".format(run,runUrl) + query = query + rowInsert + query += ")," + query = query[:-1] + ";" + query = queryInsert + query + cursor.execute(query) + cursor.close() + connection.commit() + connection.close() \ No newline at end of file diff --git a/CHANGES.md b/CHANGES.md index e87cd3baf213..18b7ed989fb6 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -65,6 +65,9 @@ * RunInference Wrapper with Sklearn Model Handler support added in Go SDK ([#24497](https://github.com/apache/beam/issues/23382)). * X feature added (Java/Python) ([#X](https://github.com/apache/beam/issues/X)). +* Adding override of allowed TLS algorithms (Java), now maintaining the disabled/legacy algorithms + present in 2.43.0 (up to 1.8.0_342, 11.0.16, 17.0.2 for respective Java versions). This is accompanied + by an explicit re-enabling of TLSv1 and TLSv1.1 for Java 8 and Java 11. ## Breaking Changes @@ -81,6 +84,7 @@ ## Bugfixes * Avoids Cassandra syntax error when user-defined query has no where clause in it (Java) ([#24829](https://github.com/apache/beam/issues/24829)). +* Fixed JDBC connection failures (Java) during handshake due to deprecated TLSv1(.1) protocol for the JDK. ([#24623](https://github.com/apache/beam/issues/24623)) ## Known Issues @@ -147,7 +151,7 @@ * Decreased TextSource CPU utilization by 2.3x (Java) ([#23193](https://github.com/apache/beam/issues/23193)). 
* Fixed bug when using SpannerIO with RuntimeValueProvider options (Java) ([#22146](https://github.com/apache/beam/issues/22146)). -* Fixed issue for unicode rendering on WriteToBigQuery ([#10785](https://github.com/apache/beam/issues/10785)) +* Fixed issue for unicode rendering on WriteToBigQuery ([#22312](https://github.com/apache/beam/issues/22312)) * Remove obsolete variants of BigQuery Read and Write, always using Beam-native variant ([#23564](https://github.com/apache/beam/issues/23564) and [#23559](https://github.com/apache/beam/issues/23559)). * Bumped google-cloud-spanner dependency version to 3.x for Python SDK ([#21198](https://github.com/apache/beam/issues/21198)). diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy index cdf096c1f092..2274906deacd 100644 --- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy +++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy @@ -486,6 +486,7 @@ class BeamModulePlugin implements Plugin { def jsr305_version = "3.0.2" def everit_json_version = "1.14.1" def kafka_version = "2.4.1" + def log4j2_version = "2.17.2" def nemo_version = "0.1" def netty_version = "4.1.77.Final" def postgres_version = "42.2.16" @@ -573,6 +574,7 @@ class BeamModulePlugin implements Plugin { commons_csv : "org.apache.commons:commons-csv:1.8", commons_io : "commons-io:commons-io:2.7", commons_lang3 : "org.apache.commons:commons-lang3:3.9", + commons_logging : "commons-logging:commons-logging:1.2", commons_math3 : "org.apache.commons:commons-math3:3.6.1", dbcp2 : "org.apache.commons:commons-dbcp2:$dbcp2_version", error_prone_annotations : "com.google.errorprone:error_prone_annotations:$errorprone_version", @@ -675,6 +677,7 @@ class BeamModulePlugin implements Plugin { jamm : 'io.github.stephankoelle:jamm:0.4.1', jaxb_api : "jakarta.xml.bind:jakarta.xml.bind-api:$jaxb_api_version", jaxb_impl : "com.sun.xml.bind:jaxb-impl:$jaxb_api_version", + jcl_over_slf4j : "org.slf4j:jcl-over-slf4j:$slf4j_version", jmh_core : "org.openjdk.jmh:jmh-core:$jmh_version", joda_time : "joda-time:joda-time:2.10.10", jsonassert : "org.skyscreamer:jsonassert:1.5.0", @@ -684,6 +687,12 @@ class BeamModulePlugin implements Plugin { junit : "junit:junit:4.13.1", kafka : "org.apache.kafka:kafka_2.11:$kafka_version", kafka_clients : "org.apache.kafka:kafka-clients:$kafka_version", + log4j : "log4j:log4j:1.2.17", + log4j_over_slf4j : "org.slf4j:log4j-over-slf4j:$slf4j_version", + log4j2_api : "org.apache.logging.log4j:log4j-api:$log4j2_version", + log4j2_core : "org.apache.logging.log4j:log4j-core:$log4j2_version", + log4j2_to_slf4j : "org.apache.logging.log4j:log4j-to-slf4j:$log4j2_version", + log4j2_slf4j_impl : "org.apache.logging.log4j:log4j-slf4j-impl:$log4j2_version", mockito_core : "org.mockito:mockito-core:3.7.7", mockito_inline : "org.mockito:mockito-inline:4.5.1", mongo_java_driver : "org.mongodb:mongo-java-driver:3.12.11", @@ -711,9 +720,14 @@ class BeamModulePlugin implements Plugin { sbe_tool : "uk.co.real-logic:sbe-tool:$sbe_tool_version", singlestore_jdbc : "com.singlestore:singlestore-jdbc-client:$singlestore_jdbc_version", slf4j_api : "org.slf4j:slf4j-api:$slf4j_version", - slf4j_simple : "org.slf4j:slf4j-simple:$slf4j_version", + slf4j_android : "org.slf4j:slf4j-android:$slf4j_version", + slf4j_ext : "org.slf4j:slf4j-ext:$slf4j_version", slf4j_jdk14 : "org.slf4j:slf4j-jdk14:$slf4j_version", + slf4j_nop : 
"org.slf4j:slf4j-nop:$slf4j_version", + slf4j_simple : "org.slf4j:slf4j-simple:$slf4j_version", + slf4j_jul_to_slf4j : "org.slf4j:jul-to-slf4j:$slf4j_version", slf4j_log4j12 : "org.slf4j:slf4j-log4j12:$slf4j_version", + slf4j_jcl : "org.slf4j:slf4j-jcl:$slf4j_version", snappy_java : "org.xerial.snappy:snappy-java:1.1.8.4", spark_core : "org.apache.spark:spark-core_2.11:$spark2_version", spark_network_common : "org.apache.spark:spark-network-common_2.11:$spark2_version", diff --git a/examples/notebooks/beam-ml/run_inference_tensorflow.ipynb b/examples/notebooks/beam-ml/run_inference_tensorflow.ipynb index 3f7c7a9aa5ec..8a00ce47b74d 100644 --- a/examples/notebooks/beam-ml/run_inference_tensorflow.ipynb +++ b/examples/notebooks/beam-ml/run_inference_tensorflow.ipynb @@ -47,6 +47,9 @@ }, { "cell_type": "markdown", + "metadata": { + "id": "A8xNRyZMW1yK" + }, "source": [ "# Apache Beam RunInference with TensorFlow\n", "\n", @@ -62,9 +65,6 @@ }, { "cell_type": "markdown", - "metadata": { - "id": "A8xNRyZMW1yK" - }, "source": [ "This notebook demonstrates the use of the RunInference transform for [TensorFlow](https://www.tensorflow.org/).\n", "Beam [RunInference](https://beam.apache.org/releases/pydoc/current/apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RunInference) accepts a ModelHandler generated from [`tfx-bsl`](https://github.com/tensorflow/tfx-bsl) using `CreateModelHandler`.\n", @@ -612,4 +612,4 @@ ] } ] -} \ No newline at end of file +} diff --git a/learning/tour-of-beam/frontend/pubspec.yaml b/learning/tour-of-beam/frontend/pubspec.yaml index 22fa61ebbacf..fa059b5153b8 100644 --- a/learning/tour-of-beam/frontend/pubspec.yaml +++ b/learning/tour-of-beam/frontend/pubspec.yaml @@ -27,7 +27,7 @@ environment: flutter: '>=3.3.2' dependencies: - app_state: ^0.8.1 + app_state: ^0.8.4 collection: ^1.16.0 easy_localization: ^3.0.1 easy_localization_ext: ^0.1.0 diff --git a/playground/frontend/README.md b/playground/frontend/README.md index 862fcdacc050..120d9da7e2ba 100644 --- a/playground/frontend/README.md +++ b/playground/frontend/README.md @@ -141,6 +141,37 @@ Code can be automatically reformatted using: flutter format ./lib ``` +### Unit Tests + +To delete all generated files and re-generate them again and then run tests: + +```bash +./gradlew :playground:frontend:playground_components_test +./gradlew :playground:frontend:test +``` + +To run tests without re-generating files: + +```bash +cd playground/frontend/playground_components +flutter test +cd .. +flutter test +``` + +### Integration Tests + +Integration tests currently can be run only on a local development machine. +Server testing has not been verified yet. + +1. Install and run Chrome Driver: https://chromedriver.chromium.org/downloads +2. Run it on port 4444: `chromedriver --port=4444` +3. 
Run: + +```bash +./gradlew :playground:frontend:integrationTest +``` + ## Localization The project is in the process of migrating from diff --git a/playground/frontend/assets/drag_horizontal.svg b/playground/frontend/assets/drag_horizontal.svg deleted file mode 100644 index f5e8dcda558a..000000000000 --- a/playground/frontend/assets/drag_horizontal.svg +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - diff --git a/playground/frontend/assets/drag_vertical.svg b/playground/frontend/assets/drag_vertical.svg deleted file mode 100644 index fea5377776ef..000000000000 --- a/playground/frontend/assets/drag_vertical.svg +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - diff --git a/playground/frontend/assets/streaming.svg b/playground/frontend/assets/streaming.svg new file mode 100644 index 000000000000..d4e4f94cc734 --- /dev/null +++ b/playground/frontend/assets/streaming.svg @@ -0,0 +1,20 @@ + + + diff --git a/playground/frontend/assets/translations/en.yaml b/playground/frontend/assets/translations/en.yaml index c7d74f96d44f..023e0324c8e7 100644 --- a/playground/frontend/assets/translations/en.yaml +++ b/playground/frontend/assets/translations/en.yaml @@ -19,3 +19,4 @@ intents: playground: clearOutput: 'Clear Output' newExample: 'New Example' + usesEmulatedData: 'This examples uses emulated data' diff --git a/playground/frontend/build.gradle b/playground/frontend/build.gradle index a70799df4774..0ca56258ecb2 100644 --- a/playground/frontend/build.gradle +++ b/playground/frontend/build.gradle @@ -17,18 +17,11 @@ */ -apply plugin: 'org.apache.beam.module' -apply plugin: 'base' +apply(plugin: "org.apache.beam.module") +apply(plugin: "base") applyDockerNature() -def playgroundBackendUrl = project.playgroundBackendUrl -def analyticsUA = project.analyticsUA -def playgroundBackendJavaRouteUrl = project.playgroundBackendJavaRouteUrl -def playgroundBackendGoRouteUrl = project.playgroundBackendGoRouteUrl -def playgroundBackendPythonRouteUrl = project.playgroundBackendPythonRouteUrl -def playgroundBackendScioRouteUrl = project.playgroundBackendScioRouteUrl - -def playgroundJobServerProject = "${project.path.replace('-container', '')}" +def playgroundJobServerProject = "${project.path.replace("-container", "")}" description = project(playgroundJobServerProject).description + " :: Container" @@ -37,10 +30,10 @@ configurations { } dependencies { - dockerDependency project(path: playgroundJobServerProject, configuration: "shadow") + dockerDependency(project(path: playgroundJobServerProject, configuration: "shadow")) } -task generate { +tasks.register("generate") { dependsOn("playground_components:generate") dependsOn("generateCode") @@ -49,7 +42,7 @@ task generate { description = "Generates all generated files." 
} -task printPath { +tasks.register("printPath") { doLast { exec { executable("printenv") @@ -58,7 +51,7 @@ task printPath { } } -task analyze { +tasks.register("analyze") { dependsOn("playground_components:generateCode") dependsOn("generateCode") @@ -73,7 +66,7 @@ task analyze { } } -task pubGet { +tasks.register("pubGet") { group = "build" description = "Get packages for the frontend project" doLast { @@ -84,7 +77,7 @@ task pubGet { } } -task format { +tasks.register("format") { group = "build" description = "Idiomatically format Dart source code" doLast { @@ -95,9 +88,10 @@ task format { } } -task run { +tasks.register("run") { group = "application" description = "Run application on Google Chrome" + doLast { exec { executable("flutter") @@ -106,7 +100,7 @@ task run { } } -task test { +tasks.register("test") { dependsOn("playground_components:generateCode") dependsOn("generateCode") @@ -121,14 +115,14 @@ task test { } } -task precommit { +tasks.register("precommit") { dependsOn("playground_components:precommit") dependsOn("analyze") dependsOn("test") } -task generateCode { +tasks.register("generateCode") { dependsOn("playground_components:generateCode") dependsOn("cleanFlutter") @@ -145,7 +139,7 @@ task generateCode { } } -task cleanFlutter { +tasks.register("cleanFlutter") { group = "build" description = "Remove build artifacts" @@ -157,7 +151,7 @@ task cleanFlutter { } } -task cleanGenerated { +tasks.register("cleanGenerated") { dependsOn("playground_components:cleanGenerated") group = "build" @@ -186,75 +180,59 @@ ext.deleteFilesByRegExp = { re -> } } +tasks.register("integrationTest") { + dependsOn("integrationTest_standalone_change_example_sdk_run") + dependsOn("integrationTest_standalone_miscellaneous_ui") +} + +tasks.register("integrationTest_standalone_change_example_sdk_run") { + runIntegrationTest("standalone_change_example_sdk_run", "/") +} + +tasks.register("integrationTest_standalone_miscellaneous_ui") { + runIntegrationTest("standalone_miscellaneous_ui", "/") +} + +void runIntegrationTest(String path, String url) { + exec { + executable("flutter") + args( + "drive", + "--driver=test_driver/integration_test.dart", + "--target=integration_test/${path}_test.dart", + "--web-launch-url='$url'", + "--device-id=chrome", + ) + } +} + task copyDockerfileDependencies(type: Copy) { group = "build" description = "Copy files that required to build docker container" copy { - from '.' - into 'build/' - exclude 'build' - exclude 'Dockerfile' + from(".") + into("build/") + exclude("build") + exclude("Dockerfile") } copy { - from '../playground' - into 'build/playground' + from("../playground") + into("build/playground") } } docker { group = "build" - description = "Build container for the frontend project" - name containerImageName( - name: project.docker_image_default_repo_prefix + "playground-frontend", - root: project.rootProject.hasProperty(["docker-repository-root"]) ? - project.rootProject["docker-repository-root"] : - project.docker_image_default_repo_root) - files "./build/" - tags containerImageTags() - buildArgs(['FLUTTER_VERSION': project.rootProject.hasProperty(["flutter-version"]) ? - project.rootProject["flutter-version"] : - "3.3.2" ]) + description = "Build container for frontend application" + name = containerImageName( + name: project.docker_image_default_repo_prefix + "playground-frontend", + root: project.rootProject.hasProperty(["docker-repository-root"]) + ? 
project.rootProject["docker-repository-root"] + : project.docker_image_default_repo_root + ) + files("./build/") + tags(containerImageTags()) } // Ensure that we build the required resources and copy and file dependencies from related projects -dockerPrepare.dependsOn copyDockerfileDependencies - -task("createConfig") { - group = "build" - description = "Generate config for the frontend project" - doLast { - def configFileName = "config.g.dart" - def modulePath = project(":playground:frontend").projectDir.absolutePath - def file = new File(modulePath + "/lib", configFileName) - file.write("""/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -const String kApiClientURL = - '${playgroundBackendUrl}'; -const String kAnalyticsUA = '${analyticsUA}'; -const String kApiJavaClientURL = - '${playgroundBackendJavaRouteUrl}'; -const String kApiGoClientURL = - '${playgroundBackendGoRouteUrl}'; -const String kApiPythonClientURL = - '${playgroundBackendPythonRouteUrl}'; -const String kApiScioClientURL = - '${playgroundBackendScioRouteUrl}'; -""") - } -} +dockerPrepare.dependsOn(copyDockerfileDependencies) diff --git a/playground/frontend/integration_test/common/common.dart b/playground/frontend/integration_test/common/common.dart new file mode 100644 index 000000000000..83575a99cb6c --- /dev/null +++ b/playground/frontend/integration_test/common/common.dart @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import 'package:flutter_test/flutter_test.dart'; +import 'package:playground/main.dart' as app; + +Future init(WidgetTester wt) async { + app.main(); + await wt.pumpAndSettle(); +} + +void expectHasDescendant(Finder ancestor, Finder descendant) { + expect( + find.descendant(of: ancestor, matching: descendant), + findsOneWidget, + ); +} + +void expectSimilar(double a, double b) { + Matcher closeToFraction(num value, double fraction) => + closeTo(value, value * fraction); + Matcher onePerCentTolerance(num value) => closeToFraction(value, 0.01); + expect(a, onePerCentTolerance(b)); +} diff --git a/playground/frontend/integration_test/common/common_finders.dart b/playground/frontend/integration_test/common/common_finders.dart new file mode 100644 index 000000000000..7d906ee055ad --- /dev/null +++ b/playground/frontend/integration_test/common/common_finders.dart @@ -0,0 +1,99 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:flutter/widgets.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:playground/modules/examples/components/description_popover/description_popover.dart'; +import 'package:playground/modules/examples/components/description_popover/description_popover_button.dart'; +import 'package:playground/modules/examples/example_selector.dart'; +import 'package:playground/modules/sdk/components/sdk_selector.dart'; +import 'package:playground/modules/sdk/components/sdk_selector_row.dart'; +import 'package:playground/modules/shortcuts/components/shortcuts_modal.dart'; +import 'package:playground/pages/standalone_playground/widgets/editor_textarea_wrapper.dart'; +import 'package:playground/pages/standalone_playground/widgets/feedback/feedback_dropdown_content.dart'; +import 'package:playground/pages/standalone_playground/widgets/feedback/playground_feedback.dart'; +import 'package:playground/pages/standalone_playground/widgets/more_actions.dart'; +import 'package:playground_components/playground_components.dart'; +import 'package:playground_components/src/widgets/drag_handle.dart'; +import 'package:playground_components_dev/playground_components_dev.dart'; + +extension CommonFindersExtension on CommonFinders { + Finder codeTextAreaWrapper() { + return byType(CodeTextAreaWrapper); + } + + Finder descriptionPopoverButton() { + return byType(DescriptionPopoverButton); + } + + Finder descriptionPopover() { + return byType(DescriptionPopover); + } + + Finder dragHandle() { + return byType(DragHandle); + } + + Finder exampleItemInDropdown(String name) { + return widgetWithText(GestureDetector, name); + } + + Finder exampleSelector() { + return byType(ExampleSelector); + } + + Finder feedbackDropdownCancelButton() { + return find.byKey(FeedbackDropdownContent.cancelButtonKey); + } + + Finder 
feedbackDropdownContent() { + return byType(FeedbackDropdownContent); + } + + Finder feedbackDropdownSendButton() { + return find.byKey(FeedbackDropdownContent.sendButtonKey); + } + + Finder feedbackDropdownTextField() { + return find.byKey(FeedbackDropdownContent.textFieldKey); + } + + Finder feedbackThumbDown() { + return find.byKey(PlaygroundFeedback.thumbDownKey); + } + + Finder feedbackThumbUp() { + return find.byKey(PlaygroundFeedback.thumbUpKey); + } + + Finder moreActions() { + return byType(MoreActions); + } + + Finder sdkItemInDropdown(Sdk sdk) { + return find.byType(SdkSelectorRow).and(find.byKey(ValueKey(sdk))); + } + + Finder sdkSelector() { + return byType(SDKSelector); + } + + Finder shortcutsModal() { + return byType(ShortcutsModal); + } +} diff --git a/playground/frontend/integration_test/miscellaneous_ui/description_test.dart b/playground/frontend/integration_test/miscellaneous_ui/description_test.dart new file mode 100644 index 000000000000..17c9e29df337 --- /dev/null +++ b/playground/frontend/integration_test/miscellaneous_ui/description_test.dart @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:flutter/services.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:playground_components_dev/playground_components_dev.dart'; + +import '../common/common.dart'; +import '../common/common_finders.dart'; + +Future checkDescription(WidgetTester wt) async { + await wt.tap(find.descriptionPopoverButton()); + await wt.pumpAndSettle(); + + expect(find.descriptionPopover(), findsOneWidget); + + final example = wt.findPlaygroundController().selectedExample!; + + expectHasDescendant(find.descriptionPopover(), find.text(example.name)); + expectHasDescendant( + find.descriptionPopover(), + find.text(example.description), + ); + + // //TODO Check contains github and colab links, + // //when https://github.com/apache/beam/pull/24820 will be merged + + await wt.sendKeyEvent(LogicalKeyboardKey.escape); + await wt.pumpAndSettle(); + + expect(find.descriptionPopover(), findsNothing); +} diff --git a/playground/frontend/integration_test/miscellaneous_ui/enjoy_playground_test.dart b/playground/frontend/integration_test/miscellaneous_ui/enjoy_playground_test.dart new file mode 100644 index 000000000000..a69d9eac115f --- /dev/null +++ b/playground/frontend/integration_test/miscellaneous_ui/enjoy_playground_test.dart @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:flutter_test/flutter_test.dart'; +import 'package:playground/modules/analytics/analytics_event.dart'; +import 'package:playground/modules/analytics/analytics_events.dart'; +import 'package:playground/modules/analytics/analytics_service.dart'; + +import '../common/common_finders.dart'; + +Future checkEnjoyPlayground(WidgetTester wt) async { + await _checkEnjoyingAndSendFeedback(wt); + await _checkNotEnjoyingAndSendFeedback(wt); + await _checkNotEnjoyingAndClose(wt); +} + +Future _checkNotEnjoyingAndClose(WidgetTester wt) async { + await wt.tap(find.feedbackThumbDown()); + await wt.pumpAndSettle(); + + expect(find.feedbackDropdownContent(), findsOneWidget); + + await wt.tap(find.feedbackDropdownCancelButton()); + await wt.pumpAndSettle(); + + expect(find.feedbackDropdownContent(), findsNothing); +} + +Future _checkEnjoyingAndSendFeedback(WidgetTester wt) async { + expect(find.feedbackDropdownContent(), findsNothing); + + await wt.tap(find.feedbackThumbUp()); + await wt.pumpAndSettle(); + + expect(find.feedbackDropdownContent(), findsOneWidget); + + const text = 'This is enjoying text'; + await wt.enterText(find.feedbackDropdownTextField(), text); + await wt.pumpAndSettle(); + + expect(find.text(text), findsOneWidget); + + await wt.tap(find.feedbackDropdownSendButton()); + await wt.pumpAndSettle(); + + final context = wt.element(find.feedbackThumbUp()); + final lastSentEvent = AnalyticsService.get(context).lastSentEvent; + expect( + lastSentEvent, + AnalyticsEvent( + category: kFeedbackCategory, + action: kClickSendFeedbackEvent, + label: text, + ), + ); + + expect(find.feedbackDropdownContent(), findsNothing); +} + +Future _checkNotEnjoyingAndSendFeedback(WidgetTester wt) async { + await wt.tap(find.feedbackThumbDown()); + await wt.pumpAndSettle(); + + expect(find.feedbackDropdownContent(), findsOneWidget); + + const text = 'This is not enjoying text'; + await wt.enterText(find.feedbackDropdownTextField(), text); + await wt.pumpAndSettle(); + + expect(find.text(text), findsOneWidget); + + await wt.tap(find.feedbackDropdownSendButton()); + await wt.pumpAndSettle(); + + final context = wt.element(find.feedbackThumbDown()); + final lastSentEvent = AnalyticsService.get(context).lastSentEvent; + expect( + lastSentEvent, + AnalyticsEvent( + category: kFeedbackCategory, + action: kClickSendFeedbackEvent, + label: text, + ), + ); + + expect(find.feedbackDropdownContent(), findsNothing); +} diff --git a/playground/frontend/integration_test/miscellaneous_ui/output_placement_test.dart b/playground/frontend/integration_test/miscellaneous_ui/output_placement_test.dart new file mode 100644 index 000000000000..d0c639d65be5 --- /dev/null +++ b/playground/frontend/integration_test/miscellaneous_ui/output_placement_test.dart @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:flutter/widgets.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:playground/modules/output/models/output_placement.dart'; +import 'package:playground_components_dev/playground_components_dev.dart'; + +import '../common/common.dart'; +import '../common/common_finders.dart'; + +Future checkOutputPlacement(WidgetTester wt) async { + Offset getCodeAreaCenter() => wt.getCenter(find.codeTextAreaWrapper()); + Offset getOutputCenter() => wt.getCenter(find.outputWidget()); + + await wt.tap(find.byKey(const ValueKey(OutputPlacement.left))); + await wt.pumpAndSettle(); + expect(getCodeAreaCenter().dx > getOutputCenter().dx, true); + expectSimilar(getCodeAreaCenter().dy, getOutputCenter().dy); + + await wt.tap(find.byKey(const ValueKey(OutputPlacement.right))); + await wt.pumpAndSettle(); + expect(getCodeAreaCenter().dx < getOutputCenter().dx, true); + expectSimilar(getCodeAreaCenter().dy, getOutputCenter().dy); + + await wt.tap(find.byKey(const ValueKey(OutputPlacement.bottom))); + await wt.pumpAndSettle(); + expect(getCodeAreaCenter().dy < getOutputCenter().dy, true); + expectSimilar(getCodeAreaCenter().dx, getOutputCenter().dx); +} diff --git a/playground/frontend/integration_test/miscellaneous_ui/resize_output_test.dart b/playground/frontend/integration_test/miscellaneous_ui/resize_output_test.dart new file mode 100644 index 000000000000..ae2daaa54224 --- /dev/null +++ b/playground/frontend/integration_test/miscellaneous_ui/resize_output_test.dart @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import 'package:flutter/widgets.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:playground/modules/output/models/output_placement.dart'; +import 'package:playground_components/playground_components.dart'; +import 'package:playground_components_dev/playground_components_dev.dart'; + +import '../common/common.dart'; +import '../common/common_finders.dart'; + +Future checkResizeOutput(WidgetTester wt) async { + final dragHandleStartPosition = wt.getCenter(find.dragHandle()); + Future resetSplitViewRatio() async { + final currentPosition = wt.getCenter(find.dragHandle()); + final offset = dragHandleStartPosition - currentPosition; + await wt.drag(find.dragHandle(), offset); + await wt.pumpAndSettle(); + } + + await _checkDragVertically(wt); + await resetSplitViewRatio(); + + await _checkExcessivelyDragVertically(wt); + await resetSplitViewRatio(); + + await wt.tap(find.byKey(const ValueKey(OutputPlacement.left))); + await wt.pumpAndSettle(); + + await _checkDragHorizontally(wt); + await resetSplitViewRatio(); + + await _checkExcessivelyDragHorizontally(wt); + await resetSplitViewRatio(); +} + +Future _checkDragVertically(WidgetTester wt) async { + final height = wt.getSize(find.splitView()).height; + var dragHandlePosition = wt.getCenter(find.dragHandle()); + + await wt.drag(find.dragHandle(), Offset(0, height * 0.1)); + await wt.pumpAndSettle(); + + var newPosition = wt.getCenter(find.dragHandle()); + expectSimilar(newPosition.dy, dragHandlePosition.dy + height * 0.1); +} + +Future _checkExcessivelyDragVertically(WidgetTester wt) async { + final height = wt.getSize(find.splitView()).height; + final dragHandlePosition = wt.getCenter(find.dragHandle()); + + await wt.drag(find.dragHandle(), Offset(0, height * 0.9)); + await wt.pumpAndSettle(); + + final newPosition = wt.getCenter(find.dragHandle()); + final maxDy = height * (maxRatio - defaultRatio); + expectSimilar( + newPosition.dy, + dragHandlePosition.dy + maxDy, + ); +} + +Future _checkDragHorizontally(WidgetTester wt) async { + final width = wt.getSize(find.splitView()).width; + final dragHandlePosition = wt.getCenter(find.dragHandle()); + + await wt.drag(find.dragHandle(), Offset(width * 0.1, 0)); + await wt.pumpAndSettle(); + + final newPosition = wt.getCenter(find.dragHandle()); + expectSimilar(newPosition.dx, dragHandlePosition.dx + width * 0.1); +} + +Future _checkExcessivelyDragHorizontally(WidgetTester wt) async { + final width = wt.getSize(find.splitView()).width; + final dragHandlePosition = wt.getCenter(find.dragHandle()); + + await wt.drag(find.dragHandle(), Offset(width * 0.9, 0)); + await wt.pumpAndSettle(); + + final newPosition = wt.getCenter(find.dragHandle()); + final maxDx = width * (maxRatio - defaultRatio); + expectSimilar( + newPosition.dx, + dragHandlePosition.dx + maxDx, + ); +} diff --git a/playground/frontend/integration_test/miscellaneous_ui/shortcuts_modal_test.dart b/playground/frontend/integration_test/miscellaneous_ui/shortcuts_modal_test.dart new file mode 100644 index 000000000000..e12752abef3c --- /dev/null +++ b/playground/frontend/integration_test/miscellaneous_ui/shortcuts_modal_test.dart @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:flutter/services.dart'; +import 'package:flutter_gen/gen_l10n/app_localizations.dart'; +import 'package:flutter_test/flutter_test.dart'; + +import '../common/common_finders.dart'; + +Future checkShortcutsModal(WidgetTester wt) async { + expect(find.shortcutsModal(), findsNothing); + + AppLocalizations appLocale = + AppLocalizations.of(wt.element(find.moreActions()))!; + + await wt.tap(find.moreActions()); + await wt.pumpAndSettle(); + + expect(find.text(appLocale.shortcuts), findsOneWidget); + + await wt.tap(find.text(appLocale.shortcuts)); + await wt.pumpAndSettle(); + + expect(find.shortcutsModal(), findsOneWidget); + + await wt.tap(find.text(appLocale.close)); + await wt.pumpAndSettle(); + + expect(find.shortcutsModal(), findsNothing); + + await wt.sendKeyEvent(LogicalKeyboardKey.escape); + await wt.pumpAndSettle(); +} diff --git a/playground/frontend/integration_test/miscellaneous_ui/toggle_brightness_mode_test.dart b/playground/frontend/integration_test/miscellaneous_ui/toggle_brightness_mode_test.dart new file mode 100644 index 000000000000..16bb9a3cc33d --- /dev/null +++ b/playground/frontend/integration_test/miscellaneous_ui/toggle_brightness_mode_test.dart @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:flutter/material.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:playground_components_dev/playground_components_dev.dart'; + +Future checkToggleBrightnessMode(WidgetTester wt) async { + Brightness getBrightness() { + return Theme.of(wt.element(find.toggleThemeButton())).brightness; + } + + Future toggleTheme() async { + await wt.tap(find.toggleThemeButton()); + await wt.pumpAndSettle(); + } + + final startBrightness = getBrightness(); + final invertedBrightness = + startBrightness == Brightness.light ? 
Brightness.dark : Brightness.light; + + await toggleTheme(); + expect(getBrightness(), invertedBrightness); + await toggleTheme(); + expect(getBrightness(), startBrightness); +} diff --git a/playground/frontend/integration_test/standalone_change_example_sdk_run_test.dart b/playground/frontend/integration_test/standalone_change_example_sdk_run_test.dart new file mode 100644 index 000000000000..f7b601c877ea --- /dev/null +++ b/playground/frontend/integration_test/standalone_change_example_sdk_run_test.dart @@ -0,0 +1,174 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:flutter_test/flutter_test.dart'; +import 'package:highlight/languages/java.dart'; +import 'package:highlight/languages/python.dart'; +import 'package:integration_test/integration_test.dart'; +import 'package:playground_components/playground_components.dart'; +import 'package:playground_components_dev/playground_components_dev.dart'; + +import 'common/common.dart'; +import 'common/common_finders.dart'; + +const _outputPrefix = 'The processing has started\n'; + +void main() { + IntegrationTestWidgetsFlutterBinding.ensureInitialized(); + + /// Runs and expects that the execution is as fast as it should be for cache. 
+ Future runExpectCached(WidgetTester wt) async { + final dateTimeStart = DateTime.now(); + + await wt.tap(find.runOrCancelButton()); + await wt.pumpAndSettle(); + + expect( + DateTime.now().difference(dateTimeStart), + lessThan(const Duration(milliseconds: 2000)), + ); + } + + Future expectJavaMinimalWordCount(WidgetTester wt) async { + expect( + wt.findOneCodeController().lastTextSpan!.toPlainText().isAsIfCutFrom( + await Examples.getVisibleTextByPath( + ExamplePaths.javaMinimalWordCount, + java, + ), + ), + true, + ); + + expect(find.graphTab(), findsOneWidget); + expect(find.resultTab(), findsOneWidget); + expect(wt.findOutputTabController().index, 0); + } + + Future changeToJavaAggregationMax(WidgetTester wt) async { + await wt.tap(find.exampleSelector()); + await wt.pumpAndSettle(); + + await wt.tap(find.exampleItemInDropdown(ExampleNames.aggregationMax)); + await wt.pumpAndSettle(); + + expect( + wt.findOneCodeController().lastTextSpan!.toPlainText().isAsIfCutFrom( + await Examples.getVisibleTextByPath( + ExamplePaths.javaAggregationMax, + java, + ), + ), + true, + ); + } + + Future runExpectJavaAggregationMax(WidgetTester wt) async { + await runExpectCached(wt); + expectOutputEndsWith(ExampleOutputs.javaAggregationMaxTail, wt); + } + + Future runCustomJava(WidgetTester wt) async { + const text = 'OK'; + const code = ''' +public class MyClass { + public static void main(String[] args) { + System.out.print("$text"); + } +} +'''; + + await wt.enterText(find.codeField(), code); + await wt.pumpAndSettle(); + + await wt.tap(find.runOrCancelButton()); + await wt.pumpAndSettle(); + + expectOutput('$_outputPrefix$text', wt); + } + + Future switchToPython(WidgetTester wt) async { + await wt.tap(find.sdkSelector()); + await wt.pumpAndSettle(); + + await wt.tap(find.sdkItemInDropdown(Sdk.python)); + await wt.pumpAndSettle(); + + expect( + wt.findOneCodeController().lastTextSpan!.toPlainText().isAsIfCutFrom( + await Examples.getVisibleTextByPath( + ExamplePaths.pythonMinimalWordCountWithMetrics, + python, + ), + ), + true, + ); + } + + Future changeToPythonAggregationMean(WidgetTester wt) async { + await wt.tap(find.exampleSelector()); + await wt.pumpAndSettle(); + + await wt.tap(find.exampleItemInDropdown(ExampleNames.aggregationMean)); + await wt.pumpAndSettle(); + + // Cannot test this because the DB examples differ from GitHub now. + // TODO(alexeyinkin): Uncomment when DB is up-to-date. 
+ // expect( + // wt.findOneCodeController().lastTextSpan!.toPlainText().isAsIfCutFrom( + // await Examples.getVisibleTextByPath( + // ExamplePaths.pythonAggregationMean, + // python, + // ), + // ), + // true, + // ); + } + + Future runExpectPythonAggregationMean(WidgetTester wt) async { + await runExpectCached(wt); + expectOutputContains(ExampleOutputs.pythonAggregationMeanContains, wt); + } + + Future runCustomPython(WidgetTester wt) async { + const text = 'OK'; + const code = 'print("$text", end="")'; + + await wt.enterText(find.codeField(), code); + await wt.pumpAndSettle(); + + await wt.tap(find.runOrCancelButton()); + await wt.pumpAndSettle(); + + expectOutput('$_outputPrefix$text', wt); + } + + testWidgets('Change example, change SDK, run', (WidgetTester wt) async { + await init(wt); + + await expectJavaMinimalWordCount(wt); + await changeToJavaAggregationMax(wt); + await runExpectJavaAggregationMax(wt); + await runCustomJava(wt); + + await switchToPython(wt); + await changeToPythonAggregationMean(wt); + await runExpectPythonAggregationMean(wt); + await runCustomPython(wt); + }); +} diff --git a/playground/frontend/integration_test/standalone_miscellaneous_ui_test.dart b/playground/frontend/integration_test/standalone_miscellaneous_ui_test.dart new file mode 100644 index 000000000000..82bfd7175df6 --- /dev/null +++ b/playground/frontend/integration_test/standalone_miscellaneous_ui_test.dart @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import 'package:flutter_test/flutter_test.dart'; +import 'package:integration_test/integration_test.dart'; + +import 'common/common.dart'; +import 'miscellaneous_ui/description_test.dart'; +import 'miscellaneous_ui/enjoy_playground_test.dart'; +import 'miscellaneous_ui/output_placement_test.dart'; +import 'miscellaneous_ui/resize_output_test.dart'; +import 'miscellaneous_ui/shortcuts_modal_test.dart'; +import 'miscellaneous_ui/toggle_brightness_mode_test.dart'; + +void main() { + IntegrationTestWidgetsFlutterBinding.ensureInitialized(); + testWidgets( + 'Check UI, not connected with running examples', + (WidgetTester wt) async { + await init(wt); + + await checkEnjoyPlayground(wt); + await checkDescription(wt); + await checkOutputPlacement(wt); + await checkResizeOutput(wt); + await checkShortcutsModal(wt); + await checkToggleBrightnessMode(wt); + }, + ); +} diff --git a/playground/frontend/lib/components/banner/banner_button.dart b/playground/frontend/lib/components/banner/banner_button.dart index 78e7002cf8f7..f0a9ddf835cc 100644 --- a/playground/frontend/lib/components/banner/banner_button.dart +++ b/playground/frontend/lib/components/banner/banner_button.dart @@ -19,8 +19,9 @@ import 'package:aligned_dialog/aligned_dialog.dart'; import 'package:flutter/material.dart'; import 'package:flutter_svg/flutter_svg.dart'; -import 'package:playground/components/banner/banner_description.dart'; -import 'package:playground/constants/assets.dart'; + +import '../../src/assets/assets.gen.dart'; +import 'banner_description.dart'; class BannerButton extends StatelessWidget { const BannerButton({Key? key}) : super(key: key); @@ -38,7 +39,7 @@ class BannerButton extends StatelessWidget { barrierColor: Colors.transparent, ); }, - child: SvgPicture.asset(kBeamIconAsset), + child: SvgPicture.asset(Assets.beam.path), ); } } diff --git a/playground/frontend/lib/components/dropdown_button/dropdown_button.dart b/playground/frontend/lib/components/dropdown_button/dropdown_button.dart index c2aff6f2f950..17a0d692f92d 100644 --- a/playground/frontend/lib/components/dropdown_button/dropdown_button.dart +++ b/playground/frontend/lib/components/dropdown_button/dropdown_button.dart @@ -37,7 +37,7 @@ enum DropdownAlignment { class AppDropdownButton extends StatefulWidget { final Widget buttonText; final Widget Function(void Function()) createDropdown; - final double height; + final double? height; final double width; final Widget? 
leading; final bool showArrow; @@ -47,8 +47,8 @@ class AppDropdownButton extends StatefulWidget { super.key, required this.buttonText, required this.createDropdown, - required this.height, required this.width, + this.height, this.leading, this.showArrow = true, this.dropdownAlign = DropdownAlignment.left, diff --git a/playground/frontend/lib/components/logo/logo_component.dart b/playground/frontend/lib/components/logo/logo_component.dart index b14f09614106..d65321aa34b1 100644 --- a/playground/frontend/lib/components/logo/logo_component.dart +++ b/playground/frontend/lib/components/logo/logo_component.dart @@ -17,10 +17,11 @@ */ import 'package:flutter/material.dart'; -import 'package:playground/constants/assets.dart'; -import 'package:playground/constants/font_weight.dart'; -import 'package:playground/constants/fonts.dart'; -import 'package:playground/constants/sizes.dart'; + +import '../../constants/font_weight.dart'; +import '../../constants/fonts.dart'; +import '../../constants/sizes.dart'; +import '../../src/assets/assets.gen.dart'; const double kTitleFontSize = 18; @@ -33,8 +34,8 @@ class Logo extends StatelessWidget { return Row( mainAxisSize: MainAxisSize.min, children: [ - const Image( - image: AssetImage(kBeamLgIconAsset), + Image( + image: AssetImage(Assets.beamLg.path), width: kIconSizeLg, height: kIconSizeLg, ), diff --git a/playground/frontend/lib/constants/assets.dart b/playground/frontend/lib/constants/assets.dart deleted file mode 100644 index 796a97daa87f..000000000000 --- a/playground/frontend/lib/constants/assets.dart +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -const kThemeIconAsset = 'theme.svg'; -const kResetIconAsset = 'reset.svg'; -const kOutputBottomIconAsset = 'output_bottom.svg'; -const kOutputRightIconAsset = 'output_right.svg'; -const kOutputLeftIconAsset = 'output_left.svg'; -const kShortcutsIconAsset = 'shortcuts.svg'; -const kGithubIconAsset = 'github.svg'; -const kBeamIconAsset = 'beam.png'; -const kBeamLgIconAsset = 'beam_lg.png'; -const kThumbUpIconAsset = 'thumb_up.svg'; -const kThumbUpIconAssetFilled = 'thumb_up_filled.svg'; -const kThumbDownIconAsset = 'thumb_down.svg'; -const kThumbDownIconAssetFilled = 'thumb_down_filled.svg'; -const kCopyIconAsset = 'copy.svg'; -const kLinkIconAsset = 'link.svg'; -const kDragHorizontalIconAsset = 'drag_horizontal.svg'; -const kDragVerticalIconAsset = 'drag_vertical.svg'; -const kMultifileIconAsset = 'multifile.svg'; - -// notifications icons -const kErrorNotificationIconAsset = 'error_notification.svg'; -const kWarningNotificationIconAsset = 'warning_notification.svg'; -const kSuccessNotificationIconAsset = 'success_notification.svg'; -const kInfoNotificationIconAsset = 'info_notification.svg'; diff --git a/playground/frontend/lib/main.dart b/playground/frontend/lib/main.dart index ead9321f5a12..f65bf8aa916f 100644 --- a/playground/frontend/lib/main.dart +++ b/playground/frontend/lib/main.dart @@ -42,7 +42,7 @@ void main() async { // Router API specific initialization. final pageStack = GetIt.instance.get(); - final routerDelegate = PageStackRouterDelegate(pageStack); + final routerDelegate = BeamRouterDelegate(pageStack); final routeInformationParser = PlaygroundRouteInformationParser(); final backButtonDispatcher = PageStackBackButtonDispatcher(pageStack); diff --git a/playground/frontend/lib/modules/analytics/analytics_event.dart b/playground/frontend/lib/modules/analytics/analytics_event.dart new file mode 100644 index 000000000000..d82169e0b7bd --- /dev/null +++ b/playground/frontend/lib/modules/analytics/analytics_event.dart @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:equatable/equatable.dart'; + +class AnalyticsEvent with EquatableMixin { + final String action; + final String category; + final String? label; + final Map? parameters; + final int? 
value; + + AnalyticsEvent({ + required this.action, + required this.category, + this.label, + this.parameters, + this.value, + }); + + @override + List get props => [ + action, + category, + label, + parameters, + value, + ]; +} diff --git a/playground/frontend/lib/modules/analytics/analytics_service.dart b/playground/frontend/lib/modules/analytics/analytics_service.dart index 0416e7dcc520..90853e9c3783 100644 --- a/playground/frontend/lib/modules/analytics/analytics_service.dart +++ b/playground/frontend/lib/modules/analytics/analytics_service.dart @@ -17,10 +17,13 @@ */ import 'package:flutter/widgets.dart'; +import 'package:playground/modules/analytics/analytics_event.dart'; import 'package:playground_components/playground_components.dart'; import 'package:provider/provider.dart'; abstract class AnalyticsService { + AnalyticsEvent? get lastSentEvent; + static AnalyticsService get(BuildContext context) { return Provider.of(context, listen: false); } diff --git a/playground/frontend/lib/modules/analytics/google_analytics_service.dart b/playground/frontend/lib/modules/analytics/google_analytics_service.dart index 7b083b3bc25e..ab3470708166 100644 --- a/playground/frontend/lib/modules/analytics/google_analytics_service.dart +++ b/playground/frontend/lib/modules/analytics/google_analytics_service.dart @@ -22,9 +22,14 @@ import 'package:playground/modules/analytics/analytics_service.dart'; import 'package:playground_components/playground_components.dart'; import 'package:usage/usage_html.dart'; +import 'analytics_event.dart'; + class GoogleAnalyticsService implements AnalyticsService { final _analytics = AnalyticsHtml(kAnalyticsUA, 'beam', '1.0'); + @override + AnalyticsEvent? lastSentEvent; + @override void trackSelectSdk(Sdk? oldSdk, Sdk newSdk) { safeSendEvent( @@ -137,6 +142,13 @@ class GoogleAnalyticsService implements AnalyticsService { value: value, parameters: parameters, ); + lastSentEvent = AnalyticsEvent( + category: category, + action: action, + label: label, + value: value, + parameters: parameters, + ); } catch (e) { // ignore analytics errors sync they don't affect app print(e); diff --git a/playground/frontend/lib/modules/examples/components/description_popover/description_popover.dart b/playground/frontend/lib/modules/examples/components/description_popover/description_popover.dart index e368973cb64b..929e2006b954 100644 --- a/playground/frontend/lib/modules/examples/components/description_popover/description_popover.dart +++ b/playground/frontend/lib/modules/examples/components/description_popover/description_popover.dart @@ -17,14 +17,15 @@ */ import 'package:flutter/material.dart'; -import 'package:flutter_svg/flutter_svg.dart'; import 'package:flutter_gen/gen_l10n/app_localizations.dart'; -import 'package:playground/constants/assets.dart'; -import 'package:playground/constants/font_weight.dart'; -import 'package:playground/constants/sizes.dart'; +import 'package:flutter_svg/flutter_svg.dart'; import 'package:playground_components/playground_components.dart'; import 'package:url_launcher/url_launcher.dart'; +import '../../../../constants/font_weight.dart'; +import '../../../../constants/sizes.dart'; +import '../../../../src/assets/assets.gen.dart'; + const kDescriptionWidth = 300.0; class DescriptionPopover extends StatelessWidget { @@ -66,7 +67,7 @@ class DescriptionPopover extends StatelessWidget { Widget getViewOnGithub(BuildContext context) { AppLocalizations appLocale = AppLocalizations.of(context)!; return TextButton.icon( - icon: SvgPicture.asset(kGithubIconAsset), 
+ icon: SvgPicture.asset(Assets.github), onPressed: () { launchUrl(Uri.parse(example.link ?? '')); }, diff --git a/playground/frontend/lib/modules/examples/components/example_list/example_item_actions.dart b/playground/frontend/lib/modules/examples/components/example_list/example_item_actions.dart index 30b67fa0edd3..df0c8a837501 100644 --- a/playground/frontend/lib/modules/examples/components/example_list/example_item_actions.dart +++ b/playground/frontend/lib/modules/examples/components/example_list/example_item_actions.dart @@ -16,13 +16,17 @@ * limitations under the License. */ +import 'package:easy_localization/easy_localization.dart'; import 'package:flutter/material.dart'; -import 'package:playground/modules/examples/components/description_popover/description_popover_button.dart'; -import 'package:playground/modules/examples/components/multifile_popover/multifile_popover_button.dart'; -import 'package:playground/modules/examples/models/popover_state.dart'; +import 'package:flutter_svg/flutter_svg.dart'; import 'package:playground_components/playground_components.dart'; import 'package:provider/provider.dart'; +import '../../../../src/assets/assets.gen.dart'; +import '../../models/popover_state.dart'; +import '../description_popover/description_popover_button.dart'; +import '../multifile_popover/multifile_popover_button.dart'; + class ExampleItemActions extends StatelessWidget { final ExampleBase example; final BuildContext parentContext; @@ -36,6 +40,7 @@ class ExampleItemActions extends StatelessWidget { return Row( children: [ if (example.isMultiFile) multifilePopover, + if (example.usesEmulatedData) const _EmulatedDataIcon(), if (example.complexity != null) ComplexityWidget(complexity: example.complexity!), descriptionPopover, @@ -65,3 +70,21 @@ class ExampleItemActions extends StatelessWidget { Provider.of(context, listen: false).setOpen(isOpen); } } + +class _EmulatedDataIcon extends StatelessWidget { + const _EmulatedDataIcon(); + + @override + Widget build(BuildContext context) { + return Padding( + padding: const EdgeInsets.only(right: 8.0), + child: Tooltip( + message: 'intents.playground.usesEmulatedData'.tr(), + child: SvgPicture.asset( + Assets.streaming, + color: Theme.of(context).extension()?.iconColor, + ), + ), + ); + } +} diff --git a/playground/frontend/lib/modules/examples/components/example_list/expansion_panel_item.dart b/playground/frontend/lib/modules/examples/components/example_list/expansion_panel_item.dart index 053968ff09a4..87e792169be1 100644 --- a/playground/frontend/lib/modules/examples/components/example_list/expansion_panel_item.dart +++ b/playground/frontend/lib/modules/examples/components/example_list/expansion_panel_item.dart @@ -47,21 +47,7 @@ class ExpansionPanelItem extends StatelessWidget { if (controller.selectedExample != example) { _closeDropdown(controller.exampleCache); AnalyticsService.get(context).trackSelectExample(example); - final exampleWithInfo = - await controller.exampleCache.loadExampleInfo(example); - // TODO: setCurrentSdk = false when we do - // per-SDK output and run status. - // Now using true to reset the output and run status. 
- // https://github.com/apache/beam/issues/23248 - final descriptor = StandardExampleLoadingDescriptor( - sdk: exampleWithInfo.sdk, - path: exampleWithInfo.path, - ); - controller.setExample( - exampleWithInfo, - descriptor: descriptor, - setCurrentSdk: true, - ); + controller.setExampleBase(example); } }, child: Container( diff --git a/playground/frontend/lib/modules/examples/components/multifile_popover/multifile_popover.dart b/playground/frontend/lib/modules/examples/components/multifile_popover/multifile_popover.dart index c4fb93fa4706..fb1c76fc0c73 100644 --- a/playground/frontend/lib/modules/examples/components/multifile_popover/multifile_popover.dart +++ b/playground/frontend/lib/modules/examples/components/multifile_popover/multifile_popover.dart @@ -17,14 +17,15 @@ */ import 'package:flutter/material.dart'; -import 'package:flutter_svg/flutter_svg.dart'; import 'package:flutter_gen/gen_l10n/app_localizations.dart'; -import 'package:playground/constants/assets.dart'; -import 'package:playground/constants/font_weight.dart'; -import 'package:playground/constants/sizes.dart'; +import 'package:flutter_svg/flutter_svg.dart'; import 'package:playground_components/playground_components.dart'; import 'package:url_launcher/url_launcher.dart'; +import '../../../../constants/font_weight.dart'; +import '../../../../constants/sizes.dart'; +import '../../../../src/assets/assets.gen.dart'; + const kMultifileWidth = 300.0; class MultifilePopover extends StatelessWidget { @@ -52,7 +53,7 @@ class MultifilePopover extends StatelessWidget { ), Text(appLocale.multifileWarning), TextButton.icon( - icon: SvgPicture.asset(kGithubIconAsset), + icon: SvgPicture.asset(Assets.github), onPressed: () { launchUrl(Uri.parse(example.link ?? '')); }, diff --git a/playground/frontend/lib/modules/examples/components/multifile_popover/multifile_popover_button.dart b/playground/frontend/lib/modules/examples/components/multifile_popover/multifile_popover_button.dart index d530aa645ebe..78fbafdf51a9 100644 --- a/playground/frontend/lib/modules/examples/components/multifile_popover/multifile_popover_button.dart +++ b/playground/frontend/lib/modules/examples/components/multifile_popover/multifile_popover_button.dart @@ -18,13 +18,14 @@ import 'package:aligned_dialog/aligned_dialog.dart'; import 'package:flutter/material.dart'; -import 'package:flutter_svg/flutter_svg.dart'; import 'package:flutter_gen/gen_l10n/app_localizations.dart'; -import 'package:playground/constants/assets.dart'; -import 'package:playground/constants/sizes.dart'; -import 'package:playground/modules/examples/components/multifile_popover/multifile_popover.dart'; +import 'package:flutter_svg/flutter_svg.dart'; import 'package:playground_components/playground_components.dart'; +import '../../../../constants/sizes.dart'; +import '../../../../src/assets/assets.gen.dart'; +import 'multifile_popover.dart'; + class MultifilePopoverButton extends StatelessWidget { final BuildContext? 
parentContext; final ExampleBase example; @@ -51,7 +52,7 @@ class MultifilePopoverButton extends StatelessWidget { child: IconButton( iconSize: kIconSizeMd, splashRadius: kIconButtonSplashRadius, - icon: SvgPicture.asset(kMultifileIconAsset), + icon: SvgPicture.asset(Assets.multifile), tooltip: appLocale.exampleMultifile, onPressed: () { _showMultifilePopover( diff --git a/playground/frontend/lib/modules/output/components/output_header/output_placements.dart b/playground/frontend/lib/modules/output/components/output_header/output_placements.dart index f8eaf5710247..d82f0b12b884 100644 --- a/playground/frontend/lib/modules/output/components/output_header/output_placements.dart +++ b/playground/frontend/lib/modules/output/components/output_header/output_placements.dart @@ -42,6 +42,7 @@ class OutputPlacements extends StatelessWidget { '${AppLocalizations.of(context)!.outputPlacementSemantic}' ' ${placement.name(context)}', child: IconButton( + key: ValueKey(placement), splashRadius: kIconButtonSplashRadius, icon: SvgPicture.asset( placement.icon, diff --git a/playground/frontend/lib/modules/output/models/output_placement.dart b/playground/frontend/lib/modules/output/models/output_placement.dart index 642553751ccd..f7db5ea1900e 100644 --- a/playground/frontend/lib/modules/output/models/output_placement.dart +++ b/playground/frontend/lib/modules/output/models/output_placement.dart @@ -18,7 +18,8 @@ import 'package:flutter/cupertino.dart'; import 'package:flutter_gen/gen_l10n/app_localizations.dart'; -import 'package:playground/constants/assets.dart'; + +import '../../../src/assets/assets.gen.dart'; enum OutputPlacement { right, @@ -27,9 +28,7 @@ enum OutputPlacement { ; Axis get graphDirection { - return this == OutputPlacement.bottom - ? Axis.horizontal - : Axis.vertical; + return this == OutputPlacement.bottom ? Axis.horizontal : Axis.vertical; } } @@ -37,11 +36,11 @@ extension OutputPlacementToIcon on OutputPlacement { String get icon { switch (this) { case OutputPlacement.bottom: - return kOutputBottomIconAsset; + return Assets.outputBottom; case OutputPlacement.right: - return kOutputRightIconAsset; + return Assets.outputRight; case OutputPlacement.left: - return kOutputLeftIconAsset; + return Assets.outputLeft; } } } diff --git a/playground/frontend/lib/modules/sdk/components/sdk_selector.dart b/playground/frontend/lib/modules/sdk/components/sdk_selector.dart index c4c1656a1748..8bba9ed7e6aa 100644 --- a/playground/frontend/lib/modules/sdk/components/sdk_selector.dart +++ b/playground/frontend/lib/modules/sdk/components/sdk_selector.dart @@ -18,26 +18,23 @@ import 'package:flutter/material.dart'; import 'package:flutter_gen/gen_l10n/app_localizations.dart'; -import 'package:playground/components/dropdown_button/dropdown_button.dart'; -import 'package:playground/constants/sizes.dart'; -import 'package:playground/modules/sdk/components/sdk_selector_row.dart'; import 'package:playground_components/playground_components.dart'; import 'package:provider/provider.dart'; -const kEmptyExampleName = 'Catalog'; +import '../../../components/dropdown_button/dropdown_button.dart'; +import '../../../constants/sizes.dart'; +import 'sdk_selector_row.dart'; -const double kWidth = 150; -const double kHeight = 172; +const double _width = 150; class SDKSelector extends StatelessWidget { - final Sdk? value; final ValueChanged onChanged; + final Sdk? value; const SDKSelector({ - Key? 
key, - required this.value, required this.onChanged, - }) : super(key: key); + required this.value, + }); @override Widget build(BuildContext context) { @@ -68,10 +65,10 @@ class SDKSelector extends StatelessWidget { ), ); }), + const SizedBox(height: kMdSpacing), ], ), - width: kWidth, - height: kHeight, + width: _width, ), ), ); diff --git a/playground/frontend/lib/modules/sdk/components/sdk_selector_row.dart b/playground/frontend/lib/modules/sdk/components/sdk_selector_row.dart index 7993723bf25d..1039078b15e6 100644 --- a/playground/frontend/lib/modules/sdk/components/sdk_selector_row.dart +++ b/playground/frontend/lib/modules/sdk/components/sdk_selector_row.dart @@ -25,11 +25,10 @@ class SdkSelectorRow extends StatelessWidget { final Sdk sdk; final VoidCallback onSelect; - const SdkSelectorRow({ - Key? key, + SdkSelectorRow({ required this.sdk, required this.onSelect, - }) : super(key: key); + }) : super(key: ValueKey(sdk)); @override Widget build(BuildContext context) { diff --git a/playground/frontend/lib/modules/shortcuts/components/shortcut_row.dart b/playground/frontend/lib/modules/shortcuts/components/shortcut_row.dart index 27cd34357038..c68a1d95cbb8 100644 --- a/playground/frontend/lib/modules/shortcuts/components/shortcut_row.dart +++ b/playground/frontend/lib/modules/shortcuts/components/shortcut_row.dart @@ -31,15 +31,17 @@ class ShortcutRow extends StatelessWidget { // wrap with row to shrink container to child size return Row( children: [ - Container( - decoration: BoxDecoration( - border: Border.all(color: primaryColor), - borderRadius: BorderRadius.circular(kSmBorderRadius), - ), - padding: const EdgeInsets.all(kMdSpacing), - child: Text( - shortcut.title, - style: TextStyle(color: primaryColor), + Flexible( + child: Container( + decoration: BoxDecoration( + border: Border.all(color: primaryColor), + borderRadius: BorderRadius.circular(kSmBorderRadius), + ), + padding: const EdgeInsets.all(kMdSpacing), + child: Text( + shortcut.title, + style: TextStyle(color: primaryColor), + ), ), ), ], diff --git a/playground/frontend/lib/modules/shortcuts/components/shortcuts_modal.dart b/playground/frontend/lib/modules/shortcuts/components/shortcuts_modal.dart index 1f334327f6b0..f4332e55ebd4 100644 --- a/playground/frontend/lib/modules/shortcuts/components/shortcuts_modal.dart +++ b/playground/frontend/lib/modules/shortcuts/components/shortcuts_modal.dart @@ -64,6 +64,7 @@ class ShortcutsModal extends StatelessWidget { crossAxisAlignment: CrossAxisAlignment.center, children: [ Expanded(child: ShortcutRow(shortcut: shortcut)), + const SizedBox(width: kMdSpacing), Expanded( flex: 3, child: Text( diff --git a/playground/frontend/lib/pages/embedded_playground/screen.dart b/playground/frontend/lib/pages/embedded_playground/screen.dart index 0e85c7678aa5..05d39ce3e33f 100644 --- a/playground/frontend/lib/pages/embedded_playground/screen.dart +++ b/playground/frontend/lib/pages/embedded_playground/screen.dart @@ -41,21 +41,19 @@ class EmbeddedPlaygroundScreen extends StatelessWidget { playgroundController: notifier.playgroundController, child: PlaygroundShortcutsManager( playgroundController: notifier.playgroundController, - child: ToastListenerWidget( - child: Scaffold( - appBar: AppBar( - automaticallyImplyLeading: false, - title: const EmbeddedAppBarTitle(), - actions: const [EmbeddedActions()], - ), - body: EmbeddedSplitView( - first: EmbeddedEditor(isEditable: notifier.isEditable), - second: Container( - color: Theme.of(context).backgroundColor, - child: OutputWidget( - 
playgroundController: notifier.playgroundController, - graphDirection: Axis.horizontal, - ), + child: Scaffold( + appBar: AppBar( + automaticallyImplyLeading: false, + title: const EmbeddedAppBarTitle(), + actions: const [EmbeddedActions()], + ), + body: EmbeddedSplitView( + first: EmbeddedEditor(isEditable: notifier.isEditable), + second: Container( + color: Theme.of(context).backgroundColor, + child: OutputWidget( + playgroundController: notifier.playgroundController, + graphDirection: Axis.horizontal, ), ), ), diff --git a/playground/frontend/lib/pages/embedded_playground/widgets/embedded_actions.dart b/playground/frontend/lib/pages/embedded_playground/widgets/embedded_actions.dart index c66dff3ba92b..79c7dc587840 100644 --- a/playground/frontend/lib/pages/embedded_playground/widgets/embedded_actions.dart +++ b/playground/frontend/lib/pages/embedded_playground/widgets/embedded_actions.dart @@ -26,9 +26,9 @@ import 'package:flutter_svg/flutter_svg.dart'; import 'package:playground_components/playground_components.dart'; import 'package:provider/provider.dart'; -import '../../../constants/assets.dart'; import '../../../constants/sizes.dart'; import '../../../modules/messages/models/set_content_message.dart'; +import '../../../src/assets/assets.gen.dart'; import '../../../utils/javascript_post_message.dart'; import '../../standalone_playground/path.dart'; @@ -47,7 +47,7 @@ class EmbeddedActions extends StatelessWidget { height: kTryPlaygroundButtonHeight, child: Consumer( builder: (context, controller, child) => ElevatedButton.icon( - icon: SvgPicture.asset(kLinkIconAsset), + icon: SvgPicture.asset(Assets.link), label: Text(AppLocalizations.of(context)!.tryInPlayground), onPressed: () => _openStandalonePlayground(controller), ), diff --git a/playground/frontend/lib/pages/embedded_playground/widgets/embedded_appbar_title.dart b/playground/frontend/lib/pages/embedded_playground/widgets/embedded_appbar_title.dart index 112bea826ddb..75443f03b5a8 100644 --- a/playground/frontend/lib/pages/embedded_playground/widgets/embedded_appbar_title.dart +++ b/playground/frontend/lib/pages/embedded_playground/widgets/embedded_appbar_title.dart @@ -19,12 +19,13 @@ import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; import 'package:flutter_svg/flutter_svg.dart'; -import 'package:playground/components/playground_run_or_cancel_button.dart'; -import 'package:playground/constants/assets.dart'; -import 'package:playground/constants/sizes.dart'; import 'package:playground_components/playground_components.dart'; import 'package:provider/provider.dart'; +import '../../../components/playground_run_or_cancel_button.dart'; +import '../../../constants/sizes.dart'; +import '../../../src/assets/assets.gen.dart'; + class EmbeddedAppBarTitle extends StatelessWidget { const EmbeddedAppBarTitle({Key? 
key}) : super(key: key); @@ -40,7 +41,7 @@ class EmbeddedAppBarTitle extends StatelessWidget { IconButton( iconSize: kIconSizeLg, splashRadius: kIconButtonSplashRadius, - icon: SvgPicture.asset(kCopyIconAsset), + icon: SvgPicture.asset(Assets.copy), onPressed: () { final source = controller.source; Clipboard.setData(ClipboardData(text: source)); diff --git a/playground/frontend/lib/pages/embedded_playground/widgets/embedded_editor.dart b/playground/frontend/lib/pages/embedded_playground/widgets/embedded_editor.dart index ac319426c79a..b117adac24ba 100644 --- a/playground/frontend/lib/pages/embedded_playground/widgets/embedded_editor.dart +++ b/playground/frontend/lib/pages/embedded_playground/widgets/embedded_editor.dart @@ -30,7 +30,7 @@ class EmbeddedEditor extends StatelessWidget { final controller = Provider.of(context); final snippetController = controller.snippetEditingController; - if (snippetController == null) { + if (snippetController == null || snippetController.isLoading) { return const LoadingIndicator(); } diff --git a/playground/frontend/lib/pages/standalone_playground/screen.dart b/playground/frontend/lib/pages/standalone_playground/screen.dart index d19960e32c18..1aace1bfb940 100644 --- a/playground/frontend/lib/pages/standalone_playground/screen.dart +++ b/playground/frontend/lib/pages/standalone_playground/screen.dart @@ -95,16 +95,14 @@ class StandalonePlaygroundScreen extends StatelessWidget { ), ], ), - body: ToastListenerWidget( - child: Column( - children: [ - const Expanded(child: PlaygroundPageBody()), - Semantics( - container: true, - child: const PlaygroundPageFooter(), - ), - ], - ), + body: Column( + children: [ + const Expanded(child: PlaygroundPageBody()), + Semantics( + container: true, + child: const PlaygroundPageFooter(), + ), + ], ), ); }, diff --git a/playground/frontend/lib/pages/standalone_playground/widgets/editor_textarea_wrapper.dart b/playground/frontend/lib/pages/standalone_playground/widgets/editor_textarea_wrapper.dart index 57c1ba3fa70e..5ee81cb05cf3 100644 --- a/playground/frontend/lib/pages/standalone_playground/widgets/editor_textarea_wrapper.dart +++ b/playground/frontend/lib/pages/standalone_playground/widgets/editor_textarea_wrapper.dart @@ -19,7 +19,6 @@ import 'package:flutter/material.dart'; import 'package:flutter_gen/gen_l10n/app_localizations.dart'; import 'package:playground_components/playground_components.dart'; -import 'package:provider/provider.dart'; import '../../../components/playground_run_or_cancel_button.dart'; import '../../../constants/sizes.dart'; @@ -29,80 +28,81 @@ import '../../../modules/examples/components/multifile_popover/multifile_popover /// A code editor with controls stacked above it. class CodeTextAreaWrapper extends StatelessWidget { - const CodeTextAreaWrapper({Key? key}) : super(key: key); + final PlaygroundController controller; + + const CodeTextAreaWrapper({ + required this.controller, + }); @override Widget build(BuildContext context) { - return Consumer( - builder: (context, controller, child) { - if (controller.result?.errorMessage?.isNotEmpty ?? false) { - WidgetsBinding.instance.addPostFrameCallback((_) { - _handleError(context, controller); - }); - } + if (controller.result?.errorMessage?.isNotEmpty ?? 
false) { + WidgetsBinding.instance.addPostFrameCallback((_) { + _handleError(context, controller); + }); + } - final snippetController = controller.snippetEditingController; + final snippetController = controller.snippetEditingController; - if (snippetController == null) { - return const LoadingIndicator(); - } + if (snippetController == null) { + return const LoadingIndicator(); + } - return Column( - children: [ - Expanded( - child: Stack( - children: [ - Positioned.fill( - child: SnippetEditor( - controller: snippetController, - isEditable: true, - ), + return Column( + children: [ + Expanded( + child: Stack( + children: [ + Positioned.fill( + child: SnippetEditor( + controller: snippetController, + isEditable: true, ), - Positioned( - right: kXlSpacing, - top: kXlSpacing, - height: kButtonHeight, - child: Row( - children: [ - if (controller.selectedExample != null) ...[ - if (controller.selectedExample?.isMultiFile ?? false) - Semantics( - container: true, - child: MultifilePopoverButton( - example: controller.selectedExample!, - followerAnchor: Alignment.topRight, - targetAnchor: Alignment.bottomRight, - ), - ), + ), + Positioned( + right: kXlSpacing, + top: kXlSpacing, + height: kButtonHeight, + child: Row( + children: [ + if (controller.selectedExample != null) ...[ + if (controller.selectedExample?.isMultiFile ?? false) Semantics( container: true, - child: DescriptionPopoverButton( + child: MultifilePopoverButton( example: controller.selectedExample!, followerAnchor: Alignment.topRight, targetAnchor: Alignment.bottomRight, ), ), - ], Semantics( container: true, - child: ShareButton( - playgroundController: controller, + child: DescriptionPopoverButton( + example: controller.selectedExample!, + followerAnchor: Alignment.topRight, + targetAnchor: Alignment.bottomRight, ), ), - const SizedBox(width: kLgSpacing), - Semantics( - container: true, - child: const PlaygroundRunOrCancelButton(), - ), ], - ), + Semantics( + container: true, + child: ShareButton( + playgroundController: controller, + ), + ), + const SizedBox(width: kLgSpacing), + Semantics( + container: true, + child: const PlaygroundRunOrCancelButton(), + ), + ], ), - ], - ), + ), + ], ), - ], - ); - }); + ), + ], + ); } void _handleError(BuildContext context, PlaygroundController controller) { diff --git a/playground/frontend/lib/pages/standalone_playground/widgets/feedback/feedback_dropdown_content.dart b/playground/frontend/lib/pages/standalone_playground/widgets/feedback/feedback_dropdown_content.dart index 2f161cecc64c..e997a270be19 100644 --- a/playground/frontend/lib/pages/standalone_playground/widgets/feedback/feedback_dropdown_content.dart +++ b/playground/frontend/lib/pages/standalone_playground/widgets/feedback/feedback_dropdown_content.dart @@ -35,6 +35,10 @@ const String kFeedbackContentText = 'Have feedback? We\'d love to hear it,' '\nHave questions? 
Try help or support.'; class FeedbackDropdownContent extends StatelessWidget { + static const textFieldKey = Key('feedbackTextFieldKey'); + static const cancelButtonKey = Key('cancelButtonKey'); + static const sendButtonKey = Key('sendFeedbackButtonKey'); + final void Function() close; final TextEditingController textController; @@ -46,7 +50,8 @@ class FeedbackDropdownContent extends StatelessWidget { @override Widget build(BuildContext context) { - final borderColor = Theme.of(context).extension()!.borderColor; + final borderColor = + Theme.of(context).extension()!.borderColor; final OutlineInputBorder border = OutlineInputBorder( borderSide: BorderSide(color: borderColor), @@ -110,6 +115,7 @@ class FeedbackDropdownContent extends StatelessWidget { child: ClipRRect( borderRadius: BorderRadius.circular(kMdBorderRadius), child: TextFormField( + key: textFieldKey, controller: textController, decoration: InputDecoration( focusedBorder: border, @@ -147,6 +153,7 @@ class FeedbackDropdownContent extends StatelessWidget { ), ), child: TextButton( + key: cancelButtonKey, onPressed: () { close(); textController.clear(); @@ -162,6 +169,7 @@ class FeedbackDropdownContent extends StatelessWidget { borderRadius: BorderRadius.circular(kSmBorderRadius), ), child: ElevatedButton( + key: sendButtonKey, onPressed: () { if (textController.text.isNotEmpty) { AnalyticsService.get(context).trackClickSendFeedback( diff --git a/playground/frontend/lib/pages/standalone_playground/widgets/feedback/playground_feedback.dart b/playground/frontend/lib/pages/standalone_playground/widgets/feedback/playground_feedback.dart index 4faeaaf49208..eec7d69d64ff 100644 --- a/playground/frontend/lib/pages/standalone_playground/widgets/feedback/playground_feedback.dart +++ b/playground/frontend/lib/pages/standalone_playground/widgets/feedback/playground_feedback.dart @@ -20,14 +20,17 @@ import 'package:flutter/material.dart'; import 'package:flutter_gen/gen_l10n/app_localizations.dart'; import 'package:provider/provider.dart'; -import '../../../../constants/assets.dart'; import '../../../../constants/font_weight.dart'; import '../../../../modules/analytics/analytics_service.dart'; +import '../../../../src/assets/assets.gen.dart'; import '../../notifiers/feedback_state.dart'; import 'feedback_dropdown_icon_button.dart'; /// A status bar item for feedback. class PlaygroundFeedback extends StatelessWidget { + static const thumbUpKey = Key('thumbUp'); + static const thumbDownKey = Key('thumbDown'); + const PlaygroundFeedback({Key? 
key}) : super(key: key); @override @@ -42,16 +45,18 @@ class PlaygroundFeedback extends StatelessWidget { style: const TextStyle(fontWeight: kBoldWeight), ), FeedbackDropdownIconButton( + key: thumbUpKey, label: appLocale.enjoying, - iconAsset: kThumbUpIconAsset, - filledIconAsset: kThumbUpIconAssetFilled, + iconAsset: Assets.thumbUp, + filledIconAsset: Assets.thumbUpFilled, onClick: _setEnjoying(context, true), isSelected: isEnjoying != null && isEnjoying, ), FeedbackDropdownIconButton( + key: thumbDownKey, label: appLocale.notEnjoying, - iconAsset: kThumbDownIconAsset, - filledIconAsset: kThumbDownIconAssetFilled, + iconAsset: Assets.thumbDown, + filledIconAsset: Assets.thumbDownFilled, onClick: _setEnjoying(context, false), isSelected: isEnjoying != null && !isEnjoying, ), @@ -62,8 +67,7 @@ class PlaygroundFeedback extends StatelessWidget { _setEnjoying(BuildContext context, bool isEnjoying) { return () { _getFeedbackState(context, false).setEnjoying(isEnjoying); - AnalyticsService.get(context) - .trackClickEnjoyPlayground(isEnjoying); + AnalyticsService.get(context).trackClickEnjoyPlayground(isEnjoying); }; } diff --git a/playground/frontend/lib/pages/standalone_playground/widgets/more_actions.dart b/playground/frontend/lib/pages/standalone_playground/widgets/more_actions.dart index 60f4e0ff9b34..f82fb93707f6 100644 --- a/playground/frontend/lib/pages/standalone_playground/widgets/more_actions.dart +++ b/playground/frontend/lib/pages/standalone_playground/widgets/more_actions.dart @@ -19,13 +19,14 @@ import 'package:flutter/material.dart'; import 'package:flutter_gen/gen_l10n/app_localizations.dart'; import 'package:flutter_svg/flutter_svg.dart'; -import 'package:playground/constants/assets.dart'; -import 'package:playground/constants/links.dart'; -import 'package:playground/modules/analytics/analytics_service.dart'; -import 'package:playground/modules/shortcuts/components/shortcuts_modal.dart'; import 'package:playground_components/playground_components.dart'; import 'package:url_launcher/url_launcher.dart'; +import '../../../constants/links.dart'; +import '../../../modules/analytics/analytics_service.dart'; +import '../../../modules/shortcuts/components/shortcuts_modal.dart'; +import '../../../src/assets/assets.gen.dart'; + enum HeaderAction { shortcuts, beamPlaygroundGithub, @@ -63,7 +64,7 @@ class _MoreActionsState extends State { padding: EdgeInsets.zero, value: HeaderAction.shortcuts, child: ListTile( - leading: SvgPicture.asset(kShortcutsIconAsset), + leading: SvgPicture.asset(Assets.shortcuts), title: Text(appLocale.shortcuts), onTap: () { AnalyticsService.get(context).trackOpenShortcutsModal(); @@ -80,7 +81,7 @@ class _MoreActionsState extends State { padding: EdgeInsets.zero, value: HeaderAction.beamPlaygroundGithub, child: ListTile( - leading: SvgPicture.asset(kGithubIconAsset), + leading: SvgPicture.asset(Assets.github), title: Text(appLocale.beamPlaygroundOnGithub), onTap: () => _openLink(kBeamPlaygroundGithubLink, context), ), @@ -89,7 +90,7 @@ class _MoreActionsState extends State { padding: EdgeInsets.zero, value: HeaderAction.apacheBeamGithub, child: ListTile( - leading: SvgPicture.asset(kGithubIconAsset), + leading: SvgPicture.asset(Assets.github), title: Text(appLocale.apacheBeamOnGithub), onTap: () => _openLink(kApacheBeamGithubLink, context), ), @@ -98,7 +99,7 @@ class _MoreActionsState extends State { padding: EdgeInsets.zero, value: HeaderAction.scioGithub, child: ListTile( - leading: SvgPicture.asset(kGithubIconAsset), + leading: SvgPicture.asset(Assets.github), 
title: Text(appLocale.scioOnGithub), onTap: () => _openLink(kScioGithubLink, context), ), @@ -108,7 +109,7 @@ class _MoreActionsState extends State { padding: EdgeInsets.zero, value: HeaderAction.beamWebsite, child: ListTile( - leading: const Image(image: AssetImage(kBeamIconAsset)), + leading: Image(image: AssetImage(Assets.beam.path)), title: Text(appLocale.toApacheBeamWebsite), onTap: () => _openLink(kBeamWebsiteLink, context), ), diff --git a/playground/frontend/lib/pages/standalone_playground/widgets/playground_page_body.dart b/playground/frontend/lib/pages/standalone_playground/widgets/playground_page_body.dart index 83357abb0cd4..2ce17d79c35e 100644 --- a/playground/frontend/lib/pages/standalone_playground/widgets/playground_page_body.dart +++ b/playground/frontend/lib/pages/standalone_playground/widgets/playground_page_body.dart @@ -20,7 +20,6 @@ import 'package:flutter/material.dart'; import 'package:playground_components/playground_components.dart'; import 'package:provider/provider.dart'; -import '../../../constants/sizes.dart'; import '../../../modules/output/components/output_header/output_placements.dart'; import '../../../modules/output/models/output_placement.dart'; import '../../../modules/output/models/output_placement_state.dart'; @@ -32,13 +31,23 @@ class PlaygroundPageBody extends StatelessWidget { @override Widget build(BuildContext context) { return Consumer2( - builder: (context, outputState, playgroundState, child) { + builder: (context, outputState, controller, child) { + final snippetController = controller.snippetEditingController; + + if (snippetController == null || snippetController.isLoading) { + return const LoadingIndicator(); + } + final output = OutputWidget( graphDirection: outputState.placement.graphDirection, - playgroundController: playgroundState, + playgroundController: controller, trailing: const OutputPlacements(), ); + final codeTextArea = CodeTextAreaWrapper( + controller: controller, + ); + switch (outputState.placement) { case OutputPlacement.bottom: return SplitView( @@ -63,16 +72,4 @@ class PlaygroundPageBody extends StatelessWidget { } }); } - - Widget get codeTextArea => const CodeTextAreaWrapper(); - - Widget getVerticalSeparator(BuildContext context) => Container( - width: kMdSpacing, - color: Theme.of(context).dividerColor, - ); - - Widget getHorizontalSeparator(BuildContext context) => Container( - height: kMdSpacing, - color: Theme.of(context).dividerColor, - ); } diff --git a/playground/frontend/lib/src/assets/assets.gen.dart b/playground/frontend/lib/src/assets/assets.gen.dart new file mode 100644 index 000000000000..2548bf73f7ac --- /dev/null +++ b/playground/frontend/lib/src/assets/assets.gen.dart @@ -0,0 +1,127 @@ +/// GENERATED CODE - DO NOT MODIFY BY HAND +/// ***************************************************** +/// FlutterGen +/// ***************************************************** + +// coverage:ignore-file +// ignore_for_file: type=lint +// ignore_for_file: directives_ordering,unnecessary_import,implicit_dynamic_list_literal + +import 'package:flutter/widgets.dart'; + +class $AssetsTranslationsGen { + const $AssetsTranslationsGen(); + + /// File path: assets/translations/en.yaml + String get en => 'assets/translations/en.yaml'; + + /// List of all assets + List get values => [en]; +} + +class Assets { + Assets._(); + + static const AssetGenImage beam = AssetGenImage('assets/beam.png'); + static const AssetGenImage beamLg = AssetGenImage('assets/beam_lg.png'); + static const String copy = 'assets/copy.svg'; + static 
const String github = 'assets/github.svg'; + static const String link = 'assets/link.svg'; + static const String multifile = 'assets/multifile.svg'; + static const String outputBottom = 'assets/output_bottom.svg'; + static const String outputLeft = 'assets/output_left.svg'; + static const String outputRight = 'assets/output_right.svg'; + static const String sendFeedback = 'assets/send_feedback.svg'; + static const String shortcuts = 'assets/shortcuts.svg'; + static const String streaming = 'assets/streaming.svg'; + static const String thumbDown = 'assets/thumb_down.svg'; + static const String thumbDownFilled = 'assets/thumb_down_filled.svg'; + static const String thumbUp = 'assets/thumb_up.svg'; + static const String thumbUpFilled = 'assets/thumb_up_filled.svg'; + static const $AssetsTranslationsGen translations = $AssetsTranslationsGen(); + + /// List of all assets + List get values => [ + beam, + beamLg, + copy, + github, + link, + multifile, + outputBottom, + outputLeft, + outputRight, + sendFeedback, + shortcuts, + streaming, + thumbDown, + thumbDownFilled, + thumbUp, + thumbUpFilled + ]; +} + +class AssetGenImage { + const AssetGenImage(this._assetName); + + final String _assetName; + + Image image({ + Key? key, + AssetBundle? bundle, + ImageFrameBuilder? frameBuilder, + ImageErrorWidgetBuilder? errorBuilder, + String? semanticLabel, + bool excludeFromSemantics = false, + double? scale, + double? width, + double? height, + Color? color, + Animation? opacity, + BlendMode? colorBlendMode, + BoxFit? fit, + AlignmentGeometry alignment = Alignment.center, + ImageRepeat repeat = ImageRepeat.noRepeat, + Rect? centerSlice, + bool matchTextDirection = false, + bool gaplessPlayback = false, + bool isAntiAlias = false, + String? package, + FilterQuality filterQuality = FilterQuality.low, + int? cacheWidth, + int? cacheHeight, + }) { + return Image.asset( + _assetName, + key: key, + bundle: bundle, + frameBuilder: frameBuilder, + errorBuilder: errorBuilder, + semanticLabel: semanticLabel, + excludeFromSemantics: excludeFromSemantics, + scale: scale, + width: width, + height: height, + color: color, + opacity: opacity, + colorBlendMode: colorBlendMode, + fit: fit, + alignment: alignment, + repeat: repeat, + centerSlice: centerSlice, + matchTextDirection: matchTextDirection, + gaplessPlayback: gaplessPlayback, + isAntiAlias: isAntiAlias, + package: package, + filterQuality: filterQuality, + cacheWidth: cacheWidth, + cacheHeight: cacheHeight, + ); + } + + ImageProvider provider() => AssetImage(_assetName); + + String get path => _assetName; + + String get keyName => _assetName; +} diff --git a/playground/frontend/playground_components/LICENSE b/playground/frontend/playground_components/LICENSE deleted file mode 100644 index 8c048c96fb52..000000000000 --- a/playground/frontend/playground_components/LICENSE +++ /dev/null @@ -1,407 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- - A part of several convenience binary distributions of this software is licensed as follows: - - Google Protobuf: - Copyright 2008 Google Inc. All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following disclaimer - in the documentation and/or other materials provided with the - distribution. - * Neither the name of Google Inc. nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - Code generated by the Protocol Buffer compiler is owned by the owner - of the input file used when generating it. This code is not - standalone and requires a support library to be linked with it. This - support library is itself covered by the above license. - - jsr-305: - Copyright (c) 2007-2009, JSR305 expert group - All rights reserved. - - https://opensource.org/licenses/BSD-3-Clause - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - * Neither the name of the JSR305 expert group nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, - THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE - LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - POSSIBILITY OF SUCH DAMAGE. 
- - janino-compiler: - Janino - An embedded Java[TM] compiler - - Copyright (c) 2001-2016, Arno Unkrig - Copyright (c) 2015-2016 TIBCO Software Inc. - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - - 1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - 2. Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials - provided with the distribution. - 3. Neither the name of JANINO nor the names of its contributors - may be used to endorse or promote products derived from this - software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE - LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER - IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN - IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - jline: - Copyright (c) 2002-2016, the original author or authors. - All rights reserved. - - http://www.opensource.org/licenses/bsd-license.php - - Redistribution and use in source and binary forms, with or - without modification, are permitted provided that the following - conditions are met: - - Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer - in the documentation and/or other materials provided with - the distribution. - - Neither the name of JLine nor the names of its contributors - may be used to endorse or promote products derived from this - software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, - BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY - AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE - FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, - OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING - IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED - OF THE POSSIBILITY OF SUCH DAMAGE. - - sqlline: - SQLLine - Shell for issuing SQL to relational databases via JDBC - - Copyright (c) 2002,2003,2004,2005,2006,2007 Marc Prud'hommeaux - Copyright (c) 2004-2010 The Eigenbase Project - Copyright (c) 2013-2017 Julian Hyde - All rights reserved. 
- - =============================================================================== - - Licensed under the Modified BSD License (the "License"); you may not - use this file except in compliance with the License. You may obtain a - copy of the License at: - - http://opensource.org/licenses/BSD-3-Clause - - Redistribution and use in source and binary forms, - with or without modification, are permitted provided - that the following conditions are met: - - (1) Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - (2) Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the - distribution. - - (3) The name of the author may not be used to endorse or promote - products derived from this software without specific prior written - permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - slf4j: - Copyright (c) 2004-2017 QOS.ch - All rights reserved. - - Permission is hereby granted, free of charge, to any person obtaining - a copy of this software and associated documentation files (the - "Software"), to deal in the Software without restriction, including - without limitation the rights to use, copy, modify, merge, publish, - distribute, sublicense, and/or sell copies of the Software, and to - permit persons to whom the Software is furnished to do so, subject to - the following conditions: - - The above copyright notice and this permission notice shall be - included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE - LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION - OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION - WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE - -See the adjacent LICENSE.python file, if present, for additional licenses that -apply to parts of Apache Beam Python. diff --git a/playground/frontend/playground_components/README.md b/playground/frontend/playground_components/README.md index 9c4ef73d25d0..6a99b6b645ce 100644 --- a/playground/frontend/playground_components/README.md +++ b/playground/frontend/playground_components/README.md @@ -17,29 +17,7 @@ under the License. --> -TODO: Put a short description of the package here that helps potential users -know whether this package might be useful for them. +# playground_components -## Features - -TODO: List what your package can do. Maybe include images, gifs, or videos. 
- -## Getting started - -TODO: List prerequisites and provide or point to information on how to -start using the package. - -## Usage - -TODO: Include short and useful examples for package users. Add longer examples -to `/example` folder. - -```dart -const like = 'sample'; -``` - -## Additional information - -TODO: Tell users more about the package: where to find more information, how to -contribute to the package, how to file issues, what response they can expect -from the package authors, and more. +This is a non-pub.dev Flutter package that contains common components +for both Beam Playground app and Tour of Beam app. diff --git a/playground/frontend/playground_components/analysis_options.yaml b/playground/frontend/playground_components/analysis_options.yaml index 318f01bfa2fd..fe2e0e8eb952 100644 --- a/playground/frontend/playground_components/analysis_options.yaml +++ b/playground/frontend/playground_components/analysis_options.yaml @@ -16,6 +16,3 @@ # under the License. include: package:total_lints/app.yaml - -# Additional information about this file can be found at -# https://dart.dev/guides/language/analysis-options diff --git a/playground/frontend/playground_components/assets/symbols/go.g.yaml b/playground/frontend/playground_components/assets/symbols/go.g.yaml index 218eee2710db..4df541c1a86e 100644 --- a/playground/frontend/playground_components/assets/symbols/go.g.yaml +++ b/playground/frontend/playground_components/assets/symbols/go.g.yaml @@ -555,6 +555,7 @@ - NewI - NewImpulse - NewIntervalWindow + - NewIntervalWindowCoder - NewJobServiceClient - NewKV - NewLegacyArtifactRetrievalServiceClient @@ -869,6 +870,7 @@ - UrnToType - UseAutomatedJavaExpansionService - UseAutomatedPythonExpansionService + - UseBatchSize - UseStandardSQL - UserLabels - UserStateCoderID @@ -3580,6 +3582,19 @@ MapTypeValue: - String properties: - Entries +MapWindows: + methods: + - Down + - FinishBundle + - ID + - ProcessElement + - StartBundle + - String + - Up + properties: + - Fn + - Out + - UID MavenPayload: methods: - Descriptor diff --git a/playground/frontend/playground_components/assets/symbols/java.g.yaml b/playground/frontend/playground_components/assets/symbols/java.g.yaml new file mode 100644 index 000000000000..1fe009d43672 --- /dev/null +++ b/playground/frontend/playground_components/assets/symbols/java.g.yaml @@ -0,0 +1,13371 @@ +AbstractBeamCalcRel: + methods: + - beamComputeSelfCost + - estimateNodeStats + - getLimitCountOfSortRel + - isInputSortRelAndLimitOnly +AbstractSimulator: + methods: + - hasNext + - next + - remove + - results + - resultsPerWindow +ActionFactory: + methods: + - childPartitionsRecordAction + - dataChangeRecordAction + - detectNewPartitionsAction + - heartbeatRecordAction + - queryChangeStreamAction +AdaptableCollector: + methods: + - asContext + - collect + - getCounter + - getHistogram + - getTimer + - setProcessContext +AddFields: + methods: + - create + - expand + - field + - processElement +AddHarnessIdInterceptor: + methods: + - create +AddUuidsTransform: + methods: + - expand +AdvancingPhaser: {} +AfterAll: + methods: + - getWatermarkThatGuaranteesFiring + - of + - toString +AfterEach: + methods: + - getWatermarkThatGuaranteesFiring + - inOrder + - mayFinish + - toString +AfterFirst: + methods: + - getWatermarkThatGuaranteesFiring + - of + - toString +AfterPane: + methods: + - elementCountAtLeast + - equals + - getElementCount + - getWatermarkThatGuaranteesFiring + - hashCode + - isCompatible + - toString +AfterProcessingTime: + methods: + - alignedTo + - 
equals + - getTimestampTransforms + - getWatermarkThatGuaranteesFiring + - hashCode + - isCompatible + - pastFirstElementInPane + - plusDelayOf + - toString +AfterSynchronizedProcessingTime: + methods: + - equals + - getWatermarkThatGuaranteesFiring + - hashCode + - ofFirstElement + - toString +AfterWatermark: + methods: + - equals + - getContinuationTrigger + - getEarlyTrigger + - getLateTrigger + - getWatermarkThatGuaranteesFiring + - hashCode + - mayFinish + - pastEndOfWindow + - toString + - withEarlyFirings + - withLateFirings +AggregationCombineFnAdapter: + methods: + - addInput + - createAccumulator + - createCombineFn + - createCombineFnAnalyticsFunctions + - createConstantCombineFn + - extractOutput + - getAccumulatorCoder + - getDefaultOutputCoder + - mergeAccumulators + properties: + - EMPTY_ROW + - EMPTY_SCHEMA + - INSTANCE +AggregationQuery: + methods: + - apply + - create + - withMongoDbPipeline +AmqpIO: + methods: + - advance + - close + - createReader + - expand + - finalizeCheckpoint + - getCheckpointMark + - getCheckpointMarkCoder + - getCurrent + - getCurrentSource + - getCurrentTimestamp + - getOutputCoder + - getWatermark + - populateDisplayData + - processElement + - read + - setup + - split + - start + - teardown + - withAddresses + - withMaxNumRecords + - withMaxReadTime + - write +AmqpMessageCoder: + methods: + - decode + - encode +AmqpMessageCoderProviderRegistrar: + methods: + - getCoderProviders +AnnotateText: + methods: + - build + - expand + - features + - languageHint + - newBuilder + - processElement + - setFeatures + - setLanguageHint +ApiSurface: + methods: + - classesInPackage + - containsOnlyClassesMatching + - containsOnlyPackages + - describeTo + - empty + - getAnyExposurePath + - getExposedClasses + - getRootClasses + - includingClass + - includingPackage + - ofClass + - ofPackage + - pruningClass + - pruningClassName + - pruningPattern + - pruningPrefix +AppliedCombineFn: + methods: + - getAccumulatorCoder + - getFn + - getKvCoder + - getSideInputViews + - getWindowingStrategy + - withAccumulatorCoder + - withInputCoder +AppliedPTransform: + methods: + - getFullName + - getInputs + - getMainInputs + - getOutputs + - getPipeline + - getResourceHints + - getTransform + - of +ApproximateCountDistinct: + methods: + - build + - expand + - getMapping + - getPrecision + - getUdaf + - globally + - perKey + - populateDisplayData + - setMapping + - setPrecision + - toBuilder + - via + - withPercision +ApproximateDistinct: + methods: + - addInput + - apply + - create + - createAccumulator + - decode + - encode + - expand + - extractOutput + - globally + - isRegisterByteSizeObserverCheap + - mergeAccumulators + - of + - perKey + - populateDisplayData + - precisionForRelativeError + - processElement + - relativeErrorForPrecision + - withPrecision + - withSparsePrecision + - withSparseRepresentation +ApproximateQuantiles: + methods: + - addInput + - create + - createAccumulator + - decode + - empty + - encode + - equals + - extractOutput + - getAccumulatorCoder + - globally + - hasNext + - hashCode + - isEmpty + - mergeAccumulator + - next + - perKey + - populateDisplayData + - registerByteSizeObserver + - singleton + - sizedIterator + - toString + - verifyDeterministic + - withEpsilon + - withMaxInputSize + properties: + - DEFAULT_MAX_NUM_ELEMENTS +ApproximateUnique: + methods: + - add + - addInput + - createAccumulator + - equals + - expand + - extractOutput + - getAccumulatorCoder + - globally + - hashCode + - mergeAccumulators + - perKey + - 
populateDisplayData +ArrayAgg: + methods: + - addInput + - createAccumulator + - extractOutput + - mergeAccumulators +ArrowConversion: + methods: + - arrowSchemaFromInput + - close + - create + - get + - hasNext + - name + - next + - rowsFromRecordBatch + - rowsFromSerializedRecordBatch + - toBeamSchema + - visit +AsJsons: + methods: + - apply + - exceptionsInto + - exceptionsVia + - expand + - of + - withMapper +AssignEventTime: + methods: + - expand + - getEventTimeExtractor + - named + - of + - output + - using +AtomicCoder: + methods: + - equals + - getCoderArguments + - getComponents + - hashCode + - verifyDeterministic +AttributeValueCoder: + methods: + - decode + - encode + - of +AttributeValueCoderProviderRegistrar: + methods: + - getCoderProviders +Auction: + methods: + - decode + - encode + - equals + - hasAnnotation + - hashCode + - sizeInBytes + - structuralValue + - toString + - withAnnotation + - withoutAnnotation + properties: + - CODER + - category + - dateTime + - description + - expires + - extra + - id + - initialBid + - itemName + - reserve + - seller +AuctionBid: + methods: + - decode + - encode + - equals + - hashCode + - sizeInBytes + - structuralValue + - toString + properties: + - CODER + - auction + - bid +AuctionCount: + methods: + - decode + - encode + - equals + - hashCode + - sizeInBytes + - structuralValue + - toString + properties: + - CODER + - auction + - num +AuctionGenerator: + methods: + - lastBase0AuctionId + - nextAuction + - nextBase0AuctionId +AuctionOrBid: + methods: + - apply +AuctionPrice: + methods: + - decode + - encode + - equals + - hashCode + - sizeInBytes + - structuralValue + - toString + properties: + - CODER + - auction + - price +AutoValueSchema: + methods: + - fieldValueGetters + - fieldValueTypeInformations + - get + - schemaFor + - schemaTypeCreator + properties: + - INSTANCE +AutoValueUtils: + methods: + - appender + - getBaseAutoValueClass + - getBuilderCreator + - getConstructorCreator + - prepare +AvroCoder: + methods: + - check + - coderFor + - decode + - encode + - equals + - fromLong + - get + - getCoderProvider + - getConvertedType + - getEncodedTypeDescriptor + - getLogicalTypeName + - getSchema + - getType + - hashCode + - initialValue + - of + - toLong + - useReflectApi + - verifyDeterministic +AvroGenericCoder: + methods: + - of +AvroIO: + methods: + - apply + - constantDestinations + - expand + - flush + - from + - open + - parseAllGenericRecords + - parseFilesGenericRecords + - parseGenericRecords + - populateDisplayData + - read + - readAll + - readAllGenericRecords + - readFiles + - readFilesGenericRecords + - readGenericRecords + - sink + - sinkViaGenericRecords + - to + - toResource + - watchForNewFiles + - withBeamSchemas + - withCodec + - withCoder + - withDatumReaderFactory + - withDatumWriterFactory + - withEmptyMatchTreatment + - withFileExceptionHandler + - withFormatFunction + - withHintMatchesManyFiles + - withMatchConfiguration + - withMetadata + - withNoSpilling + - withNumShards + - withOutputFilenames + - withSchema + - withShardNameTemplate + - withSuffix + - withTempDirectory + - withUsesReshuffle + - withWindowedWrites + - withoutSharding + - write + - writeCustomType + - writeCustomTypeToGenericRecords + - writeGenericRecords +AvroPayloadSerializerProvider: + methods: + - getSerializer + - identifier +AvroRecordSchema: + methods: + - fieldValueGetters + - fieldValueTypeInformations + - schemaFor + - schemaTypeCreator +AvroSchemaIOProvider: + methods: + - buildReader + - buildWriter + - 
configurationSchema + - expand + - from + - identifier + - isBounded + - requiresDataSchema + - schema +AvroSink: + methods: + - createWriteOperation + - createWriter + - getDynamicDestinations + - write +AvroSource: + methods: + - close + - createForSubrangeOfFile + - from + - getCodec + - getCurrentBlock + - getCurrentBlockOffset + - getCurrentBlockSize + - getCurrentRecord + - getCurrentSource + - getFractionOfBlockConsumed + - getOutputCoder + - getSchemaString + - getSplitPointsRemaining + - getSyncMarker + - length + - read + - readNextBlock + - readNextRecord + - seek + - tell + - validate + - withDatumReaderFactory + - withEmptyMatchTreatment + - withMinBundleSize + - withParseFn + - withSchema +AvroTableProvider: + methods: + - getSchemaIOProvider + - getTableType +AvroUtils: + methods: + - apply + - convertAvroFieldStrict + - createGetterConversions + - createSetterConversions + - createTypeConversion + - equals + - fromAvroType + - fromBeamFieldType + - get + - getAvroBytesToRowFunction + - getCreator + - getFieldTypes + - getFromRowFunction + - getGenericRecordToRowFunction + - getGetters + - getRowToAvroBytesFunction + - getRowToGenericRecordFunction + - getSchema + - getSize + - getToRowFunction + - hashCode + - schemaCoder + - toAvroField + - toAvroSchema + - toAvroType + - toBeamField + - toBeamRowStrict + - toBeamSchema + - toBeamType + - toGenericRecord + - withSize + properties: + - nullable + - type +AvroWriteRequest: + methods: + - getElement + - getSchema +AwsBuilderFactory: {} +AwsCoders: + methods: + - awsResponseMetadata + - decode + - encode + - of + - responseMetadata + - sdkHttpMetadata + - sdkHttpMetadataWithoutHeaders + - sdkHttpResponse + - sdkHttpResponseWithoutHeaders + - verifyDeterministic +AwsModule: + methods: + - canCreateUsingDefault + - createUsingDefault + - deserialize + - deserializeWithType + - serialize + - serializeWithType + - setupModule + properties: + - CLIENT_EXECUTION_TIMEOUT + - CONNECTION_MAX_IDLE_TIME + - CONNECTION_TIMEOUT + - CONNECTION_TIME_TO_LIVE + - MAX_CONNECTIONS + - PROXY_HOST + - PROXY_PASSWORD + - PROXY_PORT + - PROXY_USERNAME + - REQUEST_TIMEOUT + - SOCKET_TIMEOUT +AwsPipelineOptionsRegistrar: + methods: + - getPipelineOptions +AwsSchemaProvider: + methods: + - apply + - create + - equals + - fieldValueGetters + - fieldValueTypeInformations + - fromRowFunction + - hashCode + - schemaFor + - schemaTypeCreator +AwsSchemaRegistrar: + methods: + - getSchemaProviders +AwsSerializableUtils: + methods: + - deserialize + - deserializeAwsCredentialsProvider + - serialize + - serializeAwsCredentialsProvider +AwsTypes: {} +AzureBlobStoreFileSystemRegistrar: + methods: + - fromOptions +AzureModule: + methods: + - deserialize + - deserializeWithType + - serialize + - serializeWithType +AzurePipelineOptionsRegistrar: + methods: + - getPipelineOptions +BackOffAdapter: + methods: + - nextBackOffMillis + - reset + - toGcpBackOff +BackOffUtils: + methods: + - next +BagUserState: + methods: + - append + - asyncClose + - clear + - get +BaseBeamTable: + methods: + - buildIOReader + - constructFilter + - getTableStatistics + - supportsProjects +BasicDynamoDBProvider: + methods: + - createDynamoDB +BasicDynamoDbClientProvider: + methods: + - equals + - getDynamoDbClient + - hashCode +BatchContextImpl: + methods: + - addProperties + - addTags + - createDataset + - datasetExists + - discardDataset + - getArguments + - getDataset + - getFailureCollector + - getInputFormatProvider + - getInputSchema + - getInputSchemas + - getLogicalStartTime + - 
getMetadata + - getMetrics + - getNamespace + - getOutputFormatProvider + - getOutputPortSchemas + - getOutputSchema + - getPipelineName + - getPluginProperties + - getServiceURL + - getStageName + - loadPluginClass + - newPluginInstance + - provide + - record + - releaseDataset + - removeMetadata + - removeProperties + - removeTags +BatchSinkContextImpl: + methods: + - addOutput + - isPreviewEnabled +BatchSourceContextImpl: + methods: + - getMaxPreviewRecords + - isPreviewEnabled + - setInput +BeamAccumulatorProvider: + methods: + - add + - create + - get + - getCounter + - getFactory + - getHistogram + - getName + - getNamespace + - getTimer + - increment +BeamAggregateProjectMergeRule: + methods: + - onMatch + properties: + - INSTANCE +BeamAggregationRel: + methods: + - beamComputeSelfCost + - buildPTransform + - copy + - estimateNodeStats + - expand + - explainTerms + - processElement +BeamAggregationRule: + methods: + - onMatch + properties: + - INSTANCE +BeamBasicAggregationRule: + methods: + - onMatch + properties: + - INSTANCE +BeamBigQuerySqlDialect: + methods: + - quoteIdentifier + - unparseCall + - unparseDateTimeLiteral + - unparseSqlIntervalLiteral + properties: + - DEFAULT + - DEFAULT_CONTEXT + - DOUBLE_NAN_WRAPPER + - DOUBLE_NEGATIVE_INF_WRAPPER + - DOUBLE_POSITIVE_INF_WRAPPER + - IN_ARRAY_OPERATOR + - NUMERIC_LITERAL_WRAPPER +BeamBuiltinAggregations: + methods: + - addInput + - apply + - create + - createAccumulator + - createBitXOr + - extractOutput + - getAccumulatorCoder + - identity + - mergeAccumulators + - toBigDecimal + properties: + - BUILTIN_AGGREGATOR_FACTORIES +BeamBuiltinAnalyticFunctions: + methods: + - addInput + - create + - createAccumulator + - extractOutput + - mergeAccumulators + - navigationFirstValue + - navigationLastValue + - numberingDenseRank + - numberingPercentRank + - numberingRank + - numberingRowNumber + properties: + - BUILTIN_ANALYTIC_FACTORIES +BeamBuiltinFunctionProvider: + methods: + - getBuiltinMethods +BeamBuiltinMethods: + properties: + - CHAR_LENGTH_METHOD + - CONCAT_METHOD + - DATE_METHOD + - ENDS_WITH_METHOD + - LIKE_METHOD + - LTRIM_METHOD + - REPLACE_METHOD + - REVERSE_METHOD + - RTRIM_METHOD + - STARTS_WITH_METHOD + - SUBSTR_METHOD + - TIMESTAMP_METHOD + - TRIM_METHOD +BeamCalcMergeRule: + methods: + - onMatch + properties: + - INSTANCE +BeamCalcRel: + methods: + - buildPTransform + - copy + - entrySet + - expand + - field + - get + - getQueryProvider + - getRootSchema + - getTypeFactory + - processElement + - setup + - size +BeamCalcRule: + methods: + - convert + - matches + properties: + - INSTANCE +BeamCalcSplittingRule: + methods: + - matches + - onMatch +BeamCalciteSchema: + methods: + - getExpression + - getFunctionNames + - getFunctions + - getPipelineOptions + - getSubSchema + - getSubSchemaNames + - getTable + - getTableNames + - getTableProvider + - getType + - getTypeNames + - isMutable + - removeAllPipelineOptions + - removePipelineOption + - setPipelineOption + - snapshot +BeamCalciteTable: + methods: + - asQueryable + - getModifiableCollection + - getRowType + - getStatistic + - of + - toModificationRel + - toRel +BeamCoGBKJoinRel: + methods: + - buildPTransform + - copy + - expand +BeamCoGBKJoinRule: + methods: + - matches + - onMatch + properties: + - INSTANCE +BeamCodegenUtils: + methods: + - toStringTimestamp + - toStringUTF8 +BeamCostModel: + methods: + - convertRelOptCost + - divideBy + - equals + - getCpu + - getCpuRate + - getIo + - getRows + - hashCode + - isEqWithEpsilon + - isInfinite + - isLe + - isLt + - 
makeCost + - makeHugeCost + - makeInfiniteCost + - makeTinyCost + - makeZeroCost + - minus + - multiplyBy + - plus + - toString + properties: + - FACTORY +BeamEnumerableConverter: + methods: + - computeSelfCost + - copy + - createPipelineOptions + - implement + - processElement + - startBundle + - toEnumerable + - toRowList + - visitValue +BeamEnumerableConverterRule: + methods: + - convert + properties: + - INSTANCE +BeamFnControlClient: + methods: + - delegateOnInstructionRequestType + - onCompleted + - onError + - onNext + - sendInstructionResponse + - waitForTermination +BeamFnDataGrpcClient: + methods: + - createOutboundAggregator + - registerReceiver + - unregisterReceiver +BeamFnDataGrpcMultiplexer: + methods: + - close + - getInboundObserver + - getOutboundObserver + - onCompleted + - onError + - onNext + - registerConsumer + - toString +BeamFnDataGrpcMultiplexer2: + methods: + - close + - getInboundObserver + - getOutboundObserver + - onCompleted + - onError + - onNext + - registerConsumer + - toString + - unregisterConsumer +BeamFnDataInboundObserver: + methods: + - accept + - awaitCompletion + - cancel + - complete + - fail + - forConsumer + - isDone + - runWhenComplete +BeamFnDataInboundObserver2: + methods: + - accept + - awaitCompletion + - close + - flush + - forConsumers + - getUnfinishedEndpoints + - multiplexElements + - reset + properties: + - INSTANCE +BeamFnDataOutboundAggregator: + methods: + - accept + - bufferedSize + - discard + - equals + - getByteCount + - getElementCount + - hashCode + - registerOutputDataLocation + - registerOutputTimersLocation + - resetStats + - sendElements + - sendOrCollectBufferedDataAndFinishOutboundStreams + - start + - toByteStringAndResetBuffer + - toString + properties: + - DATA_BUFFER_SIZE_LIMIT + - DATA_BUFFER_TIME_LIMIT_MS + - DEFAULT_BUFFER_LIMIT_BYTES + - DEFAULT_BUFFER_LIMIT_TIME_MS +BeamFnDataOutboundObserver: + methods: + - accept + - close + - flush +BeamFnDataReadRunner: + methods: + - blockTillReadFinishes + - createRunnerForPTransform + - forwardElementToConsumer + - getCache + - getCurrentInstructionId + - getPTransformRunnerFactories + - getStateClient + - reset + - trySplit + - updateFinalMonitoringData + - updateIntermediateMonitoringData +BeamFnDataWriteRunner: + methods: + - createRunnerForPTransform + - getCache + - getCurrentInstructionId + - getPTransformRunnerFactories + - getStateClient +BeamFnLoggingClient: + methods: + - beforeStart + - close + - flush + - onCompleted + - onError + - onNext + - publish + - run + - setProcessBundleHandler + - toString +BeamFnLoggingClientBenchmark: + methods: + - logging + - onCompleted + - onError + - onNext + - tearDown + - testLogging + - testLoggingWithAllOptionalParameters + - testSkippedLogging + properties: + - loggingClient + - loggingService + - server +BeamFnLoggingMDC: + methods: + - getInstructionId + - setInstructionId +BeamFnStateGrpcClientCache: + methods: + - forApiServiceDescriptor + - handle + - onCompleted + - onError + - onNext +BeamFnStatusClient: + methods: + - close + - equals + - getInstruction + - getTimeSinceTransition + - getTrackedThreadName + - hashCode + - onCompleted + - onError + - onNext +BeamIOPushDownRule: + methods: + - onMatch + properties: + - INSTANCE +BeamIOSinkRel: + methods: + - beamComputeSelfCost + - buildPTransform + - copy + - estimateNodeStats + - expand + - flattenRel + - getPipelineOptions + - register +BeamIOSinkRule: + methods: + - convert + properties: + - INSTANCE +BeamIOSourceRel: + methods: + - beamComputeSelfCost + - 
buildPTransform + - computeSelfCost + - createPushDownRel + - estimateNodeStats + - estimateRowCount + - expand + - getBeamSqlTable + - getPipelineOptions + - isBounded + properties: + - CONSTANT_WINDOW_SIZE +BeamIntersectRel: + methods: + - beamComputeSelfCost + - buildPTransform + - copy + - estimateNodeStats +BeamIntersectRule: + methods: + - convert + properties: + - INSTANCE +BeamJavaTypeFactory: + methods: + - getJavaClass + properties: + - INSTANCE +BeamJavaUdfCalcRule: + properties: + - INSTANCE +BeamJoinAssociateRule: + methods: + - onMatch + properties: + - INSTANCE +BeamJoinPushThroughJoinRule: + methods: + - onMatch + properties: + - LEFT + - RIGHT +BeamJoinRel: + methods: + - beamComputeSelfCost + - containsSeekableInput + - estimateNodeStats + - getBoundednessOfRelNode + - getPCollectionInputs + - isJoinLegal + - seekable +BeamJoinTransforms: + methods: + - expand + - getJoinColumns + - processElement + - setup + - teardown +BeamKafkaCSVTable: + methods: + - expand + - processElement +BeamKafkaTable: + methods: + - buildIOReader + - buildIOWriter + - getBootstrapServers + - getTableStatistics + - getTopics + - isBounded + - updateConsumerProperties +BeamMatchRel: + methods: + - beamComputeSelfCost + - buildPTransform + - copy + - estimateNodeStats + - expand + - processElement +BeamMatchRule: + methods: + - convert + properties: + - INSTANCE +BeamMinusRel: + methods: + - beamComputeSelfCost + - buildPTransform + - copy + - estimateNodeStats +BeamMinusRule: + methods: + - convert + properties: + - INSTANCE +BeamPCollectionTable: + methods: + - buildIOReader + - buildIOWriter + - isBounded +BeamPushDownIOSourceRel: + methods: + - beamComputeSelfCost + - buildPTransform + - expand + - explainTerms +BeamRelDataTypeSystem: + methods: + - getDefaultPrecision + - getMaxNumericPrecision + - getMaxNumericScale + - getMaxPrecision + - shouldConvertRaggedUnionTypesToVarying + properties: + - INSTANCE +BeamRelMetadataQuery: + methods: + - getNodeStats + - instance +BeamRowToBigtableMutation: + methods: + - apply + - expand +BeamRowToStorageApiProto: + methods: + - messageFromBeamRow +BeamRuleSets: + methods: + - getRuleSets +BeamSetOperatorRelBase: + methods: + - expand +BeamSetOperatorsTransforms: + methods: + - apply + - processElement +BeamSideInputJoinRel: + methods: + - buildPTransform + - copy + - expand + - sideInputJoin +BeamSideInputJoinRule: + methods: + - matches + - onMatch + properties: + - INSTANCE +BeamSideInputLookupJoinRel: + methods: + - buildPTransform + - copy + - expand +BeamSideInputLookupJoinRule: + methods: + - convert + - matches + properties: + - INSTANCE +BeamSortRel: + methods: + - beamComputeSelfCost + - buildPTransform + - compare + - copy + - estimateNodeStats + - expand + - getCount + - isLimitOnly + - processElement +BeamSortRule: + methods: + - convert + properties: + - INSTANCE +BeamSqlCli: + methods: + - execute + - explainQuery + - getMetaStore + - metaStore +BeamSqlDataCatalogExample: + methods: + - main +BeamSqlEnv: + methods: + - addSchema + - addUdaf + - addUdf + - autoLoadUserDefinedFunctions + - build + - builder + - executeDdl + - explain + - getContext + - getPipelineOptions + - inMemory + - isDdl + - parseQuery + - readOnly + - setCurrentSchema + - setPipelineOptions + - setQueryPlannerClassName + - setRuleSets + - withTableProvider +BeamSqlEnvRunner: + methods: + - runUsingBeamSqlEnv +BeamSqlLine: + methods: + - main +BeamSqlOutputToConsoleFn: + methods: + - processElement +BeamSqlParser: + methods: + - getDdlExecutor + - getParser + 
properties: + - DDL_EXECUTOR + - FACTORY +BeamSqlPipelineOptionsRegistrar: + methods: + - getPipelineOptions +BeamSqlRelUtils: + methods: + - getBeamRelInput + - getErrorRowSchema + - getInput + - getNodeStats + - toPCollection + properties: + - ERROR + - ROW +BeamSqlUnparseContext: + methods: + - clone + - equals + - getNullParams + - hashCode + - implementor + - toSql + - unparse +BeamTableFunctionScanRel: + methods: + - beamComputeSelfCost + - buildPTransform + - copy + - estimateNodeStats + - expand + - processElement +BeamTableFunctionScanRule: + methods: + - convert + properties: + - INSTANCE +BeamTableStatistics: + methods: + - createBoundedTableStatistics + - createUnboundedTableStatistics + - getCollations + - getDistribution + - getKeys + - getRate + - getReferentialConstraints + - getRowCount + - isKey + - isUnknown + properties: + - BOUNDED_UNKNOWN + - UNBOUNDED_UNKNOWN +BeamTableUtils: + methods: + - autoCastField + - beamRow2CsvLine + - csvLines2BeamRows +BeamTpcds: + methods: + - main +BeamUncollectRel: + methods: + - beamComputeSelfCost + - buildPTransform + - copy + - estimateNodeStats + - expand + - process +BeamUncollectRule: + methods: + - convert + properties: + - INSTANCE +BeamUnionRel: + methods: + - beamComputeSelfCost + - buildPTransform + - copy + - estimateNodeStats +BeamUnionRule: + methods: + - convert + properties: + - INSTANCE +BeamUnnestRel: + methods: + - beamComputeSelfCost + - buildPTransform + - copy + - estimateNodeStats + - expand + - explainTerms + - process +BeamUnnestRule: + methods: + - onMatch + properties: + - INSTANCE +BeamValuesRel: + methods: + - beamComputeSelfCost + - buildPTransform + - estimateNodeStats + - expand + - getPipelineOptions +BeamValuesRule: + methods: + - convert + properties: + - INSTANCE +BeamWindowRel: + methods: + - beamComputeSelfCost + - buildPTransform + - copy + - estimateNodeStats + - expand + - processElement +BeamWindowRule: + methods: + - convert + properties: + - INSTANCE +BeamZetaSqlCalcMergeRule: + methods: + - onMatch + properties: + - INSTANCE +BeamZetaSqlCalcRel: + methods: + - buildPTransform + - copy + - expand + - finishBundle + - getAllowedTimestampSkew + - output + - outputWithTimestamp + - processElement + - setup + - startBundle + - teardown +BeamZetaSqlCalcRule: + properties: + - INSTANCE +BeamZetaSqlCalcSplittingRule: + properties: + - INSTANCE +BeamZetaSqlCatalog: + properties: + - PRE_DEFINED_WINDOW_FUNCTIONS + - USER_DEFINED_JAVA_AGGREGATE_FUNCTIONS + - USER_DEFINED_JAVA_SCALAR_FUNCTIONS + - USER_DEFINED_SQL_FUNCTIONS + - ZETASQL_FUNCTION_GROUP_NAME +BeamZetaSqlUncollectRel: + methods: + - beamComputeSelfCost + - buildPTransform + - copy + - estimateNodeStats + - expand + - process +BeamZetaSqlUncollectRule: + methods: + - convert + properties: + - INSTANCE +BeamZetaSqlUnnestRel: + methods: + - beamComputeSelfCost + - buildPTransform + - copy + - estimateNodeStats + - expand + - explainTerms + - process +BeamZetaSqlUnnestRule: + methods: + - onMatch + properties: + - INSTANCE +Bid: + methods: + - decode + - encode + - equals + - hasAnnotation + - hashCode + - sizeInBytes + - structuralValue + - toString + - verifyDeterministic + - withAnnotation + - withoutAnnotation + properties: + - ASCENDING_TIME_THEN_PRICE + - CODER + - PRICE_THEN_DESCENDING_TIME + - auction + - bidder + - dateTime + - extra + - price +BidGenerator: + methods: + - nextBid +BidsPerSession: + methods: + - decode + - encode + - equals + - hashCode + - sizeInBytes + - structuralValue + - toString + - verifyDeterministic + 
properties: + - CODER +BigDecimalCoder: + methods: + - consistentWithEquals + - decode + - encode + - isRegisterByteSizeObserverCheap + - of + - verifyDeterministic +BigDecimalConverter: + methods: + - forSqlType +BigEndianIntegerCoder: + methods: + - consistentWithEquals + - decode + - encode + - getEncodedTypeDescriptor + - isRegisterByteSizeObserverCheap + - of + - verifyDeterministic +BigEndianLongCoder: + methods: + - consistentWithEquals + - decode + - encode + - getEncodedTypeDescriptor + - isRegisterByteSizeObserverCheap + - of + - verifyDeterministic +BigEndianShortCoder: + methods: + - consistentWithEquals + - decode + - encode + - getEncodedTypeDescriptor + - isRegisterByteSizeObserverCheap + - of + - verifyDeterministic +BigIntegerCoder: + methods: + - consistentWithEquals + - decode + - encode + - isRegisterByteSizeObserverCheap + - of + - verifyDeterministic +BigQueryClient: + methods: + - create + - createTableIfNotExists + - insertAll + - insertRow +BigQueryCoderProviderRegistrar: + methods: + - getCoderProviders +BigQueryDirectReadSchemaTransformProvider: + methods: + - build + - buildTransform + - builder + - expand + - getBigQueryServices + - getQuery + - getRowRestriction + - getSelectedFields + - getTableSpec + - identifier + - inputCollectionNames + - outputCollectionNames + - setBigQueryServices + - setQuery + - setRowRestriction + - setSelectedFields + - setTableSpec + - validate +BigQueryDlqProvider: + methods: + - expand + - identifier + - newDlqTransform +BigQueryFilter: + methods: + - getNotSupported + - getSupported + - numSupported + - toString +BigQueryHelpers: + methods: + - apply + - fromJsonString + - getNumRows + - parseTableSpec + - parseTableUrn + - stripPartitionDecorator + - toJsonString + - toString + - toTableSpec + properties: + - jobId + - shouldRetry +BigQueryIO: + methods: + - actuateProjectionPushdown + - apply + - expand + - from + - fromQuery + - getTable + - getTableProvider + - ignoreInsertIds + - ignoreUnknownValues + - optimizedWrites + - populateDisplayData + - processElement + - read + - readTableRows + - readTableRowsWithSchema + - readWithDatumReader + - setSchema + - skipInvalidRows + - supportsProjectionPushdown + - to + - useAvroLogicalTypes + - useBeamSchema + - usingStandardSql + - validate + - withAutoSchemaUpdate + - withAutoSharding + - withAvroFormatFunction + - withAvroSchemaFactory + - withAvroWriter + - withBeamRowConverters + - withClustering + - withCoder + - withCreateDisposition + - withCustomGcsTempLocation + - withDeterministicRecordIdFn + - withExtendedErrorInfo + - withFailedInsertRetryPolicy + - withFormat + - withFormatFunction + - withFormatRecordOnFailureFunction + - withJsonSchema + - withJsonTimePartitioning + - withKmsKey + - withLoadJobProjectId + - withMaxBytesPerPartition + - withMaxFilesPerBundle + - withMethod + - withNumFileShards + - withNumStorageWriteApiStreams + - withQueryLocation + - withQueryPriority + - withQueryTempDataset + - withRowRestriction + - withSchema + - withSchemaFromView + - withSchemaUpdateOptions + - withSelectedFields + - withSuccessfulInsertsPropagation + - withTableDescription + - withTemplateCompatibility + - withTestServices + - withTimePartitioning + - withTriggeringFrequency + - withWriteDisposition + - withWriteTempDataset + - withoutResultFlattening + - withoutValidation + - write + - writeTableRows + properties: + - BIGQUERY_JOB_TEMPLATE + - INSTANCE +BigQueryInsertError: + methods: + - equals + - getError + - getRow + - getTable + - hashCode +BigQueryInsertErrorCoder: 
+ methods: + - decode + - encode + - getEncodedTypeDescriptor + - of + - verifyDeterministic +BigQuerySchemaIOProvider: + methods: + - buildReader + - buildWriter + - configurationSchema + - expand + - from + - identifier + - isBounded + - requiresDataSchema + - schema +BigQuerySchemaRetrievalException: {} +BigQuerySchemaTransformReadConfiguration: + methods: + - build + - builder + - getQuery + - getQueryLocation + - getTableSpec + - getUseStandardSql + - setQuery + - setQueryLocation + - setTableSpec + - setUseStandardSql +BigQuerySchemaTransformReadProvider: + methods: + - buildTransform + - expand + - identifier + - inputCollectionNames + - outputCollectionNames +BigQuerySchemaTransformWriteConfiguration: + methods: + - build + - builder + - getCreateDisposition + - getTableSpec + - getWriteDisposition + - setCreateDisposition + - setTableSpec + - setWriteDisposition +BigQuerySchemaTransformWriteProvider: + methods: + - buildTransform + - expand + - identifier + - inputCollectionNames + - outputCollectionNames + - validate +BigQueryStorageApiInsertError: + methods: + - getErrorMessage + - getRow + - toString +BigQueryStorageApiInsertErrorCoder: + methods: + - decode + - encode + - of +BigQueryStorageTableSource: + methods: + - create + - getEstimatedSizeBytes + - populateDisplayData +BigQueryTableProvider: + methods: + - buildBeamSqlTable + - getTableType +BigQueryUtils: + methods: + - apply + - build + - builder + - convertAvroFormat + - convertGenericRecordToTableRow + - fromTableSchema + - getInferMaps + - getTruncateTimestamps + - hashSchemaDescriptorDeterministic + - readCallMetric + - setInferMaps + - setTruncateTimestamps + - tableRowFromBeamRow + - tableRowToBeamRow + - toBeamRow + - toGenericAvroSchema + - toTableReference + - toTableRow + - toTableSchema + - writeCallMetric +BigqueryClient: + methods: + - createNewDataset + - createNewTable + - deleteDataset + - deleteTable + - getClient + - getNewBigqueryClient + - getTableResource + - insertDataToTable + - queryUnflattened + - queryWithRetries + - queryWithRetriesUsingStandardSql +BigqueryMatcher: + methods: + - create + - createQuery + - createQueryUsingStandardSql + - describeMismatchSafely + - describeTo + - getApplicationName + - getProjectId + - getQuery + - getUsingStandardSql + - queryResultHasChecksum +BigtableIO: + methods: + - advance + - close + - createReader + - expand + - finishBundle + - getBigtableOptions + - getCurrent + - getCurrentSource + - getEstimatedSizeBytes + - getFractionConsumed + - getMaxBufferElementCount + - getOutputCoder + - getRanges + - getRowFilter + - getSplitPointsConsumed + - getTableId + - populateDisplayData + - processElement + - read + - split + - splitAtFraction + - start + - startBundle + - tearDown + - toString + - validate + - withBigtableOptions + - withBigtableOptionsConfigurator + - withEmulator + - withInstanceId + - withKeyRange + - withKeyRanges + - withMaxBufferElementCount + - withProjectId + - withRowFilter + - withTableId + - withWriteResults + - withoutValidation + - write +BigtableRowToBeamRow: + methods: + - apply + - expand +BigtableRowToBeamRowFlat: + methods: + - apply + - expand +BigtableTable: + methods: + - buildIOReader + - buildIOWriter + - constructFilter + - isBounded +BigtableTableProvider: + methods: + - buildBeamSqlTable + - getTableType +BigtableUtils: + methods: + - booleanToByteArray + - byteString + - byteStringUtf8 + - doubleToByteArray + - floatToByteArray + - longToByteArray +BigtableWriteResult: + methods: + - create + - getRowsWritten 
+BigtableWriteResultCoder: + methods: + - decode + - encode + - getCoderProvider + - of +BitSetCoder: + methods: + - consistentWithEquals + - decode + - encode + - of + - verifyDeterministic +BlockBasedSource: + methods: + - getCurrent + - getCurrentBlock + - getCurrentBlockOffset + - getCurrentBlockSize + - getCurrentRecord + - getFractionConsumed + - getFractionOfBlockConsumed + - readNextBlock + - readNextRecord +BlockingCommitterImpl: + methods: + - close + - commitOffset +BooleanCoder: + methods: + - consistentWithEquals + - decode + - encode + - isRegisterByteSizeObserverCheap + - of +BoundedEventSource: + methods: + - advance + - close + - createReader + - getCurrent + - getCurrentSource + - getCurrentTimestamp + - getDefaultOutputCoder + - getEstimatedSizeBytes + - getFractionConsumed + - split + - splitAtFraction + - start + - validate +BoundedReadFromUnboundedSource: + methods: + - expand + - getKindString + - populateDisplayData + - process + - withMaxNumRecords + - withMaxReadTime +BoundedSideInputJoin: + methods: + - expand + - needsSideInput + - processElement +BoundedSideInputJoinModel: + methods: + - simulator +BoundedSource: + methods: + - createReader + - getCurrentSource + - getCurrentTimestamp + - getEstimatedSizeBytes + - getFractionConsumed + - getSplitPointsConsumed + - getSplitPointsRemaining + - split + - splitAtFraction + properties: + - SPLIT_POINTS_UNKNOWN +BoundedWindow: + methods: + - formatTimestamp + - maxTimestamp + - validateTimestampBounds + properties: + - TIMESTAMP_MAX_VALUE + - TIMESTAMP_MIN_VALUE +BroadcastHashJoinTranslator: + methods: + - processElement +BucketingFunction: + methods: + - add + - get + - isSignificant + - remove +BufferedElementCountingOutputStream: + methods: + - close + - finish + - flush + - markElementStart + - write + properties: + - DEFAULT_BUFFER_SIZE +BufferedExternalSorter: + methods: + - add + - create + - getExternalSorterType + - getMemoryMB + - getTempLocation + - options + - sort + - withExternalSorterType + - withMemoryMB + - withTempLocation +BufferingStreamObserver: + methods: + - getBufferSize + - getCause + - onCompleted + - onError + - onNext +Builders: {} +BuiltinHashFunctions: + methods: + - md5Bytes + - md5String + - sha1Bytes + - sha1String + - sha256Bytes + - sha256String + - sha512Bytes + - sha512String +BuiltinStringFunctions: + methods: + - endsWith + - fromHex + - lengthBytes + - lengthString + - lpad + - reverseBytes + - reverseString + - rpad + - startsWith + - toHex +BuiltinTrigonometricFunctions: + methods: + - cosh + - sinh + - tanh +BundleSplitter: {} +ByteArrayCoder: + methods: + - decode + - encode + - encodeAndOwn + - getEncodedTypeDescriptor + - isRegisterByteSizeObserverCheap + - of + - structuralValue + - verifyDeterministic +ByteBuddyUtils: + methods: + - appender + - apply + - clear + - containsKey + - containsValue + - convert + - createGetterConversions + - createSetterConversions + - createTypeConversion + - entrySet + - equals + - get + - getTransformingMap + - hashCode + - isEmpty + - isValid + - keySet + - prepare + - put + - putAll + - remove + - size + - subclassGetterInterface + - subclassSetterInterface + - toString + - transformContainer + - values +ByteCoder: + methods: + - consistentWithEquals + - decode + - encode + - getEncodedTypeDescriptor + - isRegisterByteSizeObserverCheap + - of + - verifyDeterministic +ByteKey: + methods: + - compareTo + - copyFrom + - equals + - getBytes + - getValue + - hashCode + - isEmpty + - of + - toString + properties: + - EMPTY +ByteKeyRange: + 
methods: + - containsKey + - equals + - estimateFractionForKey + - getEndKey + - getStartKey + - hashCode + - interpolateKey + - newTracker + - of + - overlaps + - split + - toString + - withEndKey + - withStartKey + properties: + - ALL_KEYS +ByteKeyRangeTracker: + methods: + - checkDone + - currentRestriction + - getFractionConsumed + - getProgress + - getRange + - getSplitPointsConsumed + - getStartPosition + - getStopPosition + - isBounded + - isDone + - markDone + - of + - toString + - tryClaim + - tryReturnRecordAt + - trySplit + - trySplitAtPosition +ByteMonitor: + methods: + - processElement +ByteStringCoder: + methods: + - consistentWithEquals + - decode + - encode + - getEncodedTypeDescriptor + - isRegisterByteSizeObserverCheap + - of + - verifyDeterministic +ByteStringOutputStream: + methods: + - size + - toByteString + - toByteStringAndReset + - toString + - write +ByteStringOutputStreamBenchmark: + methods: + - setup + - tearDown + - testCopyArray + - testNewArray + - testProtobufByteStringOutputStreamFewLargeWrites + - testProtobufByteStringOutputStreamFewMixedWritesWithReuse + - testProtobufByteStringOutputStreamFewMixedWritesWithoutReuse + - testProtobufByteStringOutputStreamFewSmallWrites + - testProtobufByteStringOutputStreamFewTinyWrites + - testProtobufByteStringOutputStreamManyLargeWrites + - testProtobufByteStringOutputStreamManyMixedWritesWithReuse + - testProtobufByteStringOutputStreamManyMixedWritesWithoutReuse + - testProtobufByteStringOutputStreamManySmallWrites + - testProtobufByteStringOutputStreamManyTinyWrites + - testSdkCoreByteStringOutputStreamFewLargeWrites + - testSdkCoreByteStringOutputStreamFewMixedWritesWithReuse + - testSdkCoreByteStringOutputStreamFewMixedWritesWithoutReuse + - testSdkCoreByteStringOutputStreamFewSmallWrites + - testSdkCoreByteStringOutputStreamFewTinyWrites + - testSdkCoreByteStringOutputStreamManyLargeWrites + - testSdkCoreByteStringOutputStreamManyMixedWritesWithReuse + - testSdkCoreByteStringOutputStreamManyMixedWritesWithoutReuse + - testSdkCoreByteStringOutputStreamManySmallWrites + - testSdkCoreByteStringOutputStreamManyTinyWrites + properties: + - src +BytesThroughputEstimator: + methods: + - addBytes + - get + - getBytes + - getFrom + - getSeconds + - getTimestamp + - update +CEPCall: + methods: + - getOperands + - getOperator + - of + - toString +CEPFieldRef: + methods: + - getAlpha + - getIndex + - of + - toString +CEPLiteral: + methods: + - compareTo + - equals + - getBoolean + - getByte + - getDateTime + - getDecimal + - getDouble + - getFloat + - getInt16 + - getInt32 + - getInt64 + - getString + - getTypeName + - hashCode + - of +CEPMeasure: + methods: + - getField + - getName + - getOperation + - getType +CEPOperation: + methods: + - of +CEPOperator: + methods: + - getCepKind + - of + - toString +CEPPattern: + methods: + - getPatternCondition + - getPatternVar + - getQuantifier + - of + - toString +CEPUtils: + methods: + - getCEPFieldRefFromParKeys + - getCEPPatternFromPattern + - getFieldRef + - getFieldType + - getRegexFromPattern + - makeOrderKeysFromCollation +Caches: + methods: + - clear + - computeIfAbsent + - describeStats + - equals + - eternal + - fromOptions + - getCache + - getWeight + - hashCode + - noop + - onRemoval + - peek + - put + - remove + - subCache + - toString + - weigh + properties: + - REFERENCE_SIZE + - ROOT +CachingFactory: + methods: + - create + - equals + - hashCode +CalcRelSplitter: + methods: + - canImplement + - execute + - getMaxUsingLevelOrdinals + - maxInputFor + - toString + - 
visitCall + - visitDynamicParam + - visitFieldAccess + - visitInputRef + - visitLiteral + - visitLocalRef +CalciteConnectionWrapper: + methods: + - abort + - clearWarnings + - close + - commit + - config + - createArrayOf + - createBlob + - createClob + - createNClob + - createPrepareContext + - createQuery + - createSQLXML + - createStatement + - createStruct + - execute + - executeQuery + - getAutoCommit + - getCatalog + - getClientInfo + - getHoldability + - getMetaData + - getNetworkTimeout + - getProperties + - getRootSchema + - getSchema + - getTransactionIsolation + - getTypeFactory + - getTypeMap + - getWarnings + - isClosed + - isReadOnly + - isValid + - isWrapperFor + - nativeSQL + - prepareCall + - prepareStatement + - releaseSavepoint + - rollback + - setAutoCommit + - setCatalog + - setClientInfo + - setHoldability + - setNetworkTimeout + - setReadOnly + - setSavepoint + - setSchema + - setTransactionIsolation + - setTypeMap + - unwrap +CalciteFactoryWrapper: + methods: + - newConnection + - newDatabaseMetaData + - newPreparedStatement + - newResultSet + - newResultSetMetaData + - newStatement +CalciteQueryPlanner: + methods: + - convertToBeamRel + - createPlanner + - defaultConfig + - getDef + - getNonCumulativeCost + - parse + properties: + - FACTORY + - SOURCE +CalciteUtils: + methods: + - isDateTimeType + - isStringType + - sqlTypeWithAutoCast + - toCalciteRowType + - toField + - toFieldType + - toRelDataType + - toSchema + - toSqlTypeName + properties: + - BIG_INT + - BOOLEAN + - CHAR + - DATE + - DECIMAL + - DOUBLE + - FLOAT + - IDENTIFIER + - INTEGER + - NULLABLE_DATE + - NULLABLE_TIME + - NULLABLE_TIMESTAMP + - NULLABLE_TIMESTAMP_WITH_LOCAL_TZ + - SMALL_INT + - TIME + - TIMESTAMP + - TIMESTAMP_WITH_LOCAL_TZ + - TIME_WITH_LOCAL_TZ + - TINY_INT + - VARBINARY + - VARCHAR +CalendarWindows: + methods: + - assignWindow + - beginningOnDay + - days + - getDayOfMonth + - getMonthOfYear + - getNumber + - getStartDate + - getTimeZone + - isCompatible + - months + - populateDisplayData + - verifyCompatibility + - weeks + - windowCoder + - withStartingDay + - withStartingMonth + - withStartingYear + - withTimeZone + - years +CancellableQueue: + methods: + - cancel + - put + - reset + - take +CannotProvideCoderException: + methods: + - getReason + - getRootCause +CassandraIO: + methods: + - build + - delete + - expand + - finishBundle + - process + - processElement + - read + - readAll + - setup + - teardown + - validate + - withCoder + - withConnectTimeout + - withConsistencyLevel + - withEntity + - withHosts + - withKeyspace + - withLocalDc + - withMapperFactoryFn + - withMinNumberOfSplits + - withPassword + - withPort + - withQuery + - withReadTimeout + - withRingRanges + - withTable + - withUsername + - write +Cast: + methods: + - accept + - accumulate + - apply + - castNumber + - castRow + - castValue + - create + - expand + - isDecimal + - isIntegral + - message + - narrowing + - of + - outputSchema + - path + - process + - toString + - validator + - verifyCompatibility + - widening +CastFunctionImpl: + methods: + - getImplementor + - getParameters + - implement +CategoryPrice: + methods: + - decode + - encode + - equals + - hashCode + - sizeInBytes + - structuralValue + - toString + - verifyDeterministic + properties: + - CODER + - category + - isLast + - price +CdapIO: + methods: + - expand + - read + - withCdapPlugin + - withCdapPluginClass + - withKeyClass + - withLocksDirPath + - withPluginConfig + - withValueClass + - write +ChangeStreamDao: + methods: + - 
changeStreamQuery +ChangeStreamMetrics: + methods: + - decActivePartitionReadCounter + - incActivePartitionReadCounter + - incDataRecordCounter + - incHeartbeatRecordCount + - incPartitionRecordCount + - incPartitionRecordMergeCount + - incPartitionRecordSplitCount + - incQueryCounter + - updateDataRecordCommittedToEmitted + - updatePartitionCreatedToScheduled + - updatePartitionScheduledToRunning + properties: + - ACTIVE_PARTITION_READ_COUNT + - DATA_RECORD_COMMITTED_TO_EMITTED_0MS_TO_1000MS_COUNT + - DATA_RECORD_COMMITTED_TO_EMITTED_1000MS_TO_3000MS_COUNT + - DATA_RECORD_COMMITTED_TO_EMITTED_3000MS_TO_INF_COUNT + - DATA_RECORD_COUNT + - HEARTBEAT_RECORD_COUNT + - PARTITION_CREATED_TO_SCHEDULED_MS + - PARTITION_RECORD_COUNT + - PARTITION_RECORD_MERGE_COUNT + - PARTITION_RECORD_SPLIT_COUNT + - PARTITION_SCHEDULED_TO_RUNNING_MS + - QUERY_COUNT +ChangeStreamRecordMapper: + methods: + - toChangeStreamRecords +ChangeStreamRecordMetadata: + methods: + - build + - equals + - getNumberOfRecordsRead + - getPartitionCreatedAt + - getPartitionEndTimestamp + - getPartitionRunningAt + - getPartitionScheduledAt + - getPartitionStartTimestamp + - getPartitionToken + - getQueryStartedAt + - getRecordReadAt + - getRecordStreamEndedAt + - getRecordStreamStartedAt + - getRecordTimestamp + - getTotalStreamTimeMillis + - hashCode + - newBuilder + - toString + - withNumberOfRecordsRead + - withPartitionCreatedAt + - withPartitionEndTimestamp + - withPartitionRunningAt + - withPartitionScheduledAt + - withPartitionStartTimestamp + - withPartitionToken + - withQueryStartedAt + - withRecordReadAt + - withRecordStreamEndedAt + - withRecordStreamStartedAt + - withRecordTimestamp + - withTotalStreamTimeMillis +ChangeStreamResultSet: + methods: + - close + - getCurrentRowAsStruct + - getMetadata + - getPgJsonb + - next +ChangeStreamResultSetMetadata: + methods: + - getNumberOfRecordsRead + - getQueryStartedAt + - getRecordReadAt + - getRecordStreamEndedAt + - getRecordStreamStartedAt + - getTotalStreamDuration +ChangeStreamsConstants: + properties: + - DEFAULT_CHANGE_STREAM_NAME + - DEFAULT_INCLUSIVE_END_AT + - DEFAULT_INCLUSIVE_START_AT + - DEFAULT_RPC_PRIORITY + - MAX_INCLUSIVE_END_AT + - SAMPLE_PARTITION + - THROUGHPUT_WINDOW_SECONDS +CheckpointMarkImpl: + methods: + - decode + - encode + - finalizeCheckpoint +ChildPartition: + methods: + - equals + - getParentTokens + - getToken + - hashCode + - toString +ChildPartitionsRecord: + methods: + - equals + - getChildPartitions + - getRecordSequence + - getRecordTimestamp + - getStartTimestamp + - hashCode + - toString +ChildPartitionsRecordAction: + methods: + - run +CivilTimeEncoder: + methods: + - decodePacked32TimeSeconds + - decodePacked32TimeSecondsAsJavaTime + - decodePacked64DatetimeMicros + - decodePacked64DatetimeMicrosAsJavaTime + - decodePacked64DatetimeSeconds + - decodePacked64DatetimeSecondsAsJavaTime + - decodePacked64TimeMicros + - decodePacked64TimeMicrosAsJavaTime + - decodePacked64TimeNanos + - decodePacked64TimeNanosAsJavaTime + - encodePacked32TimeSeconds + - encodePacked64DatetimeMicros + - encodePacked64DatetimeSeconds + - encodePacked64TimeMicros + - encodePacked64TimeNanos +ClassLoaderFileSystem: + methods: + - fromOptions + - getCurrentDirectory + - getFilename + - getScheme + - isDirectory + - resolve + properties: + - SCHEMA +CleanUpReadChangeStreamDoFn: + methods: + - processElement +ClickHouseIO: + methods: + - build + - expand + - finishBundle + - getTableSchema + - initialBackoff + - insertDeduplicate + - insertDistributedSync + - 
insertQuorum + - jdbcUrl + - maxCumulativeBackoff + - maxInsertBlockSize + - maxRetries + - processElement + - properties + - schema + - setup + - startBundle + - table + - tableSchema + - tearDown + - withInitialBackoff + - withInsertDeduplicate + - withInsertDistributedSync + - withInsertQuorum + - withMaxCumulativeBackoff + - withMaxInsertBlockSize + - withMaxRetries + - withTableSchema + - write + properties: + - DEFAULT_INITIAL_BACKOFF + - DEFAULT_MAX_CUMULATIVE_BACKOFF + - DEFAULT_MAX_INSERT_BLOCK_SIZE + - DEFAULT_MAX_RETRIES +ClickHouseWriter: {} +ClientConfiguration: + methods: + - build + - builder + - create + - credentialsProvider + - endpoint + - region + - retry + - toBuilder +CloudPubsubTransforms: + methods: + - ensureUsableAsCloudPubsub + - expand + - fromCloudPubsubMessages + - toCloudPubsubMessages +CloudVision: + methods: + - annotateImagesFromBytes + - annotateImagesFromBytesWithContext + - annotateImagesFromGcsUri + - annotateImagesFromGcsUriWithContext + - mapToRequest +CoGbkResult: + methods: + - and + - copy + - decode + - done + - empty + - encode + - equals + - fastForward + - getAll + - getCoderArguments + - getOnly + - getSchema + - getUnionCoder + - hasNext + - hashCode + - isEmpty + - iterator + - next + - observeAt + - of + - peek + - toString + - verifyDeterministic + properties: + - index + - value +CoGbkResultSchema: + methods: + - equals + - getIndex + - getTag + - getTupleTagList + - hashCode + - of + - size + - toString +CoGroup: + methods: + - crossProductJoin + - expand + - fieldAccessDescriptor + - fieldIds + - fieldNames + - join + - process + - withKeyField + - withOptionalParticipation + - withSideInput +CoGroupByKey: + methods: + - create + - expand + - processElement +CoGroupByKeyLoadTest: + methods: + - main + - processElement +Coder: + methods: + - consistentWithEquals + - decode + - encode + - equals + - getCoderArguments + - getEncodedTypeDescriptor + - getMessage + - getReasons + - hashCode + - isRegisterByteSizeObserverCheap + - nested + - registerByteSizeObserver + - structuralValue + - toString + - verifyDeterministic + properties: + - NESTED + - OUTER + - isWholeStream +CoderException: {} +CoderProperties: + methods: + - coderConsistentWithEquals + - coderConsistentWithEqualsInContext + - coderDecodeEncodeContentsEqual + - coderDecodeEncodeContentsEqualInContext + - coderDecodeEncodeContentsInSameOrder + - coderDecodeEncodeContentsInSameOrderInContext + - coderDecodeEncodeEqual + - coderDecodeEncodeEqualInContext + - coderDecodeEncodeInContext + - coderDecodesBase64 + - coderDecodesBase64ContentsEqual + - coderDeterministic + - coderDeterministicInContext + - coderEncodesBase64 + - coderSerializable + - getCount + - getMean + - getSum + - getSumAndReset + - reset + - structuralValueConsistentWithEquals + - structuralValueConsistentWithEqualsInContext + - structuralValueDecodeEncodeEqual + - structuralValueDecodeEncodeEqualInContext + - structuralValueDecodeEncodeEqualIterable + - structuralValueDecodeEncodeEqualIterableInContext + - testByteCount + properties: + - ALL_CONTEXTS +CoderProvider: + methods: + - coderFor +CoderProviders: + methods: + - coderFor + - forCoder + - fromStaticMethods + - toString +CoderRegistry: + methods: + - coderFor + - createDefault + - getCoder + - getOutputCoder + - getType + - registerCoderForClass + - registerCoderForType + - registerCoderProvider +CoderUtils: + methods: + - clone + - decodeFromBase64 + - decodeFromByteArray + - decodeFromByteString + - encodeToBase64 + - encodeToByteArray + - 
getCodedType +CollectionCoder: + methods: + - getEncodedTypeDescriptor + - of +ColumnType: + methods: + - equals + - getName + - getOrdinalPosition + - getType + - hashCode + - isPrimaryKey + - toString +Combine: + methods: + - accum + - addInput + - apply + - asSingletonView + - compact + - createAccumulator + - decode + - defaultValue + - encode + - equals + - expand + - extractOutput + - getAccumulatorCoder + - getAdditionalInputs + - getAppliedFn + - getCoderArguments + - getCombineFn + - getDefaultOutputCoder + - getFanout + - getFn + - getInputType + - getInsertDefault + - getNameOverride + - getOutputType + - getPipelineOptions + - getSideInputs + - globally + - groupedValues + - hashCode + - identity + - input + - isInsertDefault + - mergeAccumulators + - of + - perKey + - populateDisplayData + - processElement + - sideInput + - startBundle + - toString + - verifyDeterministic + - withFanout + - withHotKeyFanout + - withSideInputs + - withoutDefaults + properties: + - accum + - input +CombineContextFactory: + methods: + - createFromStateContext + - getPipelineOptions + - nullContext + - sideInput +CombineFnBase: + methods: + - getAccumTVariable + - getAccumulatorCoder + - getDefaultOutputCoder + - getIncompatibleGlobalWindowErrorMessage + - getInputTVariable + - getOutputTVariable + - populateDisplayData +CombineFnTester: + methods: + - testCombineFn +CombineFnUtil: + methods: + - addInput + - bindContext + - compact + - createAccumulator + - defaultValue + - extractOutput + - getAccumulatorCoder + - getDefaultOutputCoder + - mergeAccumulators + - populateDisplayData + - toFnWithContext +CombineFns: + methods: + - addInput + - compact + - compose + - createAccumulator + - decode + - encode + - equals + - extractOutput + - get + - getAccumulatorCoder + - getCoderArguments + - hasNext + - hashCode + - iterator + - mergeAccumulators + - next + - populateDisplayData + - remove + - verifyDeterministic + - with +CombineLoadTest: + methods: + - apply + - getPerKeyCombiner + - main +CombineRunners: + methods: + - createRunnerForPTransform + - getPTransformRunnerFactories +CombineWithContext: + methods: + - addInput + - apply + - compact + - createAccumulator + - defaultValue + - extractOutput + - getPipelineOptions + - mergeAccumulators + - sideInput +CompletableFutureInboundDataClient: + methods: + - awaitCompletion + - cancel + - complete + - create + - fail + - isDone + - runWhenComplete +CompositeOperatorTranslator: + methods: + - canTranslate + - translate +CompositeProvider: + methods: + - findTranslator + - of +CompositeUnaryFunction: + methods: + - apply + - of +CompressedSource: + methods: + - allowsDynamicSplitting + - close + - createDecompressingChannel + - from + - getChannelFactory + - getCount + - getCurrent + - getCurrentTimestamp + - getOutputCoder + - getSplitPointsConsumed + - getSplitPointsRemaining + - isCompressed + - isOpen + - matches + - populateDisplayData + - read + - validate + - withCompression + - withDecompression +ConfigWrapper: + methods: + - build + - fromJsonFile + - fromJsonString + - setParam + - withParams +ConfluentSchemaRegistryDeserializerProvider: + methods: + - getCoder + - getDeserializer + - of +ConnectionManager: {} +ConsoleResultPublisher: + methods: + - publish +Contextful: + methods: + - fn + - getClosure + - getRequirements + - of + - sideInput + - toString + - wrapProcessContext +ContextualTextIO: + methods: + - apply + - compare + - expand + - from + - populateDisplayData + - processElement + - read + - readFiles + - withCompression + - 
withDelimiter + - withEmptyMatchTreatment + - withHasMultilineCSVRecords + - withHintMatchesManyFiles + - withMatchConfiguration + - withRecordNumMetadata +ConversionContext: + methods: + - of +Convert: + methods: + - expand + - fromRows + - processElement + - to + - toRows +ConvertHelpers: + methods: + - appender + - getConvertPrimitive + - getConvertedSchemaInformation + - prepare + properties: + - outputSchemaCoder + - unboxedType +Count: + methods: + - addInput + - apply + - combineFn + - createAccumulator + - decode + - encode + - equals + - expand + - extractOutput + - getAccumulatorCoder + - getIncompatibleGlobalWindowErrorMessage + - globally + - hashCode + - isRegisterByteSizeObserverCheap + - mergeAccumulators + - perElement + - perKey +CountByKey: + methods: + - accumulationMode + - expand + - keyBy + - named + - of + - output + - triggeredBy + - windowBy + - withAllowedLateness + - withOnTimeBehavior + - withTimestampCombiner +CountIf: + methods: + - addInput + - combineFn + - createAccumulator + - extractOutput + - getAccumulatorCoder + - mergeAccumulators +CountingSource: + methods: + - advance + - apply + - close + - createReader + - createSourceForSubrange + - equals + - finalizeCheckpoint + - getBytesPerOffset + - getCheckpointMark + - getCheckpointMarkCoder + - getCurrent + - getCurrentSource + - getCurrentTimestamp + - getLastEmitted + - getMaxEndOffset + - getOutputCoder + - getSplitBacklogBytes + - getSplitPointsRemaining + - getStartTime + - getWatermark + - hashCode + - split + - start + - unbounded + - unboundedWithTimestampFn + - upTo + - withRate + - withTimestampFn +CovarianceFn: + methods: + - addInput + - createAccumulator + - extractOutput + - getAccumulatorCoder + - mergeAccumulators + - newPopulation + - newSample +CrashingRunner: + methods: + - fromOptions + - run +Create: + methods: + - apply + - close + - createReader + - createSourceForSubrange + - empty + - expand + - fromIterable + - getBytesPerOffset + - getCurrent + - getCurrentSource + - getElements + - getEstimatedSizeBytes + - getMaxEndOffset + - getOutputCoder + - of + - ofProvider + - processElement + - timestamped + - withCoder + - withRowSchema + - withSchema + - withType +CreateOptions: + methods: + - build + - builder + - expectFileToNotExist + - mimeType + - setExpectFileToNotExist + - setMimeType +CreateTableDestinations: + methods: + - expand + - processElement + - startBundle +CreateTableHelpers: {} +CreateTables: + methods: + - expand + - processElement + - startBundle +CrossLanguageConfiguration: + methods: + - getDataSourceConfiguration + - getDatabase + - getOAuthToken + - getPassword + - getPrivateKeyPassphrase + - getPrivateKeyPath + - getQuery + - getRawPrivateKey + - getRole + - getSchema + - getServerName + - getStagingBucketName + - getStorageIntegrationName + - getTable + - getUsername + - getWarehouse + - setDatabase + - setOAuthToken + - setPassword + - setPrivateKeyPassphrase + - setPrivateKeyPath + - setQuery + - setRawPrivateKey + - setRole + - setSchema + - setServerName + - setStagingBucketName + - setStorageIntegrationName + - setTable + - setUsername + - setWarehouse +CsvToRow: + methods: + - expand + - getCsvFormat +CustomCoder: + methods: + - getCoderArguments + - verifyDeterministic +CustomHttpErrors: + methods: + - addErrorForCode + - addErrorForCodeAndUrlContains + - build + - getCustomError + - getMatcher +CustomTimestampPolicyWithLimitedDelay: + methods: + - getTimestampForRecord + - getWatermark +CustomX509TrustManager: + methods: + - checkClientTrusted + - 
checkServerTrusted + - getAcceptedIssuers +Customer: + methods: + - equals + - getCountryOfResidence + - getId + - getName + - hashCode + - setCountryOfResidence + - setId + - setName + - toString +DLPDeidentifyText: + methods: + - build + - expand + - getBatchSizeBytes + - getColumnDelimiter + - getDeidentifyConfig + - getDeidentifyTemplateName + - getHeaderColumns + - getInspectConfig + - getInspectTemplateName + - getProjectId + - newBuilder + - processElement + - setBatchSizeBytes + - setColumnDelimiter + - setDeidentifyConfig + - setDeidentifyTemplateName + - setHeaderColumns + - setInspectConfig + - setInspectTemplateName + - setProjectId + - setup + - teardown + properties: + - DLP_PAYLOAD_LIMIT_BYTES +DLPInspectText: + methods: + - build + - expand + - getBatchSizeBytes + - getColumnDelimiter + - getHeaderColumns + - getInspectConfig + - getInspectTemplateName + - getProjectId + - newBuilder + - processElement + - setBatchSizeBytes + - setColumnDelimiter + - setHeaderColumns + - setInspectConfig + - setInspectTemplateName + - setProjectId + - setup + - teardown + properties: + - DLP_PAYLOAD_LIMIT_BYTES +DLPReidentifyText: + methods: + - build + - expand + - getBatchSizeBytes + - getColumnDelimiter + - getHeaderColumns + - getInspectConfig + - getInspectTemplateName + - getProjectId + - getReidentifyConfig + - getReidentifyTemplateName + - newBuilder + - processElement + - setBatchSizeBytes + - setColumnDelimiter + - setHeaderColumns + - setInspectConfig + - setInspectTemplateName + - setProjectId + - setReidentifyConfig + - setReidentifyTemplateName + - setup + - teardown + properties: + - DLP_PAYLOAD_LIMIT_BYTES +DaoFactory: + methods: + - getChangeStreamDao + - getPartitionMetadataAdminDao + - getPartitionMetadataDao +DataCatalogPipelineOptionsRegistrar: + methods: + - getPipelineOptions +DataCatalogTableProvider: + methods: + - buildBeamSqlTable + - close + - create + - createDataCatalogClient + - createTable + - dropTable + - getTable + - getTableByFullName + - getTableType + - getTables + - setSchemaIfNotPresent +DataChangeRecord: + methods: + - equals + - getCommitTimestamp + - getMetadata + - getModType + - getMods + - getNumberOfPartitionsInTransaction + - getNumberOfRecordsInTransaction + - getPartitionToken + - getRecordSequence + - getRecordTimestamp + - getRowType + - getServerTransactionId + - getTableName + - getTransactionTag + - getValueCaptureType + - hashCode + - isLastRecordInTransactionInPartition + - isSystemTransaction + - toString +DataChangeRecordAction: + methods: + - run +DataEndpoint: + methods: + - create + - getCoder + - getReceiver + - getTransformId +DataStoreV1SchemaIOProvider: + methods: + - buildReader + - buildWriter + - configurationSchema + - expand + - from + - getKind + - getProjectId + - identifier + - isBounded + - requiresDataSchema + - schema + properties: + - KEY_FIELD_PROPERTY +DataStoreV1TableProvider: + methods: + - getSchemaIOProvider + - getTableStatistics + - getTableType +DataStreams: + methods: + - close + - decodeFromChunkBoundaryToChunkBoundary + - delimitElement + - hasNext + - isEof + - isReady + - next + - outbound + - prefetch + - read + - remove + - write + properties: + - DEFAULT_OUTBOUND_BUFFER_LIMIT_BYTES +DataframeTransform: + methods: + - expand + - of + - withExpansionService + - withIndexes +DatastoreIO: + methods: + - v1 +DatastoreV1: + methods: + - addRequestLatency + - apply + - deleteEntity + - deleteKey + - expand + - finishBundle + - from + - getDatastore + - getLiteralGqlQuery + - getLocalhost + - getNamespace 
+ - getNamespaceValueProvider + - getNumEntities + - getNumQuerySplits + - getProjectId + - getProjectValueProvider + - getQuery + - getQuerySplitter + - getReadTime + - nextBatchSize + - populateDisplayData + - processElement + - read + - start + - startBundle + - toString + - withHintNumWorkers + - withLiteralGqlQuery + - withLocalhost + - withNamespace + - withNumQuerySplits + - withProjectId + - withQuery + - withRampupThrottlingDisabled + - withReadTime + - write + properties: + - NUM_QUERY_SPLITS_MAX +Date: + methods: + - getArgument + - getArgumentType + - getBaseType + - getIdentifier + - toBaseType + - toInputType + properties: + - IDENTIFIER +DateFunctions: + methods: + - date +DateTime: + methods: + - getArgument + - getArgumentType + - getBaseType + - getIdentifier + - toBaseType + - toInputType + properties: + - DATETIME_SCHEMA + - DATE_FIELD_NAME + - IDENTIFIER + - TIME_FIELD_NAME +DateTimeUtils: + methods: + - findDateTimePattern + - formatTimestampWithTimeZone + - parseDate + - parseDateToValue + - parseTime + - parseTimeToValue + - parseTimestampWithLocalTimeZone + - parseTimestampWithTZToValue + - parseTimestampWithTimeZone + - parseTimestampWithUTCTimeZone + - parseTimestampWithoutTimeZone + - validateTimeInterval + - validateTimestamp + properties: + - MAX_UNIX_MILLIS + - MILLIS_PER_DAY + - MIN_UNIX_MILLIS +DeadLetteredTransform: + methods: + - expand +DebeziumIO: + methods: + - create + - expand + - getConfigurationMap + - read + - readAsJson + - withCoder + - withConnectionProperties + - withConnectionProperty + - withConnectorClass + - withConnectorConfiguration + - withFormatFunction + - withHostName + - withMaxNumberOfRecords + - withPassword + - withPort + - withSourceConnector + - withUsername +DebeziumTransformRegistrar: + methods: + - buildExternal + - knownBuilders + - setConnectionProperties + - setConnectorClass + - setHost + - setMaxNumberOfRecords + - setPassword + - setPort + - setUsername + properties: + - READ_JSON_URN +DecodingFnDataReceiver: + methods: + - accept + - create +Deduplicate: + methods: + - apply + - expand + - keyedValues + - onExpiry + - processElement + - values + - withDuration + - withRepresentativeCoder + - withRepresentativeType + - withRepresentativeValueFn + - withTimeDomain + properties: + - DEFAULT_DURATION + - DEFAULT_TIME_DOMAIN +DefaultAutoscaler: + methods: + - getTotalBacklogBytes + - start + - stop +DefaultBlobstoreClientBuilderFactory: + methods: + - createBuilder +DefaultFilenamePolicy: + methods: + - decode + - encode + - equals + - fromParams + - fromStandardParameters + - hashCode + - of + - populateDisplayData + - toString + - unwindowedFilename + - windowedFilename + - withBaseFilename + - withShardTemplate + - withSuffix + - withWindowedWrites + properties: + - DEFAULT_UNWINDOWED_SHARD_TEMPLATE + - DEFAULT_WINDOWED_SHARD_TEMPLATE +DefaultPipelineOptionsRegistrar: + methods: + - getPipelineOptions +DefaultS3ClientBuilderFactory: + methods: + - createBuilder +DefaultS3FileSystemSchemeRegistrar: + methods: + - fromOptions +DefaultTableFilter: + methods: + - getNotSupported + - numSupported +DefaultTrigger: + methods: + - getWatermarkThatGuaranteesFiring + - isCompatible + - mayFinish + - of +DelegateCoder: + methods: + - decode + - encode + - equals + - getCoder + - getEncodedTypeDescriptor + - hashCode + - of + - structuralValue + - toString + - verifyDeterministic +DelegatingCounter: + methods: + - dec + - getName + - inc +DelegatingDistribution: + methods: + - getName + - update +DelegatingHistogram: + methods: + - 
getName + - update +DequeCoder: + methods: + - consistentWithEquals + - getEncodedTypeDescriptor + - of + - structuralValue + - verifyDeterministic +DetectNewPartitionsAction: + methods: + - run +DetectNewPartitionsDoFn: + methods: + - getInitialWatermarkEstimatorState + - getSize + - initialRestriction + - newTracker + - newWatermarkEstimator + - processElement + - setAveragePartitionBytesSize + - setup +DetectNewPartitionsRangeTracker: + methods: + - tryClaim +DicomIO: + methods: + - expand + - finishSpecifyingOutput + - getFailedReads + - getPipeline + - getReadResponse + - instantiateHealthcareClient + - processElement + - readStudyMetadata + properties: + - ERROR_MESSAGE + - METADATA +DirectStreamObserver: + methods: + - onCompleted + - onError + - onNext +DisplayData: + methods: + - absolute + - add + - addIfNotDefault + - addIfNotNull + - asMap + - build + - delegate + - equals + - extend + - from + - getComponents + - getKey + - getLabel + - getLinkUrl + - getNamespace + - getPath + - getShortValue + - getType + - getValue + - hashCode + - include + - inferType + - item + - items + - none + - of + - root + - setKey + - setLabel + - setLinkUrl + - setNamespace + - setShortValue + - setType + - setValue + - toString + - withLabel + - withLinkUrl + - withNamespace +Distinct: + methods: + - accumulationMode + - apply + - create + - expand + - named + - of + - output + - processElement + - projected + - triggeredBy + - windowBy + - withAllowedLateness + - withOnTimeBehavior + - withRepresentativeType + - withRepresentativeValueFn + - withTimestampCombiner +DistributionResult: + methods: + - create + - getCount + - getMax + - getMean + - getMin + - getSum + properties: + - IDENTITY_ELEMENT +DlqProvider: + methods: + - expand + - identifier + - newDlqTransform +DoFn: + methods: + - element + - fireTimestamp + - getAllowedTimestampSkew + - getInputTypeDescriptor + - getOutputTypeDescriptor + - getPipelineOptions + - output + - outputWithTimestamp + - pane + - populateDisplayData + - prepareForProcessing + - resume + - resumeDelay + - shouldResume + - sideInput + - stop + - timeDomain + - timestamp + - window + - withResumeDelay +DoFnInfo: + methods: + - forFn + - getDoFn + - getDoFnSchemaInformation + - getInputCoder + - getMainOutput + - getOutputCoders + - getSideInputMapping + - getSideInputViews + - getWindowingStrategy + - withFn +DoFnInvokers: + methods: + - getErrorContext + - invokerFor + - pipelineOptions + - tryInvokeSetupFor +DoFnOutputReceivers: + methods: + - get + - getRowReceiver + - output + - outputWithTimestamp + - rowReceiver + - windowedMultiReceiver + - windowedReceiver +DoFnSchemaInformation: + methods: + - apply + - create + - getElementConverters + - getFieldAccessDescriptor + - of + - toBuilder +DoFnSignature: + methods: + - alwaysFetched + - boundedWindow + - build + - bundleFinalizer + - coderT + - dispatch + - elementParameter + - elementT + - estimatorStateT + - estimatorT + - extraParameters + - field + - fieldAccessDeclarations + - fieldAccessString + - finishBundle + - finishBundleContext + - fnClass + - getInitialRestriction + - getInitialWatermarkEstimatorState + - getMainOutputReceiver + - getRestrictionCoder + - getSchemaElementParameters + - getSideInputParameters + - getSize + - getWatermarkEstimatorStateCoder + - hasReturnValue + - id + - index + - isBoundedPerElement + - isRowReceiver + - isSplittable + - isStateful + - keyT + - match + - newTracker + - newWatermarkEstimator + - onTimerContext + - onTimerFamilyMethods + - onTimerMethods + - 
onWindowExpiration + - onWindowExpirationContext + - outputReceiverParameter + - paneInfoParameter + - pipelineOptions + - processContext + - processElement + - referent + - requiresStableInput + - requiresTimeSortedInput + - restrictionParameter + - restrictionT + - restrictionTracker + - schemaElementParameter + - setElementT + - setFieldAccessString + - setIndex + - setSideInputId + - setup + - sideInputId + - sideInputParameter + - splitRestriction + - startBundle + - startBundleContext + - stateDeclarations + - stateParameter + - stateType + - taggedOutputReceiverParameter + - targetMethod + - teardown + - timeDomainParameter + - timerDeclarations + - timerFamilyDeclarations + - timerFamilyParameter + - timerIdParameter + - timerParameter + - timestampParameter + - toBuilder + - trackerT + - truncateRestriction + - usesState + - usesTimers + - watermarkEstimator + - watermarkEstimatorState + - watermarkEstimatorStateT + - watermarkEstimatorT + - windowT + properties: + - PREFIX +DoFnSignatures: + methods: + - addFieldAccessDeclaration + - addFieldAccessDeclarations + - addParameter + - addStateDeclaration + - addStateDeclarations + - addTimerDeclaration + - addTimerDeclarations + - addTimerFamilyDeclaration + - addTimerFamilyDeclarations + - checkArgument + - checkNotNull + - create + - findParameter + - findParameters + - getAnnotations + - getExtraParameters + - getFieldAccessDeclarations + - getIndex + - getMethod + - getSignature + - getStateDeclarations + - getStateParameters + - getStateSpecOrThrow + - getTimerDeclarations + - getTimerFamilyDeclarations + - getTimerFamilyParameters + - getTimerFamilySpecOrThrow + - getTimerParameters + - getTimerSpecOrThrow + - getType + - hasParameter + - isSplittable + - isStateful + - of + - requiresTimeSortedInput + - setParameter + - signatureForDoFn + - usesBagState + - usesBundleFinalizer + - usesMapState + - usesOrderedListState + - usesSetState + - usesState + - usesTimers + - usesValueState + - usesWatermarkHold +DoFnTester: + methods: + - clearOutputElements + - close + - createProcessContext + - dispatch + - element + - finishBundle + - finishBundleContext + - get + - getCloningBehavior + - getErrorContext + - getMainOutputTag + - getMutableOutput + - getPipelineOptions + - key + - of + - output + - outputReceiver + - outputWithTimestamp + - pane + - paneInfo + - peekOutputElements + - peekOutputElementsInWindow + - peekOutputElementsWithTimestamp + - pipelineOptions + - processBundle + - processContext + - processElement + - processTimestampedElement + - processWindowedElement + - restriction + - restrictionTracker + - setCloningBehavior + - setSideInput + - setSideInputs + - sideInput + - startBundle + - startBundleContext + - taggedOutputReceiver + - takeOutputElements + - takeOutputElementsWithTimestamp + - timeDomain + - timerId + - timestamp + - window +DoFnWithExecutionInformation: + methods: + - getDoFn + - getMainOutputTag + - getSchemaInformation + - getSideInputMapping + - of +Done: + methods: + - decode + - encode + - equals + - hashCode + - sizeInBytes + - structuralValue + - toString + - verifyDeterministic + properties: + - CODER +DoubleCoder: + methods: + - consistentWithEquals + - decode + - encode + - getEncodedTypeDescriptor + - isRegisterByteSizeObserverCheap + - of + - verifyDeterministic +DropFields: + methods: + - expand + - fields +DurationCoder: + methods: + - consistentWithEquals + - decode + - encode + - getEncodedTypeDescriptor + - isRegisterByteSizeObserverCheap + - of + - registerByteSizeObserver + - 
verifyDeterministic +DynamicAvroDestinations: + methods: + - getCodec + - getDatumWriterFactory + - getMetadata + - getSchema +DynamicDestinations: + methods: + - getDestination + - getDestinationCoder + - getSchema + - getSideInputs + - getTable + - sideInput +DynamicFileDestinations: + methods: + - constant + - formatRecord + - getDefaultDestination + - getDestination + - getDestinationCoder + - getFilenamePolicy + - populateDisplayData + - toDefaultPolicies +DynamicProtoCoder: + methods: + - coderFor + - equals + - getCoderProvider + - hashCode + - of + - withExtensionsFrom + properties: + - serialVersionUID +DynamoDBIO: + methods: + - apply + - build + - builder + - create + - expand + - finishBundle + - items + - processElement + - read + - setMaxAttempts + - setMaxDuration + - setup + - startBundle + - tearDown + - test + - withAwsClientsProvider + - withClientConfiguration + - withCoder + - withDeduplicateKeys + - withDynamoDbClientProvider + - withRetryConfiguration + - withScanRequestFn + - withScanResponseMapperFn + - withScanResultMapperFn + - withWriteRequestMapperFn + - write +ElasticsearchIO: + methods: + - advance + - bulkIO + - close + - closeClient + - create + - createReader + - customizeRequestConfig + - decode + - docToBulk + - encode + - expand + - finishBundle + - fromSpec + - getAddresses + - getAllowedTimestampSkew + - getApiKey + - getApiPrefix + - getBearerToken + - getBulkDirective + - getBulkEndPoint + - getBulkIO + - getConnectTimeout + - getCountEndPoint + - getCurrent + - getCurrentSource + - getDocToBulk + - getEstimatedSizeBytes + - getHasError + - getIndex + - getInputDoc + - getKeystorePassword + - getKeystorePath + - getOutputCoder + - getPassword + - getPrefixedEndpoint + - getResponseItemJson + - getSearchEndPoint + - getSocketTimeout + - getTimestamp + - getType + - getUsername + - isTrustSelfSignedCerts + - of + - output + - populateDisplayData + - processElement + - read + - serialize + - setup + - split + - start + - startBundle + - test + - validate + - withAllowableResponseErrors + - withApiKey + - withAppendOnly + - withBackendVersion + - withBatchSize + - withBearerToken + - withBulkDirective + - withConnectTimeout + - withConnectionConfiguration + - withDocVersionFn + - withDocVersionType + - withHasError + - withIdFn + - withIgnoreVersionConflicts + - withIndexFn + - withInputDoc + - withIsDeleteFn + - withKeystorePassword + - withKeystorePath + - withMaxBatchSize + - withMaxBatchSizeBytes + - withMaxBufferingDuration + - withMaxParallelRequests + - withMaxParallelRequestsPerWindow + - withMetadata + - withPassword + - withQuery + - withResponseItemJson + - withRetryConfiguration + - withRoutingFn + - withScrollKeepalive + - withSocketTimeout + - withThrowWriteErrors + - withTimestamp + - withTrustSelfSignedCerts + - withTypeFn + - withUpsertScript + - withUsePartialUpdate + - withUseStatefulBatches + - withUsername + - write + properties: + - FAILED_WRITES + - SUCCESSFUL_WRITES +ElementByteSizeObservableIterable: + methods: + - addObserver + - iterator +ElementByteSizeObservableIterator: {} +ElementByteSizeObserver: + methods: + - advance + - getIsLazy + - setLazy + - setScalingFactor + - update +EmptyOnDeserializationThreadLocal: {} +EncodableThrowable: + methods: + - equals + - forThrowable + - hashCode + - throwable +EncodedBoundedWindow: + methods: + - consistentWithEquals + - decode + - encode + - forEncoding + - getEncodedWindow + - isRegisterByteSizeObserverCheap + - maxTimestamp + properties: + - INSTANCE +EncodingException: {} 
+EntityToRow: + methods: + - create + - expand + - processElement +EnumerationType: + methods: + - create + - equals + - getArgument + - getArgumentType + - getBaseType + - getIdentifier + - getValue + - getValues + - getValuesMap + - hashCode + - toBaseType + - toInputType + - toString + - valueOf + properties: + - IDENTIFIER +Event: + methods: + - decode + - encode + - equals + - hasAnnotation + - hashCode + - sizeInBytes + - structuralValue + - toString + - verifyDeterministic + - withAnnotation + properties: + - CODER + - bid + - newAuction + - newPerson +ExecutionStateSampler: + methods: + - activate + - create + - deactivate + - getCurrentThreadsPTransformId + - getLastTransitionTimeMillis + - getPTransformId + - getPTransformUniqueName + - getStatus + - getTrackedThread + - reset + - start + - stop + - takeSample + - updateFinalMonitoringData + - updateIntermediateMonitoringData + - updateMonitoringData +ExecutionStateSamplerBenchmark: + methods: + - setup + - tearDown + - testLargeBundleHarnessStateSampler + - testLargeBundleRunnersCoreStateSampler + - testTinyBundleHarnessStateSampler + - testTinyBundleRunnersCoreStateSampler + properties: + - sampler + - state1 + - state2 + - state3 + - tracker +ExpansionServer: + methods: + - close + - create + - getHost + - getPort +ExpansionService: + methods: + - close + - discoverSchemaTransform + - expand + - fromOptions + - getDependencies + - getTransform + - knownTransforms + - main + - payloadToConfig + - run +ExpansionServiceSchemaTransformProvider: + methods: + - createInput + - extractOutputs + - getTransform + - of +ExplicitShardedFile: + methods: + - readFilesWithRetries + - toString +ExposedByteArrayInputStream: + methods: + - close + - readAll +ExposedByteArrayOutputStream: + methods: + - reset + - toByteArray + - write + - writeAndOwn +ExpressionConverter: + methods: + - convertRelNodeToRexRangeRef + - convertResolvedLiteral + - convertRexNodeFromResolvedExpr + - convertTableValuedFunction + - indexOfProjectionColumnRef + - retrieveRexNode + - retrieveRexNodeFromOrderByScan + - trueLiteral +ExternalRead: + methods: + - buildExternal + - knownBuilders + - setIdLabel + - setSubscription + - setTimestampAttribute + - setTopic + - setWithAttributes + properties: + - URN +ExternalSchemaIOTransformRegistrar: + methods: + - buildExternal + - knownBuilderInstances + - setConfig + - setDataSchema + - setLocation +ExternalSorter: + methods: + - create + - getMemoryMB + - getSorterType + - getTempLocation + - setMemoryMB + - setSorterType + - setTempLocation +ExternalSqlTransformRegistrar: + methods: + - buildExternal + - knownBuilders + - setDialect + - setQuery +ExternalTransformRegistrarImpl: + methods: + - knownBuilderInstances + properties: + - READ_URN + - WRITE_URN +ExternalWorkerService: + methods: + - close + - main + - start + - startWorker + - stopWorker +ExternalWrite: + methods: + - buildExternal + - knownBuilders + - setIdLabel + - setTimestampAttribute + - setTopic + properties: + - URN +FailsafeValueInSingleWindow: + methods: + - decode + - encode + - getCoderArguments + - getComponents + - getFailsafeValue + - getPane + - getTimestamp + - getValue + - getWindow + - of + - verifyDeterministic +Failure: + methods: + - build + - getError + - getPayload + - newBuilder + - setError + - setPayload +FailureCollectorWrapper: + methods: + - addFailure + - getOrThrowException + - getValidationFailures +FakeBigQueryServices: + methods: + - cancel + - convertNumbers + - decodeQueryResult + - encodeQueryResult + - getDatasetService + - 
getJobService + - getStorageClient + - iterator + - withDatasetService + - withJobService + - withStorageClient +FakeDatasetService: + methods: + - appendRows + - close + - commitWriteStreams + - createDataset + - createTable + - createWriteStream + - deleteDataset + - deleteTable + - failOnInsert + - finalizeWriteStream + - flush + - getAllIds + - getAllRows + - getDataset + - getInsertCount + - getStreamAppendClient + - getTable + - getTableImpl + - insertAll + - isTableEmpty + - patchTableDescription + - pin + - setShouldFailRow + - setUp + - unpin + - updateTableSchema +FakeJobService: + methods: + - close + - dryRunQuery + - expectDryRunQuery + - getAllJobs + - getJob + - getNumExtractJobCalls + - pollJob + - setNumFailuresExpected + - setUp + - startCopyJob + - startExtractJob + - startLoadJob + - startQueryJob +FhirBundleParameter: + methods: + - getBundle + - getMetadata + - of +FhirBundleResponse: + methods: + - getFhirBundleParameter + - getResponse + - of +FhirIO: + methods: + - addToFile + - closeFile + - deidentify + - delete + - executeBundles + - expand + - exportResources + - fhirStoresImport + - finishSpecifyingOutput + - getFailedBodies + - getFailedBundles + - getFailedFiles + - getFailedReads + - getFailedSearches + - getFhirStore + - getKeyedResources + - getPatientEverything + - getPipeline + - getResources + - getSuccessfulBodies + - getSuccessfulBundles + - importBatch + - importResources + - in + - init + - initBatch + - initClient + - initFile + - instantiateHealthcareClient + - process + - processElement + - readResources + - searchResources + - searchResourcesWithGenericParameters + properties: + - DEAD_LETTER + - FAILED_BODY + - FAILED_BUNDLES + - FAILED_FILES + - OUT + - SUCCESSFUL_BODY + - SUCCESSFUL_BUNDLES + - TEMP_FILES +FhirIOPatientEverything: + methods: + - expand + - finishSpecifyingOutput + - getFailedReads + - getPatientCompartments + - getPipeline + - instantiateHealthcareClient + - processElement + properties: + - DEAD_LETTER + - OUT +FhirSearchParameter: + methods: + - equals + - getKey + - getQueries + - getResourceType + - hashCode + - of + - toString +FhirSearchParameterCoder: + methods: + - decode + - encode + - of +FieldAccessDescriptor: + methods: + - build + - builder + - create + - fieldIdsAccessed + - fieldNamesAccessed + - getAllFields + - getFieldId + - getFieldName + - getFieldRename + - getFieldsAccessed + - getKind + - getList + - getMap + - getNestedFieldsAccessed + - getQualifiers + - nestedFieldsById + - nestedFieldsByName + - of + - referencesSingleField + - resolve + - setFieldId + - setFieldName + - setFieldRename + - setQualifiers + - toString + - union + - withAllFields + - withFieldIds + - withFieldNameAs + - withFieldNames + - withFieldNamesAs + - withFields + - withNestedField + - withNestedFieldAs +FieldAccessDescriptorParser: + methods: + - getQualifiers + - parse + - visitArrayQualifierList + - visitDotExpression + - visitFieldSpecifier + - visitMapQualifierList + - visitQualifiedComponent + - visitQualifyComponent + - visitSimpleIdentifier + - visitWildcard +FieldTypeDescriptors: + methods: + - fieldTypeForJavaType + - javaTypeForFieldType +FieldValueTypeInformation: + methods: + - forField + - forGetter + - forOneOf + - forSetter + - getElementType + - getField + - getMapKeyType + - getMapValueType + - getMethod + - getName + - getNameOverride + - getNumber + - getNumberOverride + - getOneOfTypes + - getRawType + - getType + - isNullable + - setElementType + - setField + - setMapKeyType + - setMapValueType + - 
setMethod + - setName + - setNullable + - setNumber + - setOneOfTypes + - setRawType + - setType + - withName +FileBasedSink: + methods: + - apply + - cleanup + - close + - convertToFileResourceIfPossible + - create + - createWriteOperation + - createWriter + - decode + - encode + - formatRecord + - fromCanonical + - getCoderArguments + - getComponents + - getDefaultDestination + - getDestination + - getDestinationCoder + - getDestinationFile + - getDynamicDestinations + - getFilenamePolicy + - getMimeType + - getOutputFile + - getPaneInfo + - getShard + - getSideInputs + - getSink + - getSuggestedFilenameSuffix + - getTempDirectory + - getTempDirectoryProvider + - getTempFilename + - getWindow + - getWriteOperation + - of + - open + - populateDisplayData + - removeTemporaryFiles + - setWindowedWrites + - sideInput + - toString + - unwindowedFilename + - validate + - verifyDeterministic + - windowedFilename + - withShard + - write +FileBasedSource: + methods: + - advance + - allowsDynamicSplitting + - close + - createReader + - createSourceForSubrange + - getCurrent + - getCurrentSource + - getCurrentTimestamp + - getEmptyMatchTreatment + - getEstimatedSizeBytes + - getFileOrPatternSpec + - getFileOrPatternSpecProvider + - getFractionConsumed + - getMaxEndOffset + - getMode + - getSingleFileMetadata + - populateDisplayData + - split + - splitAtFraction + - start + - toString + - validate +FileChecksumMatcher: + methods: + - describeMismatchSafely + - describeTo + - fileContentsHaveChecksum + - matchesSafely +FileIO: + methods: + - apply + - by + - continuously + - create + - createWriteOperation + - createWriter + - defaultNaming + - equals + - expand + - filepattern + - formatRecord + - getCompression + - getDefaultDestination + - getDestination + - getDestinationCoder + - getEmptyMatchTreatment + - getFilenamePolicy + - getMatchUpdatedFiles + - getMetadata + - getSideInputs + - getWatchInterval + - hashCode + - match + - matchAll + - open + - openSeekable + - populateDisplayData + - process + - readFullyAsBytes + - readFullyAsUTF8String + - readMatches + - relativeFileNaming + - sideInput + - to + - toString + - unwindowedFilename + - via + - windowedFilename + - withCompression + - withConfiguration + - withDestinationCoder + - withDirectoryTreatment + - withEmptyGlobalWindowDestination + - withEmptyMatchTreatment + - withIgnoreWindowing + - withNaming + - withNoSpilling + - withNumShards + - withPrefix + - withSharding + - withSuffix + - withTempDirectory + - write + - writeDynamic +FilePatternMatchingShardedFile: + methods: + - readFilesWithRetries + - toString +FileSystem: {} +FileSystemUtils: + methods: + - wildcardToRegexp +FileSystems: + methods: + - apply + - copy + - create + - delete + - hasGlobWildcard + - match + - matchNewResource + - matchResources + - matchSingleFileSpec + - open + - rename + - setDefaultPipelineOptions + properties: + - DEFAULT_SCHEME + - filteredExistingSrcs + - resultDestinations + - resultSources +FillGaps: + methods: + - expand + - getNextWindow + - getPreviousWindow + - getValue + - keepEarliest + - keepLatest + - of + - onGcTimer + - onTimer + - process +Filter: + methods: + - by + - create + - equal + - expand + - getPredicate + - getRowSelector + - greaterThan + - greaterThanEq + - lessThan + - lessThanEq + - named + - of + - output + - populateDisplayData + - process + - processElement + - whereFieldId + - whereFieldIds + - whereFieldName + - whereFieldNames +FinalizeBundleHandler: + methods: + - create + - finalizeBundle + - getCallback + - 
getExpiryTime + - registerCallbacks +FindQuery: + methods: + - apply + - create + - withFilters + - withLimit + - withProjection +FirestoreIO: + methods: + - v1 +FirestoreV1: + methods: + - apply + - batchGetDocuments + - batchWrite + - build + - equals + - expand + - getNumBytes + - getNumWrites + - getStatus + - getWrite + - getWriteFailures + - getWriteResult + - hashCode + - listCollectionIds + - listDocuments + - partitionQuery + - populateDisplayData + - processElement + - read + - runQuery + - toBuilder + - toString + - withDeadLetterQueue + - withNameOnlyQuery + - withReadTime + - withRpcQosOptions + - write +FixedBytes: + methods: + - getLength + - getName + - of + - toBaseType + - toInputType + - toString + properties: + - IDENTIFIER +FixedPrecisionNumeric: + methods: + - of + - toInputType + properties: + - BASE_IDENTIFIER + - IDENTIFIER +FixedString: + methods: + - getLength + - getName + - of + - toInputType + - toString + properties: + - IDENTIFIER +FixedWindows: + methods: + - assignWindow + - equals + - getOffset + - getSize + - hashCode + - isCompatible + - of + - populateDisplayData + - verifyCompatibility + - windowCoder + - withOffset +FlatMap: + methods: + - eventTimeBy + - getAllowedTimestampSkew + - getEventTimeExtractor + - getFunctor + - named + - of + - output + - using +FlatMapElements: + methods: + - exceptionsInto + - exceptionsVia + - expand + - getInputTypeDescriptor + - getOutputTypeDescriptor + - getTypeDescriptor + - into + - populateDisplayData + - processElement + - via +FlatMapTranslator: + methods: + - collect + - getAllowedTimestampSkew + - processElement + - translate +Flatten: + methods: + - apply + - expand + - iterables + - pCollections +FlattenRunner: + methods: + - createRunnerForPTransform + - getPTransformRunnerFactories +FloatCoder: + methods: + - consistentWithEquals + - decode + - encode + - getEncodedTypeDescriptor + - isRegisterByteSizeObserverCheap + - of + - verifyDeterministic +FluentBackoff: + methods: + - backoff + - nextBackOffMillis + - reset + - toString + - withExponent + - withInitialBackoff + - withMaxBackoff + - withMaxCumulativeBackoff + - withMaxRetries + properties: + - DEFAULT +FnApiDoFnRunner: + methods: + - accept + - align + - bundleFinalizer + - clear + - createRunnerForPTransform + - element + - finishBundleContext + - fireTimestamp + - forRoots + - get + - getCurrentRelativeTime + - getDownstreamSplit + - getErrorContext + - getNewWindowStopIndex + - getPTransformRunnerFactories + - getPipelineOptions + - getPrimaryInFullyProcessedWindowsRoot + - getPrimarySplitRoot + - getProgress + - getResidualInUnprocessedWindowsRoot + - getResidualSplitRoot + - getWindowSplit + - key + - of + - offset + - onClaimFailed + - onClaimed + - onTimerContext + - output + - outputReceiver + - outputRowReceiver + - outputWithTimestamp + - pane + - paneInfo + - pipelineOptions + - processContext + - reset + - restriction + - restrictionTracker + - schemaElement + - set + - setRelative + - sideInput + - startBundleContext + - state + - taggedOutputReceiver + - timeDomain + - timer + - timerFamily + - timerId + - timestamp + - trySplit + - updateFinalMonitoringData + - updateIntermediateMonitoringData + - watermarkEstimator + - watermarkEstimatorState + - window + - withNoOutputTimestamp + - withOutputTimestamp +FnApiStateAccessor: + methods: + - add + - addAccum + - addIfAbsent + - apply + - bindBag + - bindCombining + - bindCombiningWithContext + - bindMap + - bindOrderedList + - bindSet + - bindValue + - bindWatermark + - clear + - 
computeIfAbsent + - contains + - entries + - finalizeState + - get + - getAccum + - getOrDefault + - getPipelineOptions + - isEmpty + - keys + - mergeAccumulators + - put + - read + - readLater + - remove + - sideInput + - values + - window + - write +FnApiTimerBundleTracker: + methods: + - get + - getBundleModifications + - getModifiedEventTimersOrdered + - getModifiedProcessingTimersOrdered + - getModifiedSynchronizedProcessingTimersOrdered + - getModifiedTimerIds + - getModifiedTimersOrdered + - getTimeDomain + - getTimer + - getTimerFamilyOrId + - of + - outputTimers + - reset + - timerModified +FnHarness: + methods: + - apply + - main +Fold: + methods: + - of +ForwardingClientResponseObserver: + methods: + - beforeStart + - create + - onCompleted + - onError + - onNext +FullJoin: + methods: + - by + - named + - of + - using +FullNameTableProvider: + methods: + - buildBeamSqlTable + - getSubProvider + - getTable + - getTableByFullName + - getTableType + - registerKnownTableNames +GaugeResult: + methods: + - create + - empty + - getTimestamp + - getValue +GceMetadataUtil: + methods: + - fetchDataflowJobId +GcpCredentialFactory: + methods: + - fromOptions + - getCredential +GcpIoPipelineOptionsRegistrar: + methods: + - getPipelineOptions +GcpPipelineOptionsRegistrar: + methods: + - getPipelineOptions +GcsCreateOptions: + methods: + - build + - builder + - gcsUploadBufferSizeBytes + - setGcsUploadBufferSizeBytes +GcsFileSystemRegistrar: + methods: + - fromOptions +GcsPath: + methods: + - compareTo + - endsWith + - equals + - fromComponents + - fromObject + - fromResourceName + - fromUri + - getBucket + - getFileName + - getFileSystem + - getName + - getNameCount + - getObject + - getParent + - getRoot + - hasNext + - hashCode + - isAbsolute + - iterator + - next + - normalize + - register + - relativize + - remove + - resolve + - resolveSibling + - setFileSystem + - startsWith + - subpath + - toAbsolutePath + - toFile + - toRealPath + - toResourceName + - toString + - toUri + properties: + - GCS_URI + - SCHEME +GcsPathValidator: + methods: + - fromOptions + - validateInputFilePatternSupported + - validateOutputFilePrefixSupported + - validateOutputResourceSupported + - verifyPath +GcsResourceId: + methods: + - equals + - getCurrentDirectory + - getFilename + - getScheme + - hashCode + - isDirectory + - resolve + - toString +GcsUtil: + methods: + - bucketAccessible + - bucketOwner + - build + - builder + - copy + - create + - createBucket + - enqueue + - execute + - expand + - fileSize + - getContentType + - getExpectFileToNotExist + - getFrom + - getLastError + - getNonWildcardPrefix + - getObject + - getObjects + - getReadyToEnqueue + - getTo + - getUploadBufferSizeBytes + - ioException + - isWildcard + - listObjects + - onFailure + - onSuccess + - open + - queue + - remove + - rename + - setContentType + - setExpectFileToNotExist + - setUploadBufferSizeBytes + - shouldRetry + - size + - storageObject + - verifyBucketAccessible +GenerateSequence: + methods: + - buildExternal + - expand + - from + - knownBuilders + - populateDisplayData + - setElementsPerPeriod + - setMaxReadTime + - setPeriod + - setStart + - setStop + - to + - withMaxReadTime + - withRate + - withTimestampFn + properties: + - URN +GenerateSequenceTableProvider: + methods: + - buildBeamSqlTable + - getTableType +Generator: + methods: + - compareTo + - copy + - currentInterEventDelayUs + - equals + - getCurrentConfig + - getFractionConsumed + - getNextEventId + - hasNext + - hashCode + - next + - nextEvent + - remove + - 
splitAtEventId + - toCheckpoint + - toString + - withDelay + properties: + - event + - eventTimestamp + - wallclockTimestamp + - watermark +GeneratorCheckpoint: + methods: + - decode + - encode + - finalizeCheckpoint + - toGenerator + - toString + - verifyDeterministic + properties: + - CODER_INSTANCE +GeneratorConfig: + methods: + - copy + - copyWith + - estimatedBytesForEvents + - getAvgAuctionByteSize + - getAvgBidByteSize + - getAvgPersonByteSize + - getEstimatedSizeBytes + - getHotAuctionRatio + - getHotBiddersRatio + - getHotSellersRatio + - getNumActivePeople + - getNumInFlightAuctions + - getOccasionalDelaySec + - getProbDelayedEvent + - getStartEventId + - getStopEventId + - nextAdjustedEventNumber + - nextEventNumber + - nextEventNumberForWatermark + - split + - timestampAndInterEventDelayUsForEvent + - toString + properties: + - AUCTION_PROPORTION + - FIRST_AUCTION_ID + - FIRST_CATEGORY_ID + - FIRST_PERSON_ID + - PERSON_PROPORTION + - PROPORTION_DENOMINATOR + - baseTime + - firstEventId + - firstEventNumber + - maxEvents +GenericDlq: + methods: + - getDlqTransform +GenericTranslatorProvider: + methods: + - build + - createWithDefaultTranslators + - findTranslator + - newBuilder + - register +GetterBasedSchemaProvider: + methods: + - apply + - create + - equals + - fieldValueGetters + - fieldValueTypeInformations + - fromRowFunction + - get + - getRaw + - hashCode + - name + - schemaTypeCreator + - toRowFunction +GetterBasedSchemaProviderBenchmark: + methods: + - processArrayOfNestedStringField + - processArrayOfStringField + - processByteBufferField + - processBytesField + - processDateTimeField + - processIntField + - processMapOfIntField + - processMapOfNestedIntField + - processNestedBytesField + - processNestedIntField + - processStringBuilderField + - processStringField +GlobalWindow: + methods: + - consistentWithEquals + - decode + - encode + - equals + - getCoderArguments + - hashCode + - maxTimestamp + - verifyDeterministic + properties: + - INSTANCE +GlobalWindows: + methods: + - assignWindows + - assignsToOneWindow + - equals + - getDefaultWindowMappingFn + - getSideInputWindow + - hashCode + - isCompatible + - toString + - verifyCompatibility + - windowCoder +Group: + methods: + - aggregate + - aggregateField + - aggregateFieldBaseValue + - aggregateFields + - aggregateFieldsById + - byFieldAccessDescriptor + - byFieldIds + - byFieldNames + - create + - expand + - getToKvs + - globally + - process + - witValueField + - withKeyField + - withValueField +GroupByKey: + methods: + - applicableTo + - create + - expand + - fewKeys + - getInputValueCoder + - getKeyCoder + - getOutputKvCoder + - populateDisplayData + - updateWindowingStrategy + - validate +GroupByKeyLoadTest: + methods: + - main + - processElement +GroupIntoBatches: + methods: + - apply + - create + - expand + - getBatchSize + - getBatchSizeBytes + - getBatchingParams + - getElementByteSize + - getMaxBufferingDuration + - getWeigher + - identity + - ofByteSize + - ofSize + - onBufferingTimer + - onWindowExpiration + - onWindowTimer + - processElement + - withMaxBufferingDuration + - withShardedKey +GrowableOffsetRangeTracker: + methods: + - getProgress + - isBounded + - trySplit +GrpcContextHeaderAccessorProvider: + methods: + - getHeaderAccessor + - getSdkWorkerId + - interceptCall + - interceptor +GrpcFnServer: + methods: + - allocatePortAndCreateFor + - close + - create + - getApiServiceDescriptor + - getServer + - getService +HBaseCoderProviderRegistrar: + methods: + - getCoderProviders +HBaseIO: + 
methods: + - advance + - close + - createReader + - equals + - expand + - finishBundle + - getConfiguration + - getCurrent + - getCurrentSource + - getEstimatedSizeBytes + - getFractionConsumed + - getKeyRange + - getOutputCoder + - getScan + - getSplitPointsConsumed + - getTableId + - hashCode + - populateDisplayData + - processElement + - read + - readAll + - setup + - split + - splitAtFraction + - start + - startBundle + - tearDown + - validate + - withConfiguration + - withFilter + - withKeyRange + - withScan + - withTableId + - write +HCatToRow: + methods: + - expand + - fromSpec + - processElement +HCatalogBeamSchema: + methods: + - create + - getTableSchema + - hasDatabase +HCatalogIO: + methods: + - advance + - close + - createReader + - expand + - finishBundle + - getCurrent + - getCurrentSource + - getEstimatedSizeBytes + - getOutputCoder + - initiateWrite + - populateDisplayData + - processElement + - read + - split + - start + - startBundle + - tearDown + - withBatchSize + - withConfigProperties + - withDatabase + - withFilter + - withPartition + - withPartitionCols + - withPollingInterval + - withTable + - withTerminationCondition + - write +HCatalogTable: + methods: + - buildIOReader + - buildIOWriter + - config + - database + - getSchema + - isBounded + - schema + - table +HCatalogUtils: {} +HDFSSynchronization: + methods: + - acquireTaskAttemptIdLock + - acquireTaskIdLock + - releaseJobIdLock + - tryAcquireJobLock +HL7v2IO: + methods: + - expand + - finishSpecifyingOutput + - getAll + - getEarliestToLatestRestriction + - getFailedInsertsWithErr + - getFailedReads + - getMessages + - getPipeline + - ingestMessages + - initClient + - instantiateHealthcareClient + - listMessages + - of + - processElement + - read + - readAll + - readAllWithFilter + - readWithFilter + - split + - withInitialSplitDuration + - writeMessages + properties: + - DEAD_LETTER + - FAILED + - OUT + - SUCCESS +HL7v2Message: + methods: + - fromModel + - getCreateTime + - getData + - getLabels + - getMessageType + - getName + - getSchematizedData + - getSendFacility + - getSendTime + - setSchematizedData + - toModel + - toString +HL7v2MessageCoder: + methods: + - decode + - encode + - of +HadoopFileSystemModule: + methods: + - deserialize + - serialize +HadoopFileSystemOptionsRegistrar: + methods: + - getPipelineOptions +HadoopFileSystemRegistrar: + methods: + - fromOptions +HadoopFormatIO: + methods: + - advance + - close + - createReader + - decode + - encode + - expand + - finishBundle + - getConfiguration + - getCurrent + - getCurrentSource + - getDefaultCoder + - getEstimatedSizeBytes + - getFractionConsumed + - getKeyCoder + - getKeyTranslationFunction + - getKeyTypeDescriptor + - getOutputCoder + - getSkipKeyClone + - getSkipValueClone + - getSplit + - getSplitPointsRemaining + - getValueCoder + - getValueTranslationFunction + - getValueTypeDescriptor + - getinputFormatClass + - getinputFormatKeyClass + - getinputFormatValueClass + - populateDisplayData + - processElement + - read + - setup + - split + - start + - startBundle + - toBuilder + - toString + - validate + - validateTransform + - withConfiguration + - withConfigurationTransform + - withExternalSynchronization + - withKeyTranslation + - withPartitioning + - withSkipKeyClone + - withSkipValueClone + - withValueTranslation + - withoutPartitioning + - write + properties: + - JOB_ID + - NUM_REDUCES + - OUTPUT_DIR + - OUTPUT_FORMAT_CLASS_ATTR + - OUTPUT_KEY_CLASS + - OUTPUT_VALUE_CLASS + - PARTITIONER_CLASS_ATTR 
+HarnessMonitoringInfosInstructionHandler: + methods: + - harnessMonitoringInfos +HarnessStreamObserverFactories: + methods: + - fromOptions +HealthcareIOError: + methods: + - getDataResource + - getErrorMessage + - getObservedTime + - getStackTrace + - getStatusCode +HealthcareIOErrorCoder: + methods: + - decode + - encode + - of +HealthcareIOErrorToTableRow: + methods: + - apply + properties: + - TABLE_FIELD_SCHEMAS + - TIMESTAMP_FIELD_NAME +HeartbeatRecord: + methods: + - equals + - getRecordTimestamp + - getTimestamp + - hashCode + - toString +HeartbeatRecordAction: + methods: + - run +HistogramData: + methods: + - clear + - equals + - getAccumulatedBucketSize + - getBottomBucketCount + - getBucketIndex + - getBucketSize + - getBucketType + - getCount + - getNumBuckets + - getPercentileString + - getRangeFrom + - getRangeTo + - getStart + - getTopBucketCount + - getTotalCount + - getWidth + - hashCode + - incBottomBucketCount + - incBucketCount + - incTopBucketCount + - linear + - of + - p50 + - p90 + - p99 + - record + - update +HllCount: + methods: + - expand + - forBytes + - forIntegers + - forLongs + - forStrings + - getSketchFromByteBuffer + - globally + - perKey + - processElement + - withPrecision + properties: + - DEFAULT_PRECISION + - MAXIMUM_PRECISION + - MINIMUM_PRECISION +HttpClientConfiguration: + methods: + - build + - builder + - connectionAcquisitionTimeout + - connectionMaxIdleTime + - connectionTimeToLive + - connectionTimeout + - maxConnections + - readTimeout + - socketTimeout + - writeTimeout +HttpHealthcareApiClient: + methods: + - createDicomStore + - createFhirStore + - createHL7v2Message + - createHL7v2Store + - deidentifyFhirStore + - deleteDicomStore + - deleteFhirStore + - deleteHL7v2Message + - deleteHL7v2Store + - executeFhirBundle + - exportFhirResourceToBigQuery + - exportFhirResourceToGcs + - getEarliestHL7v2SendTime + - getEnd + - getHL7v2Message + - getHL7v2Store + - getJsonFactory + - getLatestHL7v2SendTime + - getPatientEverything + - getStart + - hasNext + - importFhirResource + - ingestHL7v2Message + - initialize + - iterator + - listAllFhirStores + - makeHL7v2ListRequest + - makeListRequest + - makeSendTimeBoundHL7v2ListRequest + - next + - ofPatientEverything + - ofSearch + - pollOperation + - readFhirResource + - retrieveDicomStudyMetadata + - searchFhirResource + - uploadToDicomStore +IOITMetrics: + methods: + - publishToInflux +IOUtils: + methods: + - forEach +IdGenerators: + methods: + - decrementingLongs + - incrementingLongs +IdNameReserve: + methods: + - decode + - encode + - equals + - hashCode + - sizeInBytes + - structuralValue + - toString + - verifyDeterministic + properties: + - CODER +IdentityWindowFn: + methods: + - assignWindows + - assignsToOneWindow + - getDefaultWindowMappingFn + - isCompatible + - verifyCompatibility + - windowCoder +IllegalMutationException: + methods: + - getNewValue + - getSavedValue +Impulse: + methods: + - create + - expand +InMemoryMetaStore: + methods: + - buildBeamSqlTable + - createTable + - dropTable + - getTableType + - getTables + - registerProvider +InMemoryMetaTableProvider: + methods: + - createTable + - dropTable + - getTables +InProcessServerFactory: + methods: + - allocateAddressAndCreate + - create +IncompatibleWindowException: + methods: + - getMessage +InferableFunction: + methods: + - apply + - fromProcessFunctionWithOutputType + - getInputTypeDescriptor + - getOutputTypeDescriptor + - populateDisplayData +InfluxDBPublisher: + methods: + - dataPoint + - fields + - measurement + - publish 
+ - publishNexmarkResults + - publishWithSettings + - tags + - timestamp + - timestampUnit + - toString +InfluxDBSettings: + methods: + - builder + - copyWithMeasurement + - get + - withDatabase + - withHost + - withMeasurement + - withRetentionPolicy + properties: + - database + - host + - measurement + - retentionPolicy + - userName + - userPassword +InfluxDbIO: + methods: + - advance + - checkClientTrusted + - checkServerTrusted + - close + - create + - createReader + - expand + - finishBundle + - getAcceptedIssuers + - getConnection + - getCurrent + - getCurrentSource + - getEstimatedSizeBytes + - getOutputCoder + - populateDisplayData + - processElement + - read + - setDisableCertificateValidation + - split + - start + - startBundle + - tearDown + - validate + - withBatchSize + - withConsistencyLevel + - withDataSourceConfiguration + - withDatabase + - withDisableCertificateValidation + - withFromDateTime + - withMetric + - withQuery + - withRetentionPolicy + - withToDateTime + - write +InitialPartition: + methods: + - isInitialPartition + properties: + - PARENT_TOKENS + - PARTITION_TOKEN +InitializeDoFn: + methods: + - processElement +InsertRetryPolicy: + methods: + - alwaysRetry + - getInsertErrors + - neverRetry + - retryTransientErrors + - shouldRetry +InstanceBuilder: + methods: + - build + - fromClass + - fromClassName + - fromFactoryMethod + - ofType + - withArg +InstanceUtils: + methods: + - create + - forName +InstantCoder: + methods: + - consistentWithEquals + - decode + - encode + - getEncodedTypeDescriptor + - isRegisterByteSizeObserverCheap + - of + - verifyDeterministic +InstantDeserializer: + methods: + - close + - configure + - deserialize +InstantSerializer: + methods: + - close + - configure + - serialize +IntervalWindow: + methods: + - compareTo + - consistentWithEquals + - contains + - decode + - encode + - end + - equals + - getCoder + - getCoderArguments + - hashCode + - intersects + - isDisjoint + - isRegisterByteSizeObserverCheap + - maxTimestamp + - of + - registerByteSizeObserver + - span + - start + - toString + - verifyDeterministic +InvalidConfigurationException: {} +InvalidLocationException: {} +InvalidSchemaException: {} +InvalidTableException: {} +IsInf: + methods: + - isInf +IsNan: + methods: + - isNan +IterableCoder: + methods: + - getEncodedTypeDescriptor + - of + - structuralValue +IterableLikeCoder: + methods: + - decode + - encode + - getCoderArguments + - getElemCoder + - isRegisterByteSizeObserverCheap + - registerByteSizeObserver + - update + - verifyDeterministic +IterableSideInput: + methods: + - get +JAXBCoder: + methods: + - close + - decode + - encode + - equals + - getEncodedTypeDescriptor + - getJAXBClass + - hashCode + - of + - write +JavaBeanSchema: + methods: + - create + - equals + - fieldValueGetters + - fieldValueTypeInformations + - get + - hashCode + - schemaFor + - schemaTypeCreator + properties: + - INSTANCE +JavaBeanUtils: + methods: + - appender + - createConstructorCreator + - createGetter + - createSetter + - createStaticCreator + - getConstructorCreator + - getFieldTypes + - getGetters + - getSetters + - getStaticCreator + - prepare + - schemaFromJavaBeanClass + - validateJavaBean + properties: + - CACHED_CREATORS +JavaFieldSchema: + methods: + - fieldValueGetters + - fieldValueTypeInformations + - get + - schemaFor + - schemaTypeCreator + properties: + - INSTANCE +JavaUdfLoader: + methods: + - createClassLoader + - loadAggregateFunction + - loadScalarFunction + - run +JdbcConnection: + methods: + - getCurrentSchemaPlus + 
- getPipelineOptions + - setPipelineOptions + - setPipelineOptionsMap +JdbcDriver: + methods: + - connect + properties: + - CONNECT_STRING_PREFIX + - INSTANCE + - OBJECT_MAPPER +JdbcIO: + methods: + - apply + - create + - expand + - finish + - finishBundle + - of + - populateDisplayData + - process + - processElement + - read + - readAll + - readRows + - readWithPartitions + - setParameters + - setup + - tearDown + - withAutoSharding + - withBatchSize + - withCoder + - withConnectionInitSqls + - withConnectionProperties + - withDataSourceConfiguration + - withDataSourceProviderFn + - withDriverClassLoader + - withFetchSize + - withLowerBound + - withNumPartitions + - withOutputParallelization + - withParameterSetter + - withPartitionColumn + - withPassword + - withPreparedStatementSetter + - withQuery + - withResults + - withRetryConfiguration + - withRetryStrategy + - withRowMapper + - withRowOutput + - withStatement + - withStatementPreparator + - withTable + - withUpperBound + - withUsername + - withWriteResults + - write + - writeVoid +JdbcSchemaIOProvider: + methods: + - buildReader + - buildWriter + - configurationSchema + - expand + - from + - identifier + - isBounded + - requiresDataSchema + - schema +JdbcWriteResult: + methods: + - create +JmsIO: + methods: + - advance + - close + - createReader + - expand + - getCheckpointMark + - getCheckpointMarkCoder + - getCurrent + - getCurrentSource + - getCurrentTimestamp + - getOutputCoder + - getTotalBacklogBytes + - getWatermark + - mapMessage + - populateDisplayData + - processElement + - read + - readMessage + - setup + - split + - start + - teardown + - withAutoScaler + - withCloseTimeout + - withCoder + - withConnectionFactory + - withMaxNumRecords + - withMaxReadTime + - withMessageMapper + - withPassword + - withQueue + - withTopic + - withTopicNameMapper + - withUsername + - withValueMapper + - write +JmsIOException: {} +JmsRecord: + methods: + - equals + - getJmsCorrelationID + - getJmsDeliveryMode + - getJmsDestination + - getJmsExpiration + - getJmsMessageID + - getJmsPriority + - getJmsRedelivered + - getJmsReplyTo + - getJmsTimestamp + - getJmsType + - getPayload + - getProperties + - hashCode +Join: + methods: + - accumulationMode + - by + - expand + - fullOuterJoin + - getJoiner + - getLeftKeyExtractor + - getRightKeyExtractor + - getType + - innerBroadcastJoin + - innerJoin + - left + - leftOuterBroadcastJoin + - leftOuterJoin + - named + - of + - on + - output + - outputValues + - processElement + - right + - rightOuterJoin + - triggeredBy + - using + - windowBy + - with + - withAllowedLateness + - withOnTimeBehavior + - withTimestampCombiner + properties: + - LHS_TAG + - RHS_TAG +JoinRelOptRuleCall: + methods: + - builder + - getChildRels + - getMetadataQuery + - getOperand0 + - getParents + - getPlanner + - getRelList + - getRule + - rel + - transformTo +JoinTranslator: + methods: + - getFnName + - processElement +JsonArrayCoder: + methods: + - decode + - encode + - of +JsonMatcher: + methods: + - describeTo + - jsonBytesLike + - jsonStringLike +JsonPayloadSerializerProvider: + methods: + - getSerializer + - identifier +JsonToRow: + methods: + - build + - create + - expand + - finishSpecifyingOutput + - getFailedToParseLines + - getJsonToRowWithErrFn + - getPipeline + - getResults + - processElement + - resultBuilder + - setErrorField + - setJsonToRowWithErrFn + - setLineField + - toBuilder + - withExceptionReporting + - withExtendedErrorInfo + - withNullBehavior + - withSchema + - withSchemaAndNullBehavior + 
properties: + - ERROR_ROW_SCHEMA + - ERROR_ROW_WITH_ERR_MSG_SCHEMA +JsonUtils: + methods: + - apply + - getJsonBytesToRowFunction + - getJsonStringToRowFunction + - getRowToJsonBytesFunction + - getRowToJsonStringsFunction +JvmInitializers: + methods: + - runBeforeProcessing + - runOnStartup +KV: + methods: + - compare + - equals + - getKey + - getValue + - hashCode + - of + - toString +KafkaCheckpointMark: + methods: + - finalizeCheckpoint + - getNextOffset + - getPartition + - getPartitions + - getTopic + - getWatermarkMillis + - toString +KafkaCommitOffset: + methods: + - expand + - processElement +KafkaIO: + methods: + - apply + - buildExternal + - commitOffsets + - commitOffsetsInFinalize + - decode + - encode + - expand + - externalWithMetadata + - getReplacementTransform + - getTimestamp + - knownBuilders + - mapOutputs + - populateDisplayData + - processElement + - read + - readBytes + - readSourceDescriptors + - setCommitOffsetInFinalize + - setConsumerConfig + - setKeyDeserializer + - setKeySerializer + - setMaxNumRecords + - setMaxReadTime + - setProducerConfig + - setStartReadTime + - setStopReadTime + - setTimestampPolicy + - setTopic + - setTopics + - setValueDeserializer + - setValueSerializer + - toExternalKafkaRecord + - updateConsumerProperties + - updateProducerProperties + - useCreateTime + - useLogAppendTime + - useProcessingTime + - validate + - values + - withBootstrapServers + - withCheckStopReadingFn + - withConsumerConfigOverrides + - withConsumerConfigUpdates + - withConsumerFactoryFn + - withCreatWatermarkEstimatorFn + - withCreateTime + - withDynamicRead + - withEOS + - withExtractOutputTimestampFn + - withInputTimestamp + - withKeyDeserializer + - withKeyDeserializerAndCoder + - withKeyDeserializerProvider + - withKeySerializer + - withLogAppendTime + - withManualWatermarkEstimator + - withMaxNumRecords + - withMaxReadTime + - withMonotonicallyIncreasingWatermarkEstimator + - withOffsetConsumerConfigOverrides + - withProcessingTime + - withProducerConfigUpdates + - withProducerFactoryFn + - withPublishTimestampFunction + - withReadCommitted + - withStartReadTime + - withStopReadTime + - withTimestampFn + - withTimestampFn2 + - withTimestampPolicyFactory + - withTopic + - withTopicPartitions + - withTopics + - withValueDeserializer + - withValueDeserializerAndCoder + - withValueDeserializerProvider + - withValueSerializer + - withWallTimeWatermarkEstimator + - withWatermarkFn + - withWatermarkFn2 + - withoutMetadata + - write + - writeRecords + properties: + - KAFKA_READ_OVERRIDE + - URN + - URN_WITHOUT_METADATA + - URN_WITH_METADATA +KafkaRecord: + methods: + - equals + - getHeaders + - getKV + - getOffset + - getPartition + - getTimestamp + - getTimestampType + - getTopic + - hashCode +KafkaRecordCoder: + methods: + - consistentWithEquals + - decode + - encode + - getCoderArguments + - isRegisterByteSizeObserverCheap + - of + - structuralValue + - verifyDeterministic +KafkaSchemaTransformReadConfiguration: + methods: + - build + - builder + - getAutoOffsetResetConfig + - getAvroSchema + - getBootstrapServers + - getConfluentSchemaRegistrySubject + - getConfluentSchemaRegistryUrl + - getConsumerConfigUpdates + - getDataFormat + - getTopic + - setAutoOffsetResetConfig + - setAvroSchema + - setBootstrapServers + - setConfluentSchemaRegistrySubject + - setConfluentSchemaRegistryUrl + - setConsumerConfigUpdates + - setDataFormat + - setTopic + - validate + properties: + - VALID_DATA_FORMATS + - VALID_START_OFFSET_VALUES +KafkaSchemaTransformReadProvider: + 
methods: + - buildTransform + - expand + - identifier + - inputCollectionNames + - outputCollectionNames +KafkaSourceConsumerFn: + methods: + - checkDone + - configs + - currentRestriction + - exists + - getHashCode + - getInitialRestriction + - getRestrictionCoder + - isBounded + - newTracker + - offset + - offsetStorageReader + - offsets + - process + - start + - storageExists + - tryClaim + - trySplit + properties: + - BEAM_INSTANCE_PROPERTY + - fetchedRecords + - history + - maxRecords + - minutesToRun + - offset +KafkaSourceDescriptor: + methods: + - getTopicPartition + - of +KafkaTableProvider: + methods: + - buildBeamSqlTable + - getTableType +KeyPairUtils: + methods: + - preparePrivateKey + - readPrivateKeyFile +KeyedPCollectionTuple: + methods: + - and + - apply + - empty + - expand + - getCoGbkResultSchema + - getCollection + - getKeyCoder + - getKeyedCollections + - getPipeline + - getTupleTag + - isEmpty + - of +Keys: + methods: + - apply + - create + - expand +KinesisClientThrottledException: {} +KinesisIO: + methods: + - addPutRecordsRequest + - build + - builder + - close + - expand + - finishBundle + - finishSpecifyingOutput + - getPipeline + - maxBufferedTime + - maxBytes + - processElement + - read + - readData + - refreshPeriodically + - setup + - shardAwareHashKey + - shardRefreshInterval + - startBundle + - teardown + - withAWSClientsProvider + - withArrivalTimeWatermarkPolicy + - withBatchMaxBytes + - withBatchMaxRecords + - withClientConfiguration + - withConcurrentRequests + - withCustomRateLimitPolicy + - withCustomWatermarkPolicy + - withDynamicDelayRateLimitPolicy + - withFixedDelayRateLimitPolicy + - withInitialPositionInStream + - withInitialTimestampInStream + - withMaxCapacityPerShard + - withMaxNumRecords + - withMaxReadTime + - withPartitionKey + - withPartitioner + - withProcessingTimeWatermarkPolicy + - withProducerProperties + - withRecordAggregation + - withRecordAggregationDisabled + - withRequestRecordsLimit + - withSerializer + - withStreamName + - withUpToDateThreshold + - write +KinesisRecord: + methods: + - equals + - getApproximateArrivalTimestamp + - getData + - getDataAsBytes + - getExtendedSequenceNumber + - getPartitionKey + - getReadTime + - getSequenceNumber + - getShardId + - getStreamName + - getSubSequenceNumber + - getUniqueId + - hashCode + - toString +KinesisTransformRegistrar: + methods: + - buildExternal + - knownBuilderInstances + - setAwsAccessKey + - setAwsSecretKey + - setInitialPositionInStream + - setInitialTimestampInStream + - setMaxCapacityPerShard + - setMaxNumRecords + - setMaxReadTime + - setPartitionKey + - setProducerProperties + - setRateLimit + - setRegion + - setRequestRecordsLimit + - setServiceEndpoint + - setStreamName + - setUpToDateThreshold + - setVerifyCertificate + - setWatermarkIdleDurationThreshold + - setWatermarkPolicy + properties: + - READ_DATA_URN + - WRITE_URN +KryoCoder: + methods: + - decode + - encode + - equals + - hashCode + - of + - verifyDeterministic + - withRegistrar +KryoCoderProvider: + methods: + - coderFor + - of + - registerTo + - withRegistrar +KuduIO: + methods: + - createReader + - expand + - finishBundle + - getEstimatedSizeBytes + - getOutputCoder + - populateDisplayData + - processElement + - read + - setup + - split + - startBundle + - teardown + - validate + - withBatchSize + - withCoder + - withFaultTolerent + - withFormatFn + - withMasterAddresses + - withParseFn + - withPredicates + - withProjectedColumns + - withTable + - write +KvCoder: + methods: + - consistentWithEquals + 
- decode + - encode + - getCoderArguments + - getEncodedTypeDescriptor + - getKeyCoder + - getValueCoder + - isRegisterByteSizeObserverCheap + - of + - registerByteSizeObserver + - structuralValue + - verifyDeterministic +KvSwap: + methods: + - apply + - create + - expand +LatencyRecordingHttpRequestInitializer: + methods: + - initialize + - intercept + - interceptResponse + properties: + - HISTOGRAM_BUCKET_TYPE +Latest: + methods: + - addInput + - combineFn + - createAccumulator + - expand + - extractOutput + - getAccumulatorCoder + - getDefaultOutputCoder + - globally + - mergeAccumulators + - perKey +LazyAggregateCombineFn: + methods: + - addInput + - createAccumulator + - extractOutput + - getAccumTVariable + - getAccumulatorCoder + - getUdafImpl + - iterator + - mergeAccumulators + - toString +LeftJoin: + methods: + - by + - named + - of + - using +LengthPrefixCoder: + methods: + - consistentWithEquals + - decode + - encode + - getCoderArguments + - getValueCoder + - isRegisterByteSizeObserverCheap + - of + - verifyDeterministic +ListCoder: + methods: + - consistentWithEquals + - getEncodedTypeDescriptor + - of + - structuralValue + - verifyDeterministic +LoadTestResult: + methods: + - getRuntime + - getTotalBytesCount + - toMap +LocalFileSystemRegistrar: + methods: + - fromOptions +LocalResources: + methods: + - fromFile + - fromPath + - fromString +LogicalCalcMergeRule: + methods: + - onMatch + properties: + - INSTANCE +LogicalEndpoint: + methods: + - data + - getInstructionId + - getTimerFamilyId + - getTransformId + - isTimer + - timer +LongGenerator: + methods: + - nextLong +LzoCompression: + methods: + - createLzoInputStream + - createLzoOutputStream + - createLzopInputStream + - createLzopOutputStream +Main: + methods: + - call + - main +ManagedChannelFactory: + methods: + - createDefault + - createEpoll + - createInProcess + - forDescriptor + - withDirectExecutor + - withInterceptors +ManagedFactoryImpl: + methods: + - close + - create +MapCoder: + methods: + - consistentWithEquals + - decode + - encode + - getCoderArguments + - getEncodedTypeDescriptor + - getKeyCoder + - getValueCoder + - of + - registerByteSizeObserver + - structuralValue + - verifyDeterministic +MapElements: + methods: + - exceptionsInto + - exceptionsVia + - expand + - getInputTypeDescriptor + - getMapper + - getOutputTypeDescriptor + - getTypeDescriptor + - into + - named + - of + - output + - populateDisplayData + - processElement + - using + - via +MapFnRunners: + methods: + - create + - createRunnerForPTransform + - forValueMapFnFactory + - forWindowedValueMapFnFactory +MapKeys: + methods: + - exceptionsInto + - exceptionsVia + - expand + - into + - via +MapValues: + methods: + - exceptionsInto + - exceptionsVia + - expand + - into + - via +MapperFactory: + methods: + - changeStreamRecordMapper + - partitionMetadataMapper +MappingUtils: + methods: + - registerStreamingPlugin +MatchResult: + methods: + - build + - builder + - checksum + - create + - isReadSeekEfficient + - lastModifiedMillis + - metadata + - resourceId + - setChecksum + - setIsReadSeekEfficient + - setLastModifiedMillis + - setResourceId + - setSizeBytes + - sizeBytes + - status + - unknown +Materializations: + methods: + - getUrn + - iterable + - multimap + properties: + - ITERABLE_MATERIALIZATION_URN + - MULTIMAP_MATERIALIZATION_URN +Max: + methods: + - apply + - doublesGlobally + - doublesPerKey + - globally + - identity + - integersGlobally + - integersPerKey + - longsGlobally + - longsPerKey + - naturalOrder + - of + - ofDoubles + 
- ofIntegers + - ofLongs + - perKey + - populateDisplayData +Mean: + methods: + - addInput + - createAccumulator + - decode + - encode + - equals + - extractOutput + - getAccumulatorCoder + - globally + - hashCode + - mergeAccumulator + - of + - perKey + - toString + - verifyDeterministic +MemoryMonitor: + methods: + - describeMemory + - dumpHeap + - fromOptions + - isThrashing + - run + - stop + - totalGCTimeMilliseconds + - tryToDumpHeap + - waitForResources + - waitForThrashingState + properties: + - DEFAULT_SLEEP_TIME_MILLIS +MergeOverlappingIntervalWindows: + methods: + - add + - apply + - intersects + - mergeWindows + - toString +MetadataCoder: + methods: + - consistentWithEquals + - decode + - encode + - of +MetadataCoderV2: + methods: + - consistentWithEquals + - decode + - encode + - of +MetricFiltering: + methods: + - matches + - matchesScope + - subPathMatches +MetricKey: + methods: + - create + - metricName + - stepName + - toString +MetricName: + methods: + - getName + - getNamespace + - named + - toString +MetricNameFilter: + methods: + - getName + - getNamespace + - inNamespace + - named +MetricQueryResults: + methods: + - create + - getCounters + - getDistributions + - getGauges + - toString +MetricResult: + methods: + - addAttempted + - addCommitted + - attempted + - create + - getAttempted + - getCommitted + - getCommittedOrNull + - getKey + - getName + - hasCommitted + - transform +MetricResults: + methods: + - allMetrics + - queryMetrics + - toString +Metrics: + methods: + - bundleProcessingThreadCounter + - bundleProcessingThreadDistribution + - counter + - dec + - distribution + - gauge + - getName + - inc + - reset + - set + - update + - updateFinalMonitoringData + - updateIntermediateMonitoringData +MetricsBenchmark: + methods: + - check + - testBundleProcessingThreadCounterMutation + - testBundleProcessingThreadCounterReset + - testCounterCellMutation + - testCounterCellReset + properties: + - bundleCounter + - counterCell +MetricsEnvironment: + methods: + - activate + - close + - getCurrentContainer + - getMetricsEnvironmentStateForCurrentThread + - getProcessWideContainer + - isMetricsSupported + - scopedMetricsContainer + - setCurrentContainer + - setMetricsSupported + - setProcessWideContainer +MetricsFilter: + methods: + - addNameFilter + - addStep + - build + - builder + - names + - steps +MetricsReader: + methods: + - getCounterMetric + - getEndTimeMetric + - getStartTimeMetric + - ofResults + - readAll + - withNamespace +MicrosInstant: + methods: + - getArgumentType + - getBaseType + - getIdentifier + - toBaseType + - toInputType + properties: + - IDENTIFIER +MimeTypes: + properties: + - BINARY + - TEXT +Min: + methods: + - apply + - doublesGlobally + - doublesPerKey + - globally + - identity + - integersGlobally + - integersPerKey + - longsGlobally + - longsPerKey + - naturalOrder + - of + - ofDoubles + - ofIntegers + - ofLongs + - perKey + - populateDisplayData +Mod: + methods: + - equals + - getKeysJson + - getNewValuesJson + - getOldValuesJson + - hashCode + - toString +MongoDbGridFSIO: + methods: + - advance + - close + - createReader + - expand + - finishBundle + - getAllowedTimestampSkew + - getCurrent + - getCurrentSource + - getCurrentTimestamp + - getEstimatedSizeBytes + - getOutputCoder + - output + - populateDisplayData + - processElement + - read + - setup + - split + - start + - startBundle + - teardown + - validate + - withBucket + - withChunkSize + - withCoder + - withDatabase + - withFilename + - withFilter + - withParser + - withSkew + - 
withUri + - write +MongoDbIO: + methods: + - advance + - close + - closeMongoClient + - createMongoClient + - createReader + - expand + - finishBundle + - getCurrent + - getCurrentSource + - getDocumentCount + - getEstimatedSizeBytes + - getOutputCoder + - populateDisplayData + - processElement + - read + - split + - start + - startBundle + - withBatchSize + - withBucketAuto + - withCollection + - withDatabase + - withIgnoreSSLCertificate + - withMaxConnectionIdleTime + - withNumSplits + - withOrdered + - withQueryFn + - withSSLEnabled + - withSSLInvalidHostNameAllowed + - withUpdateConfiguration + - withUri + - write +MongoDbTable: + methods: + - apply + - buildIOReader + - buildIOWriter + - constructFilter + - convert + - create + - expand + - getNotSupported + - getSupported + - getTableStatistics + - isBounded + - numSupported + - processElement + - supportsProjects + - toString + - withSchema +MongoDbTableProvider: + methods: + - buildBeamSqlTable + - getTableType +Monitor: + methods: + - getTransform + - processElement + properties: + - name + - prefix +MoreFutures: + methods: + - allAsList + - allAsListWithExceptions + - exception + - get + - getException + - getResult + - isCancelled + - isDone + - isException + - result + - runAsync + - supplyAsync +MovingFunction: + methods: + - add + - get + - isSignificant +MqttIO: + methods: + - add + - advance + - close + - closeMqttClient + - create + - createMqttClient + - createReader + - equals + - expand + - finalizeCheckpoint + - getCheckpointMark + - getCheckpointMarkCoder + - getCurrent + - getCurrentSource + - getCurrentTimestamp + - getOutputCoder + - getWatermark + - hashCode + - populateDisplayData + - processElement + - read + - split + - start + - withClientId + - withConnectionConfiguration + - withMaxNumRecords + - withMaxReadTime + - withPassword + - withRetained + - withServerUri + - withTopic + - withUsername + - write +MultimapSideInput: + methods: + - get +MultimapUserState: + methods: + - asyncClose + - clear + - createIterator + - get + - hasNext + - isReady + - keys + - next + - prefetch + - put + - remove +MutationDetectors: + methods: + - close + - forValueWithCoder + - noopMutationDetector + - verifyUnmodified +MutationGroup: + methods: + - attached + - create + - equals + - hashCode + - iterator + - primary + - size + - toString +NFA: + methods: + - addPrevEvent + - assignIndex + - atFinal + - canDecrement + - canTrim + - compile + - copy + - decrement + - equals + - getCurState + - getCurrentEvent + - getNewProceedPointer + - getNewTakePointer + - getNextState + - getPatternVar + - getPointer + - getPrevPointer + - getProceedCondition + - getQuantifier + - getRow + - getTakeCondition + - hasProceed + - hasTake + - hashCode + - isKleenePlus + - isKleenePlusSecondary + - isNull + - isProceedPointer + - proceed + - proceedIgnore + - processNewRow + - reset + - setNextState + - take + - toCEPLiteral + - toString + - trim + properties: + - isFinal + - isStart +NameCityStateId: + methods: + - decode + - encode + - equals + - hashCode + - sizeInBytes + - structuralValue + - toString + - verifyDeterministic + properties: + - CODER + - city + - id + - name + - state +NameGenerator: + methods: + - generatePartitionMetadataTableName +NameUtils: + methods: + - approximatePTransformName + - approximateSimpleName +NamedTestResult: + methods: + - create + - fields + - getMetric + - getSchema + - getValue + - tags + - toInfluxDBDataPoint + - toMap +NanosDuration: + methods: + - toBaseType + - toInputType + properties: + - 
IDENTIFIER +NanosInstant: + methods: + - toBaseType + - toInputType + properties: + - IDENTIFIER +Neo4jIO: + methods: + - apply + - create + - expand + - finishBundle + - of + - populateDisplayData + - processElement + - readAll + - setup + - startBundle + - withBatchSize + - withCoder + - withConfig + - withCypher + - withCypherLogging + - withDefaultConfig + - withDriverConfiguration + - withParametersFunction + - withPassword + - withReadTransaction + - withRowMapper + - withSessionConfig + - withTransactionConfig + - withUnwindMapName + - withUrl + - withUrls + - withUsername + - withWriteTransaction + - writeUnwind + properties: + - closed + - driver + - session +Never: + methods: + - ever + - getWatermarkThatGuaranteesFiring +NexmarkConfiguration: + methods: + - copy + - equals + - fromString + - hashCode + - overrideFromOptions + - toShortString + - toString + properties: + - DEFAULT + - auctionSkip + - avgAuctionByteSize + - avgBidByteSize + - avgPersonByteSize + - coderStrategy + - cpuDelayMs + - debug + - diskBusyBytes + - exportSummaryToBigQuery + - fanout + - firstEventRate + - generateEventFilePathPrefix + - hotAuctionRatio + - hotBiddersRatio + - hotSellersRatio + - isRateLimited + - maxAuctionsWaitingTime + - maxLogEvents + - nextEventRate + - numActivePeople + - numEventGenerators + - numEvents + - numInFlightAuctions + - numKeyBuckets + - occasionalDelaySec + - outOfOrderGroupSize + - pardoCPUFactor + - preloadSeconds + - probDelayedEvent + - pubSubMode + - pubsubMessageSerializationMethod + - query + - ratePeriodSec + - rateShape + - rateUnit + - sessionGap + - sideInputNumShards + - sideInputRowCount + - sideInputType + - sideInputUrl + - sinkType + - sourceType + - streamTimeout + - usePubsubPublishTime + - useWallclockEventTime + - watermarkHoldbackSec + - windowPeriodSec + - windowSizeSec +NexmarkLauncher: + methods: + - processElement + - run +NexmarkPerf: + methods: + - anyActivity + - fromString + - toMap + - toString + properties: + - errors + - eventBytesPerSec + - eventsPerSec + - jobId + - numEvents + - numResults + - processingDelaySec + - resultBytesPerSec + - resultsPerSec + - runtimeSec + - shutdownDelaySec + - snapshots + - startupDelaySec +NexmarkQuery: + methods: + - expand + - getTransform + properties: + - eventMonitor + - resultMonitor +NexmarkQueryModel: + methods: + - apply + - assertionFor + - simulator + properties: + - configuration +NexmarkQueryTransform: + methods: + - getSideInput + - needsSideInput + - setSideInput +NexmarkQueryUtil: + methods: + - expand + - processElement + properties: + - AS_AUCTION + - AS_BID + - AS_PERSON + - AUCTION_BY_ID + - AUCTION_BY_SELLER + - AUCTION_TAG + - BID_BY_AUCTION + - BID_TAG + - BID_TO_AUCTION + - BID_TO_PRICE + - EVENT_TIMESTAMP_FROM_DATA + - IS_BID + - IS_NEW_AUCTION + - IS_NEW_PERSON + - JUST_BIDS + - JUST_NEW_AUCTIONS + - JUST_NEW_PERSONS + - PERSON_BY_ID + - PERSON_TAG +NexmarkUtils: + methods: + - apply + - batchEventsSource + - castToKnownSize + - cleanUpSideInput + - console + - cpuDelay + - decode + - devNull + - diskBusy + - encode + - expand + - format + - hash + - info + - interEventDelayUs + - log + - prepareSideInput + - processElement + - processingMode + - rateToPeriodUs + - setupPipeline + - snoop + - stamp + - standardEventIterator + - stepLengthSec + - streamEventsSource + properties: + - BEGINNING_OF_TIME + - END_OF_TIME + - MAPPER + - PUBSUB_ID + - PUBSUB_TIMESTAMP +NoSuchSchemaException: {} +NodeStats: + methods: + - create + - getRate + - getRowCount + - getWindow + - isUnknown + - 
minus + - multiply + - plus + properties: + - UNKNOWN +NonMergingWindowFn: + methods: + - isNonMerging + - mergeWindows +NoopCredentialFactory: + methods: + - fromOptions + - getAuthenticationType + - getCredential + - getRequestMetadata + - hasRequestMetadata + - hasRequestMetadataOnly + - refresh +NoopLock: + methods: + - get + - lock + - lockInterruptibly + - newCondition + - tryLock + - unlock +NoopPathValidator: + methods: + - fromOptions + - validateInputFilePatternSupported + - validateOutputFilePrefixSupported + - validateOutputResourceSupported + - verifyPath +NullCredentialInitializer: + methods: + - handleResponse + - initialize + - throwNullCredentialException +NullThroughputEstimator: + methods: + - getFrom + - update +NullableCoder: + methods: + - consistentWithEquals + - decode + - encode + - getCoderArguments + - getEncodedTypeDescriptor + - getValueCoder + - isRegisterByteSizeObserverCheap + - of + - registerByteSizeObserver + - structuralValue + - verifyDeterministic +NumberedShardedFile: + methods: + - getFilePattern + - readFilesWithRetries + - toString +ObjectPool: + methods: + - equals + - hashCode + - pooledClientFactory + - release + - releaseByKey + - retain +OffsetBasedSource: + methods: + - advance + - allowsDynamicSplitting + - createSourceForSubrange + - getBytesPerOffset + - getCurrentSource + - getEndOffset + - getEstimatedSizeBytes + - getFractionConsumed + - getMaxEndOffset + - getMinBundleSize + - getSplitPointsConsumed + - getSplitPointsRemaining + - getStartOffset + - isDone + - isStarted + - populateDisplayData + - split + - splitAtFraction + - start + - toString + - validate +OffsetByteRangeCoder: + methods: + - decode + - encode + - getCoderProvider +OffsetRange: + methods: + - consistentWithEquals + - decode + - encode + - equals + - getEncodedTypeDescriptor + - getFrom + - getTo + - hashCode + - isRegisterByteSizeObserverCheap + - newTracker + - of + - split + - toString +OffsetRangeTracker: + methods: + - checkDone + - currentRestriction + - getFractionConsumed + - getPositionForFractionConsumed + - getProgress + - getSplitPointsProcessed + - getStartPosition + - getStopPosition + - isBounded + - isDone + - isStarted + - markDone + - toString + - tryClaim + - tryReturnRecordAt + - trySplit + - trySplitAtPosition + properties: + - OFFSET_INFINITY +OneOfType: + methods: + - create + - createValue + - equals + - getArgument + - getArgumentType + - getBaseType + - getCaseEnumType + - getCaseType + - getFieldType + - getIdentifier + - getOneOfSchema + - getValue + - hashCode + - toBaseType + - toInputType + - toString + properties: + - IDENTIFIER +OpenModuleAgent: + methods: + - premain +Operator: + methods: + - getName + - getOutputType + - toString +OperatorTransform: + methods: + - apply + - expand + - getOperator +OrFinallyTrigger: + methods: + - getMainTrigger + - getUntilTrigger + - getWatermarkThatGuaranteesFiring + - mayFinish + - toString +Order: + methods: + - equals + - getCustomerId + - getId + - hashCode + - setCustomerId + - setId + - toString +OrderKey: + methods: + - getDir + - getIndex + - getNullFirst + - of +OutboundObserverFactory: + methods: + - clientBuffered + - clientDirect + - outboundObserverFor + - serverDirect + - trivial +PAssert: + methods: + - apply + - assertFor + - containsInAnyOrder + - countAsserts + - empty + - enterCompositeTransform + - equals + - expand + - from + - hashCode + - inCombinedNonLatePanes + - inEarlyGlobalWindowPanes + - inEarlyPane + - inFinalPane + - inLatePane + - inOnTimePane + - inOnlyPane + - 
inWindow + - isEqualTo + - leaveCompositeTransform + - notEqualTo + - of + - prepareActuals + - processElement + - satisfies + - that + - thatFlattened + - thatList + - thatMap + - thatMultimap + - thatSingleton + - thatSingletonIterable + - visitPrimitiveTransform + - windowActuals + - windowDummy + - wrap + properties: + - FAILURE_COUNTER + - SUCCESS_COUNTER +PBegin: + methods: + - apply + - expand + - getPipeline + - in +PCollection: + methods: + - and + - apply + - createPrimitiveOutputInternal + - expand + - finishSpecifying + - finishSpecifyingOutput + - getCoder + - getFromRowFunction + - getName + - getSchema + - getToRowFunction + - getTypeDescriptor + - getWindowingStrategy + - hasSchema + - isBounded + - setCoder + - setIsBoundedInternal + - setName + - setRowSchema + - setSchema + - setTypeDescriptor + - setWindowingStrategyInternal +PCollectionConsumerRegistry: + methods: + - accept + - finishLazyUpdate + - forConsumer + - getConsumer + - getExecutionState + - getMetricsContainer + - getMultiplexingConsumer + - getPTransformId + - getProgress + - register + - trySplit + - tryUpdate +PCollectionList: + methods: + - and + - apply + - empty + - equals + - expand + - finishSpecifyingOutput + - get + - getAll + - getPipeline + - hashCode + - of + - size +PCollectionLists: + methods: + - getOnlyElement +PCollectionRowTuple: + methods: + - and + - apply + - empty + - equals + - expand + - finishSpecifyingOutput + - get + - getAll + - getPipeline + - has + - hashCode + - of +PCollectionTuple: + methods: + - and + - apply + - empty + - equals + - expand + - finishSpecifyingOutput + - get + - getAll + - getPipeline + - has + - hashCode + - of + - ofPrimitiveOutputsInternal +PCollectionViews: + methods: + - apply + - compare + - contains + - containsKey + - create + - createMetadata + - decode + - encode + - entrySet + - equals + - expand + - get + - getCoderArguments + - getCoderInternal + - getDefaultValue + - getMaterialization + - getMetadata + - getPCollection + - getTagInternal + - getTypeDescriptor + - getViewFn + - getWindowMappingFn + - getWindowingStrategyInternal + - hasDefault + - hashCode + - isMetadata + - iterableView + - iterableViewUsingVoidKey + - iterator + - listIterator + - listView + - listViewUsingVoidKey + - mapView + - mapViewUsingVoidKey + - multimapView + - multimapViewUsingVoidKey + - singletonView + - singletonViewUsingVoidKey + - size + - toAdditionalInputs + - toString + - verifyDeterministic +PDone: + methods: + - expand + - finishSpecifyingOutput + - getPipeline + - in +POJOUtils: + methods: + - appender + - createConstructorCreator + - createStaticCreator + - getConstructorCreator + - getFieldTypes + - getGetters + - getSetFieldCreator + - getSetters + - getStaticCreator + - prepare + - schemaFromPojoClass + properties: + - CACHED_CREATORS +PTransform: + methods: + - compose + - expand + - getAdditionalInputs + - getDefaultOutputCoder + - getName + - getResourceHints + - populateDisplayData + - setResourceHints + - toString + - validate +PTransformFunctionRegistry: + methods: + - getFunctions + - register +PTransformOverride: + methods: + - getMatcher + - getOverrideFactory + - of +PValueBase: + methods: + - finishSpecifying + - finishSpecifyingOutput + - getName + - getPipeline + - setName + - toString +PValues: + methods: + - expandInput + - expandOutput + - expandValue + - fullyExpand +PaneInfo: + methods: + - createPane + - decode + - decodePane + - encode + - equals + - fromTag + - getIndex + - getNonSpeculativeIndex + - getTiming + - hashCode + - 
isFirst + - isLast + - isUnknown + - of + - toString + - verifyDeterministic + properties: + - INSTANCE + - NO_FIRING + - ON_TIME_AND_ONLY_FIRING + - tag +ParDo: + methods: + - dispatchBag + - dispatchCombining + - dispatchMap + - dispatchOrderedList + - dispatchSet + - dispatchValue + - expand + - getAdditionalInputs + - getAdditionalOutputTags + - getDoFnSchemaInformation + - getFn + - getMainOutputTag + - getSideInputs + - of + - populateDisplayData + - toString + - withOutputTags + - withSideInput + - withSideInputs +ParDoLoadTest: + methods: + - main + - processElement +ParquetIO: + methods: + - apply + - close + - create + - createOrOverwrite + - defaultBlockSize + - expand + - flush + - from + - getConfWithModelClass + - getInitialRestriction + - getLength + - getPos + - getProgress + - getRestrictionCoder + - getSize + - makeProgress + - newStream + - newTracker + - open + - parseFilesGenericRecords + - parseGenericRecords + - populateDisplayData + - processElement + - read + - readFiles + - seek + - sink + - split + - splitBlockWithLimit + - supportsBlockSize + - withAvroDataModel + - withBeamSchemas + - withCoder + - withCompressionCodec + - withConfiguration + - withProjection + - withRowGroupSize + - write +ParquetTableProvider: + methods: + - buildBeamSqlTable + - getTableType +ParseException: {} +ParseJsons: + methods: + - apply + - exceptionsInto + - exceptionsVia + - expand + - of + - withMapper +ParseResult: + methods: + - equals + - failure + - getContent + - getError + - getErrorAsString + - getFileLocation + - getMetadata + - hashCode + - isSuccess + - success + - toString +Partition: + methods: + - expand + - getOutputTags + - getSideInputs + - of + - populateDisplayData + - processElement +PartitionMetadata: + methods: + - build + - equals + - getCreatedAt + - getEndTimestamp + - getFinishedAt + - getHeartbeatMillis + - getParentTokens + - getPartitionToken + - getRunningAt + - getScheduledAt + - getStartTimestamp + - getState + - getWatermark + - hashCode + - newBuilder + - setCreatedAt + - setEndTimestamp + - setFinishedAt + - setHeartbeatMillis + - setParentTokens + - setPartitionToken + - setRunningAt + - setScheduledAt + - setStartTimestamp + - setState + - setWatermark + - toBuilder + - toString +PartitionMetadataAdminDao: + methods: + - createPartitionMetadataTable + - deletePartitionMetadataTable + properties: + - COLUMN_CREATED_AT + - COLUMN_END_TIMESTAMP + - COLUMN_FINISHED_AT + - COLUMN_HEARTBEAT_MILLIS + - COLUMN_PARENT_TOKENS + - COLUMN_PARTITION_TOKEN + - COLUMN_RUNNING_AT + - COLUMN_SCHEDULED_AT + - COLUMN_START_TIMESTAMP + - COLUMN_STATE + - COLUMN_WATERMARK +PartitionMetadataDao: + methods: + - countPartitionsCreatedAfter + - getAllPartitionsCreatedAfter + - getCommitTimestamp + - getPartition + - getResult + - getUnfinishedMinWatermark + - insert + - runInTransaction + - tableExists + - toString + - updateToFinished + - updateToRunning + - updateToScheduled + - updateWatermark +PartitionMetadataMapper: + methods: + - from +PartitionPosition: + methods: + - done + - equals + - getMode + - getTimestamp + - hashCode + - queryChangeStream + - stop + - toString + - updateState + - waitForChildPartitions +PartitionRestriction: + methods: + - done + - equals + - getEndTimestamp + - getMetadata + - getMode + - getStartTimestamp + - getStoppedMode + - hashCode + - queryChangeStream + - stop + - toString + - updateState + - waitForChildPartitions + - withMetadata +PartitionRestrictionClaimer: + methods: + - tryClaim +PartitionRestrictionMetadata: + methods: + 
- build + - getPartitionEndTimestamp + - getPartitionStartTimestamp + - getPartitionToken + - newBuilder + - toString + - withPartitionEndTimestamp + - withPartitionStartTimestamp + - withPartitionToken +PartitionRestrictionProgressChecker: + methods: + - getProgress + - setTimeSupplier +PartitionRestrictionSplitter: + methods: + - trySplit +PartitionRestrictionTracker: + methods: + - checkDone + - currentRestriction + - getProgress + - isBounded + - setTimeSupplier + - tryClaim + - trySplit +PartitioningWindowFn: + methods: + - assignWindow + - assignWindows + - assignsToOneWindow + - getDefaultWindowMappingFn + - getSideInputWindow +PassThroughLogicalType: + methods: + - getArgument + - getArgumentType + - getBaseType + - getIdentifier + - toBaseType + - toInputType +PatternCondition: + methods: + - eval +PayloadSerializerKafkaTable: + methods: + - expand +PayloadSerializers: + methods: + - getSerializer +PeriodicImpulse: + methods: + - applyWindowing + - create + - expand + - startAt + - stopAt + - withInterval +PeriodicSequence: + methods: + - checkDone + - create + - currentRestriction + - equals + - expand + - getInitialRange + - getInitialWatermarkState + - getProgress + - hashCode + - isBounded + - newTracker + - newWatermarkEstimator + - processElement + - toString + - tryClaim + - trySplit + properties: + - durationMilliSec + - first + - last +Person: + methods: + - decode + - encode + - equals + - hasAnnotation + - hashCode + - sizeInBytes + - structuralValue + - toString + - verifyDeterministic + - withAnnotation + - withoutAnnotation + properties: + - CODER + - city + - creditCard + - dateTime + - emailAddress + - extra + - id + - name + - state +PersonGenerator: + methods: + - lastBase0PersonId + - nextBase0PersonId + - nextPerson +Pipeline: + methods: + - apply + - applyTransform + - begin + - create + - enterCompositeTransform + - enterPipeline + - forTransformHierarchy + - getCoderRegistry + - getOptions + - getSchemaRegistry + - leaveCompositeTransform + - leavePipeline + - replaceAll + - run + - setCoderRegistry + - toString + - traverseTopologically + - visitPrimitiveTransform + - visitValue +PipelineOptionsFactory: + methods: + - apply + - as + - compare + - create + - describe + - fromArgs + - getRegisteredOptions + - printHelp + - register + - resetCache + - withValidation + - withoutStrictParsing +PipelineOptionsValidator: + methods: + - validate + - validateCli +PipelineRunner: + methods: + - create + - fromOptions + - run +Plugin: + methods: + - build + - builder + - createBatch + - createStreaming + - getContext + - getFormatClass + - getFormatProviderClass + - getHadoopConfiguration + - getPluginClass + - getPluginConfig + - getPluginType + - initContext + - initPluginType + - isUnbounded + - prepareRun + - setContext + - setFormatClass + - setFormatProviderClass + - setPluginClass + - setPluginType + - withConfig + - withHadoopConfiguration +PluginConfigInstantiationUtils: {} +PluginConstants: + methods: + - getFormatClass + - getFormatName + - getFormatProviderClass + - getFormatProviderName + - getKeyClass + - getValueClass +PostProcessingMetricsDoFn: + methods: + - processElement +PrecombineGroupingTable: + methods: + - add + - combining + - combiningAndSampling + - compact + - equals + - estimateSize + - flush + - getAccumulator + - getGroupingKey + - getKey + - getOutputTimestamp + - getStructuralKey + - getWeight + - getWindows + - hashCode + - put + - shrink + - toString +PrecombineGroupingTableBenchmark: + methods: + - setUp + - sumIntegerBinaryCombine 
+ properties: + - distribution + - globallyWindowed +Preconditions: + methods: + - checkArgumentNotNull + - checkStateNotNull +PrefetchableIterables: + methods: + - concat + - createIterator + - emptyIterable + - fromArray + - hasNext + - isReady + - iterator + - limit + - next + - prefetch +PrefetchableIterators: + methods: + - concat + - concatIterators + - emptyIterator + - fromArray + - hasNext + - isReady + - next + - prefetch +PrepareWrite: + methods: + - expand + - processElement +PriceGenerator: + methods: + - nextPrice +ProcessBundleBenchmark: + methods: + - getCacheTokens + - handle + - log + - process + - tearDown + - testLargeBundle + - testStateWithCaching + - testStateWithoutCaching + - testTinyBundle +ProcessBundleHandler: + methods: + - activate + - addBundleProgressReporter + - addFinishBundleFunction + - addIncomingDataEndpoint + - addIncomingTimerEndpoint + - addOutgoingDataEndpoint + - addOutgoingTimersEndpoint + - addPCollectionConsumer + - addResetFunction + - addStartBundleFunction + - addTearDownFunction + - afterBundleCommit + - close + - create + - createRunnerForPTransform + - find + - getActiveBundleProcessors + - getBeamFnDataClient + - getBeamFnStateClient + - getBundleCacheSupplier + - getBundleFinalizer + - getBundleProcessorCache + - getCacheTokensSupplier + - getCoders + - getPCollectionConsumer + - getPCollections + - getPTransform + - getPTransformId + - getPipelineOptions + - getProcessBundleInstructionIdSupplier + - getProcessWideCache + - getRunnerCapabilities + - getShortIdMap + - getSplitListener + - getStateTracker + - getWindowingStrategies + - handle + - hashCode + - load + - processBundle + - progress + - reset + - shutdown + - start + - trySplit + properties: + - JAVA_SOURCE_URN +ProducerRecordCoder: + methods: + - consistentWithEquals + - decode + - encode + - getCoderArguments + - isRegisterByteSizeObserverCheap + - of + - structuralValue + - verifyDeterministic +ProtoCoder: + methods: + - coderFor + - decode + - encode + - equals + - getCoderProvider + - getExtensionHosts + - getExtensionRegistry + - getMessageType + - hashCode + - of + - verifyDeterministic + - withExtensionsFrom + properties: + - serialVersionUID +ProtoDomain: + methods: + - buildFrom + - contains + - equals + - getDescriptor + - getFieldOptionById + - getFileDescriptor + - hashCode + properties: + - serialVersionUID +ProtoDynamicMessageSchema: + methods: + - apply + - forDescriptor + - getBaseClass + - getFromRowFunction + - getSchema + - getSubContext + - getToRowFunction + - invokeNewBuilder + properties: + - serialVersionUID +ProtoFromBytes: + methods: + - apply + - expand +ProtoMessageSchema: + methods: + - fieldValueGetters + - fieldValueTypeInformations + - get + - getProtoBytesToRowFn + - getRowToProtoBytesFn + - schemaFor + - schemaTypeCreator +ProtoPayloadSerializerProvider: + methods: + - getSerializer + - identifier +ProtoSchemaLogicalTypes: + methods: + - toDuration + - toRow + - toTimestamp + properties: + - IDENTIFIER +ProtoToBytes: + methods: + - apply + - expand +ProtobufCoderProviderRegistrar: + methods: + - getCoderProviders +Providers: + methods: + - loadProviders +PubSubPayloadTranslation: + methods: + - getTransformPayloadTranslators + - getUrn + - translate +PublishResponseCoders: + methods: + - decode + - defaultPublishResponse + - encode + - fullPublishResponse + - fullPublishResponseWithoutHeaders + - verifyDeterministic +PublishResultCoders: + methods: + - decode + - defaultPublishResult + - encode + - fullPublishResult + - 
fullPublishResultWithoutHeaders + - verifyDeterministic +PublisherOptions: + methods: + - build + - newBuilder + - setTopicPath + - topicPath +PubsubClient: + methods: + - ackDeadlineSeconds + - ackId + - acknowledge + - createRandomSubscription + - createSubscription + - createTopic + - deleteSubscription + - deleteTopic + - equals + - getFullPath + - getId + - getName + - getPath + - hashCode + - isEOF + - listSubscriptions + - listTopics + - message + - modifyAckDeadline + - of + - projectPathFromId + - projectPathFromPath + - publish + - pull + - recordId + - requestTimeMsSinceEpoch + - subscriptionPathFromName + - subscriptionPathFromPath + - timestampMsSinceEpoch + - toString + - topicPathFromName + - topicPathFromPath +PubsubCoderProviderRegistrar: + methods: + - getCoderProviders +PubsubDlqProvider: + methods: + - expand + - identifier + - newDlqTransform +PubsubGrpcClient: + methods: + - ackDeadlineSeconds + - acknowledge + - close + - createSubscription + - createTopic + - deleteSubscription + - deleteTopic + - getKind + - isEOF + - listSubscriptions + - listTopics + - modifyAckDeadline + - newClient + - publish + - pull + properties: + - FACTORY +PubsubHelper: + methods: + - cleanup + - create + - createOrReuseTopic + - createSubscription + - createTopic + - reuseSubscription + - reuseTopic + - subscriptionExists + - topicExists +PubsubIO: + methods: + - apply + - asPath + - asV1Beta1Path + - asV1Beta2Path + - expand + - finishBundle + - fromPath + - fromSubscription + - fromTopic + - populateDisplayData + - processElement + - readAvroGenericRecords + - readAvros + - readAvrosWithBeamSchema + - readMessages + - readMessagesWithAttributes + - readMessagesWithAttributesAndMessageId + - readMessagesWithAttributesAndMessageIdAndOrderingKey + - readMessagesWithCoderAndParseFn + - readMessagesWithMessageId + - readProtoDynamicMessages + - readProtos + - readStrings + - startBundle + - to + - toString + - withClientFactory + - withCoderAndParseFn + - withDeadLetterTopic + - withIdAttribute + - withMaxBatchBytesSize + - withMaxBatchSize + - withPubsubRootUrl + - withTimestampAttribute + - writeAvros + - writeMessages + - writeProtos + - writeStrings +PubsubJsonClient: + methods: + - ackDeadlineSeconds + - acknowledge + - close + - createSubscription + - createTopic + - deleteSubscription + - deleteTopic + - getKind + - isEOF + - listSubscriptions + - listTopics + - modifyAckDeadline + - newClient + - publish + - pull + properties: + - FACTORY +PubsubLiteIO: + methods: + - addUuids + - deduplicate + - expand + - read + - write +PubsubLiteSink: + methods: + - finishBundle + - processElement + - startBundle +PubsubLiteTableProvider: + methods: + - buildBeamSqlTable + - getTableType +PubsubMessage: + methods: + - equals + - getAttribute + - getAttributeMap + - getMessageId + - getOrderingKey + - getPayload + - hashCode + - toString +PubsubMessagePayloadOnlyCoder: + methods: + - decode + - encode + - of +PubsubMessageWithAttributesAndMessageIdAndOrderingKeyCoder: + methods: + - decode + - encode + - of +PubsubMessageWithAttributesAndMessageIdCoder: + methods: + - decode + - encode + - of +PubsubMessageWithAttributesCoder: + methods: + - decode + - encode + - of +PubsubMessageWithMessageIdCoder: + methods: + - decode + - encode + - of +PubsubMessages: + methods: + - apply + - fromProto + - toProto +PubsubSchemaIOProvider: + methods: + - buildReader + - buildWriter + - configurationSchema + - expand + - from + - identifier + - isBounded + - requiresDataSchema + - schema + properties: + - 
ATTRIBUTE_ARRAY_ENTRY_SCHEMA + - ATTRIBUTE_ARRAY_FIELD_TYPE + - ATTRIBUTE_MAP_FIELD_TYPE +PubsubSchemaTransformReadConfiguration: + methods: + - build + - builder + - getDataSchema + - getDeadLetterQueue + - getFormat + - getIdAttribute + - getProtoClass + - getSubscription + - getThriftClass + - getThriftProtocolFactoryClass + - getTimestampAttribute + - getTopic + - setDataSchema + - setDeadLetterQueue + - setFormat + - setIdAttribute + - setProtoClass + - setSubscription + - setThriftClass + - setThriftProtocolFactoryClass + - setTimestampAttribute + - setTopic +PubsubSchemaTransformReadProvider: + methods: + - buildTransform + - expand + - identifier + - inputCollectionNames + - outputCollectionNames + - validate +PubsubSchemaTransformWriteConfiguration: + methods: + - build + - getFormat + - getIdAttribute + - getTimestampAttribute + - getTopic + - setFormat + - setIdAttribute + - setTimestampAttribute + - setTopic +PubsubTableProvider: + methods: + - getSchemaIOProvider + - getTableType +PubsubTestClient: + methods: + - ackDeadlineSeconds + - acknowledge + - advance + - close + - createFactoryForCreateSubscription + - createFactoryForPublish + - createFactoryForPull + - createFactoryForPullAndPublish + - createSubscription + - createTopic + - deleteSubscription + - deleteTopic + - getKind + - isEOF + - listSubscriptions + - listTopics + - modifyAckDeadline + - newClient + - publish + - pull +PubsubUnboundedSink: + methods: + - decode + - encode + - expand + - finishBundle + - getIdAttribute + - getTimestampAttribute + - getTopic + - getTopicProvider + - populateDisplayData + - processElement + - startBundle + properties: + - outer +PubsubUnboundedSource: + methods: + - advance + - apply + - close + - createReader + - decode + - encode + - expand + - finalizeCheckpoint + - getCheckpointMark + - getCheckpointMarkCoder + - getCurrent + - getCurrentRecordId + - getCurrentSource + - getCurrentTimestamp + - getIdAttribute + - getNeedsAttributes + - getNeedsMessageId + - getNeedsOrderingKey + - getOutputCoder + - getProject + - getSplitBacklogBytes + - getSubscription + - getSubscriptionProvider + - getTimestampAttribute + - getTopic + - getTopicProvider + - getWatermark + - identity + - nackAll + - nackBatch + - of + - populateDisplayData + - processElement + - requiresDeduping + - split + - start + - validate + properties: + - outer +PulsarIO: + methods: + - expand + - read + - useProcessingTime + - usePublishTime + - withAdminUrl + - withClientUrl + - withEndMessageId + - withEndTimestamp + - withExtractOutputTimestampFn + - withProcessingTime + - withPublishTime + - withPulsarClient + - withStartTimestamp + - withTopic + - write +PulsarMessage: + methods: + - getMessageRecord + - getPublishTimestamp + - getTopic + - setMessageRecord +PulsarMessageCoder: + methods: + - decode + - encode + - of +PulsarSourceDescriptor: + methods: + - of +PythonCallable: + methods: + - getArgumentType + - getBaseType + - getIdentifier + - toBaseType + - toInputType + properties: + - IDENTIFIER +PythonCallableSource: + methods: + - getPythonCallableCode + - of +PythonExternalTransform: + methods: + - expand + - from + - withArgs + - withExtraPackages + - withKwarg + - withKwargs + - withOutputCoder + - withOutputCoders + - withTypeHint +PythonMap: + methods: + - expand + - viaFlatMapFn + - viaMapFn + - withExpansionService +PythonService: + methods: + - findAvailablePort + - start + - waitForPort + - withExtraPackages +Quantifier: + methods: + - toString + properties: + - ASTERISK + - ASTERISK_RELUCTANT + - 
NONE + - PLUS + - PLUS_RELUCTANT + - QMARK + - QMARK_RELUCTANT +Query0: + methods: + - expand + - processElement +Query0Model: + methods: + - simulator +Query1: + methods: + - expand + - processElement +Query10: + methods: + - expand + - processElement + - setMaxNumWorkers + - setOutputPath + - toString +Query11: + methods: + - expand + - processElement +Query12: + methods: + - expand + - processElement +Query13: + methods: + - expand + - processElement +Query14: + methods: + - expand +Query1Model: + methods: + - simulator +Query2: + methods: + - expand + - processElement +Query2Model: + methods: + - simulator +Query3: + methods: + - expand + - onTimerCallback + - processElement +Query3Model: + methods: + - simulator +Query4: + methods: + - expand + - processElement +Query4Model: + methods: + - simulator +Query5: + methods: + - addInput + - createAccumulator + - equals + - expand + - extractOutput + - getAccumulatorCoder + - hashCode + - mergeAccumulator + - processElement + properties: + - auctions + - count +Query5Model: + methods: + - run + - simulator +Query6: + methods: + - addInput + - createAccumulator + - expand + - extractOutput + - mergeAccumulators + - processElement +Query6Model: + methods: + - simulator +Query7: + methods: + - expand + - processElement +Query7Model: + methods: + - simulator +Query8: + methods: + - expand + - processElement +Query8Model: + methods: + - run + - simulator +Query9: + methods: + - expand +Query9Model: + methods: + - simulator +QueryChangeStreamAction: + methods: + - run +QueryReader: + methods: + - getQueryIdentifiers + - readQuery +QueryStatementConverter: + methods: + - convert + - convertRootQuery +QueryTrait: + methods: + - addOutputColumnList + - addResolvedTable + - getTablePath + - isTableResolved + - resolveAlias + - retrieveFieldNames + properties: + - outputColumnMap + - resolvedTables + - withEntries +RabbitMqIO: + methods: + - advance + - advanceWatermark + - close + - createReader + - expand + - finalizeCheckpoint + - getChannel + - getCheckpointMark + - getCheckpointMarkCoder + - getCurrent + - getCurrentRecordId + - getCurrentSource + - getCurrentTimestamp + - getOutputCoder + - getWatermark + - processElement + - read + - requiresDeduping + - setup + - split + - start + - stop + - teardown + - withExchange + - withMaxNumRecords + - withMaxReadTime + - withQueue + - withQueueDeclare + - withUri + - withUseCorrelationId + - write +RabbitMqMessage: + methods: + - createProperties + - equals + - getAppId + - getBody + - getClusterId + - getContentEncoding + - getContentType + - getCorrelationId + - getDeliveryMode + - getExpiration + - getHeaders + - getMessageId + - getPriority + - getReplyTo + - getRoutingKey + - getTimestamp + - getType + - getUserId + - hashCode +RampupThrottlingFn: + methods: + - populateDisplayData + - processElement + - setup +RawUnionValue: + methods: + - equals + - getUnionTag + - getValue + - hashCode + - toString +Read: + methods: + - advance + - checkDone + - close + - create + - createReader + - currentRestriction + - decode + - encode + - expand + - from + - getCheckpoint + - getCheckpointMark + - getCheckpointMarkCoder + - getCoderArguments + - getCurrent + - getCurrentSource + - getCurrentTimestamp + - getId + - getInitialWatermarkEstimatorState + - getKindString + - getProgress + - getSize + - getSource + - getTimestamp + - getValue + - getWatermark + - initialRestriction + - isBounded + - newWatermarkEstimator + - populateDisplayData + - processElement + - restrictionCoder + - restrictionTracker + - 
setUp + - split + - splitRestriction + - start + - tryClaim + - trySplit + - verifyDeterministic + - withMaxNumRecords + - withMaxReadTime +ReadAllViaFileBasedSource: + methods: + - apply + - expand + - process +ReadBuilder: + methods: + - buildExternal + - getCsvMapper +ReadChangeStreamPartitionDoFn: + methods: + - getInitialWatermarkEstimatorState + - getSize + - initialRestriction + - newTracker + - newWatermarkEstimator + - processElement + - setThroughputEstimator + - setup +ReadChangeStreamPartitionRangeTracker: + methods: + - tryClaim + - trySplit +ReadFromPulsarDoFn: + methods: + - estimate + - getInitialRestriction + - getInitialWatermarkEstimatorState + - getRestrictionCoder + - getSize + - initPulsarClients + - newReader + - newWatermarkEstimator + - processElement + - restrictionTracker + - teardown +ReadOnlyTableProvider: + methods: + - buildBeamSqlTable + - createTable + - dropTable + - getTableType + - getTables +ReadOperation: + methods: + - create + - getColumns + - getIndex + - getKeySet + - getQuery + - getQueryName + - getTable + - withColumns + - withIndex + - withKeySet + - withPartitionOptions + - withQuery + - withQueryName + - withTable +ReadableFileCoder: + methods: + - decode + - encode + - getCoderArguments + - getMetadataCoder + - of + - verifyDeterministic +ReadableStates: + methods: + - immediate + - read + - readLater +ReaderDelay: + methods: + - delayRecord + - delayStart +ReceiverBuilder: + methods: + - build + - getSparkReceiverClass + - withConstructorArgs +RecommendationAICreateCatalogItem: + methods: + - build + - catalogName + - expand + - processElement + - projectId + - setCatalogName + - setProjectId + - withCatalogName + - withProjectId + properties: + - FAILURE_TAG + - SUCCESS_TAG +RecommendationAIIO: + methods: + - createCatalogItems + - importCatalogItems + - importUserEvents + - predictAll + - writeUserEvent +RecommendationAIImportCatalogItems: + methods: + - batchSize + - build + - catalogName + - expand + - maxBufferingDuration + - processElement + - projectId + - setBatchSize + - setCatalogName + - setMaxBufferingDuration + - setProjectId + - withBatchSize + - withCatalogName + - withProjectId + properties: + - FAILURE_TAG + - SUCCESS_TAG +RecommendationAIImportUserEvents: + methods: + - batchSize + - build + - catalogName + - eventStore + - expand + - maxBufferingDuration + - processElement + - projectId + - setBatchSize + - setCatalogName + - setEventStore + - setMaxBufferingDuration + - setProjectId + - withBatchSize + - withCatalogName + - withEventStore + - withProjectId + properties: + - FAILURE_TAG + - SUCCESS_TAG +RecommendationAIPredict: + methods: + - build + - catalogName + - eventStore + - expand + - placementId + - processElement + - projectId + - setCatalogName + - setEventStore + - setPlacementId + - setProjectId + - withCatalogName + - withEventStore + - withPlacementId + - withProjectId + properties: + - FAILURE_TAG + - SUCCESS_TAG +RecommendationAIWriteUserEvent: + methods: + - build + - catalogName + - eventStore + - expand + - processElement + - projectId + - setCatalogName + - setEventStore + - setProjectId + - withCatalogName + - withEventStore + - withProjectId + properties: + - FAILURE_TAG + - SUCCESS_TAG +RecordWithMetadata: + methods: + - getArgument + - getArgumentType + - getBaseType + - getIdentifier + - getSchema + - toBaseType + - toInputType + properties: + - RANGE_OFFSET + - RECORD_NUM + - RECORD_NUM_IN_OFFSET + - RECORD_OFFSET + - RESOURCE_ID + - VALUE +RedisConnectionConfiguration: + methods: + - connect + 
- create + - enableSSL + - populateDisplayData + - withAuth + - withHost + - withPort + - withSSL + - withTimeout +RedisCursor: + methods: + - compareTo + - equals + - getCursor + - getDbSize + - hashCode + - isStart + - of + properties: + - END_CURSOR + - ZERO_CURSOR + - ZERO_KEY +RedisIO: + methods: + - expand + - finishBundle + - getInitialRestriction + - populateDisplayData + - processElement + - read + - readKeyPatterns + - setup + - startBundle + - teardown + - withApproximateTrim + - withAuth + - withBatchSize + - withConnectionConfiguration + - withEndpoint + - withExpireTime + - withKeyPattern + - withMaxLen + - withMethod + - withOutputParallelization + - withTimeout + - write + - writeStreams +ReduceByKey: + methods: + - accumulationMode + - combineBy + - getAccumulate + - getAccumulatorFactory + - getAccumulatorType + - getMergeAccumulators + - getOutputFn + - getReducer + - getValueComparator + - getValueExtractor + - getValueType + - isCombinable + - isCombineFnStyle + - keyBy + - named + - of + - output + - outputValues + - reduceBy + - triggeredBy + - valueBy + - windowBy + - withAllowedLateness + - withOnTimeBehavior + - withSortedValues + - withTimestampCombiner +ReduceByKeyTranslator: + methods: + - addInput + - apply + - canTranslate + - createAccumulator + - extractOutput + - getAccumulatorCoder + - mergeAccumulators + - processElement + - translate +ReduceWindow: + methods: + - accumulationMode + - combineBy + - expand + - getReducer + - getValueComparator + - getValueExtractor + - getValueType + - isCombinable + - isCombineFnStyle + - named + - of + - output + - reduceBy + - triggeredBy + - valueBy + - windowBy + - withAllowedLateness + - withOnTimeBehavior + - withSortedValues + - withTimestampCombiner +ReflectHelpers: + methods: + - compare + - declaredFieldsWithAnnotation + - declaredMethodsWithAnnotation + - findClassLoader + - formatAnnotation + - formatMethod + - formatMethodWithClass + - getClosureOfMethodsOnInterface + - getClosureOfMethodsOnInterfaces + - loadServicesOrdered + - simpleTypeDescription + properties: + - INSTANCE +ReflectUtils: + methods: + - boxIfPrimitive + - create + - getAnnotatedConstructor + - getAnnotatedCreateMethod + - getClazz + - getFields + - getIterableComponentType + - getMapType + - getMethods + - getMethodsMap + - getSchema + - isGetter + - isSetter + - stripGetterPrefix + - stripPrefix + - stripSetterPrefix +Regex: + methods: + - allMatches + - expand + - find + - findAll + - findKV + - matches + - matchesKV + - processElement + - replaceAll + - replaceFirst + - split +RegexMatcher: + methods: + - describeTo + - matches +Reify: + methods: + - expand + - extractTimestampsFromValues + - getAllowedTimestampSkew + - process + - processElement + - timestamps + - timestampsInValue + - viewAsValues + - viewInGlobalWindow + - windows + - windowsInValue +ReifyAsIterable: + methods: + - expand + - processElement +RelMdNodeStats: + methods: + - getDef + - getNodeStats + properties: + - SOURCE +ReleaseInfo: + methods: + - getDefaultDockerRepoPrefix + - getDefaultDockerRepoRoot + - getName + - getProperties + - getReleaseInfo + - getSdkVersion + - getVersion + - isDevSdkVersion +RemoteGrpcPortRead: + methods: + - fromPTransform + - getPort + - readFromPort + - toPTransform + properties: + - URN +RemoteGrpcPortWrite: + methods: + - fromPTransform + - getPort + - toPTransform + - writeToPort + properties: + - URN +RenameFields: + methods: + - create + - expand + - processElement + - rename +Repeatedly: + methods: + - forever + - 
getRepeatedTrigger + - getWatermarkThatGuaranteesFiring + - mayFinish + - toString +Requirements: + methods: + - empty + - getSideInputs + - isEmpty + - requiresSideInputs + - union +Reshuffle: + methods: + - expand + - of + - processElement + - setup + - viaRandomKey + - withNumBuckets +ReshuffleTrigger: + methods: + - getWatermarkThatGuaranteesFiring + - mayFinish + - toString +ResourceHint: + methods: + - equals + - hashCode + - mergeWithOuter + - toBytes +ResourceHints: + methods: + - create + - equals + - fromOptions + - hashCode + - hints + - mergeWithOuter + - parse + - toBytes + - withAccelerator + - withHint + - withMinRam +ResourceIdCoder: + methods: + - consistentWithEquals + - decode + - encode + - of +ResourceIdTester: + methods: + - runResourceIdBattery +RestrictionTracker: + methods: + - checkDone + - currentRestriction + - from + - getTruncatedRestriction + - getWorkCompleted + - getWorkRemaining + - isBounded + - of + - tryClaim + - trySplit +RestrictionTrackers: + methods: + - checkDone + - currentRestriction + - getProgress + - isBounded + - observe + - tryClaim + - trySplit +RetryConfiguration: + methods: + - baseBackoff + - build + - builder + - convert + - maxBackoff + - numRetries + - throttledBaseBackoff + - toBuilder +RetryHttpRequestInitializer: + methods: + - handleIOException + - handleResponse + - initialize + - setCustomErrors + - setWriteTimeout +RightJoin: + methods: + - by + - named + - of + - using +RingRange: + methods: + - equals + - getEnd + - getStart + - hashCode + - isWrapping + - of + - toString +Row: + methods: + - addArray + - addIterable + - addValue + - addValues + - attachValues + - build + - deepEquals + - deepHashCode + - equals + - fromRow + - getArray + - getBaseValue + - getBaseValues + - getBoolean + - getByte + - getBytes + - getDateTime + - getDecimal + - getDouble + - getFieldCount + - getFloat + - getInt16 + - getInt32 + - getInt64 + - getIterable + - getLogicalTypeValue + - getMap + - getRow + - getSchema + - getString + - getValue + - getValues + - hashCode + - nextFieldId + - nullRow + - toRow + - toString + - withFieldAccessDescriptors + - withFieldValue + - withFieldValueGetters + - withFieldValues + - withSchema +RowBundle: + methods: + - processRows + - setup +RowCoder: + methods: + - equals + - hashCode + - of + - overrideEncodingPositions +RowCoderGenerator: + methods: + - appender + - generate + - overrideEncodingPositions + - prepare +RowJson: + methods: + - deserialize + - forSchema + - serialize + - toString + - verifySchemaSupported + - withDropNullsOnWrite + - withNullBehavior +RowJsonUtils: + methods: + - jsonToRow + - newObjectMapperWith + - rowToJson +RowMessages: + methods: + - apply + - bytesToRowFn + - rowToBytesFn +RowToCsv: + methods: + - expand + - getCsvFormat +RowToEntity: + methods: + - create + - createTest + - expand + - processElement +RowUtils: + methods: + - byteString + - byteStringUtf8 + properties: + - COLUMNS_MAPPING + - KEY + - LABELS + - TIMESTAMP_MICROS + - VALUE +RowWithGetters: + methods: + - equals + - getFieldCount + - getGetterTarget + - getGetters + - getValue + - getValues + - hashCode +RowWithStorage: + methods: + - getFieldCount + - getValue + - getValues +RpcQosOptions: + methods: + - build + - defaultOptions + - equals + - getBatchInitialCount + - getBatchMaxBytes + - getBatchMaxCount + - getBatchTargetLatency + - getHintMaxNumWorkers + - getInitialBackoff + - getMaxAttempts + - getOverloadRatio + - getSamplePeriod + - getSamplePeriodBucketSize + - getThrottleDuration + - hashCode + - 
isShouldReportDiagnosticMetrics + - newBuilder + - populateDisplayData + - toBuilder + - toString + - withBatchInitialCount + - withBatchMaxBytes + - withBatchMaxCount + - withBatchTargetLatency + - withHintMaxNumWorkers + - withInitialBackoff + - withMaxAttempts + - withOverloadRatio + - withReportDiagnosticMetrics + - withSamplePeriod + - withSamplePeriodBucketSize + - withThrottleDuration +RunInference: + methods: + - expand + - of + - ofKVs + - withExpansionService + - withExtraPackages + - withKwarg +S3FileSystemConfiguration: + methods: + - build + - builder + - builderFrom + - fromS3Options + - getBucketKeyEnabled + - getBuilder + - getS3ClientBuilder + - getS3StorageClass + - getS3ThreadPoolSize + - getS3UploadBufferSizeBytes + - getSSEAlgorithm + - getSSEAwsKeyManagementParams + - getSSECustomerKey + - getSSEKMSKeyId + - getScheme + - setBucketKeyEnabled + - setS3ClientBuilder + - setS3StorageClass + - setS3ThreadPoolSize + - setS3UploadBufferSizeBytes + - setSSEAlgorithm + - setSSEAwsKeyManagementParams + - setSSECustomerKey + - setSSEKMSKeyId + - setScheme + - toBuilder + properties: + - MINIMUM_UPLOAD_BUFFER_SIZE_BYTES +S3FileSystemRegistrar: + methods: + - fromOptions +SSECustomerKey: + methods: + - algorithm + - build + - builder + - getAlgorithm + - getKey + - getMD5 + - key + - md5 +Sample: + methods: + - addInput + - any + - anyCombineFn + - anyValueCombineFn + - combineFn + - createAccumulator + - expand + - extractOutput + - fixedSizeGlobally + - fixedSizePerKey + - getAccumulatorCoder + - getDefaultOutputCoder + - mergeAccumulators + - populateDisplayData +SbeLogicalTypes: + methods: + - getArgumentType + - getBaseType + - getIdentifier + - toBaseType + - toInputType + properties: + - IDENTIFIER +SbeSchema: + methods: + - assumeSingleMessageSchema + - build + - builder + - fromIr + - getIr + - getIrOptions + - getSbeFields + - messageId + - messageName + - setMessageId + - setMessageName + - toBuilder + properties: + - DEFAULT +ScalarFn: {} +ScalarFnReflector: + methods: + - getApplyMethod +ScalarFunctionImpl: + methods: + - create + - createAll + - getImplementor + - getJarPath + - getReturnType + - implement +Schema: + methods: + - addArrayField + - addBooleanField + - addByteArrayField + - addByteField + - addDateTimeField + - addDecimalField + - addDoubleField + - addField + - addFields + - addFloatField + - addInt16Field + - addInt32Field + - addInt64Field + - addIterableField + - addLogicalTypeField + - addMapField + - addNullableField + - addOptions + - addRowField + - addStringField + - array + - assignableTo + - assignableToIgnoreNullable + - build + - builder + - equals + - equivalent + - forTypeName + - getAllMetadata + - getCollectionElementType + - getDescription + - getEncodingPositions + - getField + - getFieldCount + - getFieldNames + - getFields + - getLastFieldId + - getLogicalType + - getMapKeyType + - getMapValueType + - getMetadata + - getMetadataString + - getName + - getNullable + - getOptionNames + - getOptions + - getRowSchema + - getType + - getTypeName + - getUUID + - getValue + - getValueOrDefault + - hasField + - hasOption + - hasOptions + - hashCode + - indexOf + - isCollectionType + - isCompositeType + - isDateType + - isEncodingPositionsOverridden + - isLogicalType + - isMapType + - isNumericType + - isPrimitiveType + - isStringType + - isSubtypeOf + - isSupertypeOf + - iterable + - logicalType + - map + - nameOf + - none + - nullable + - of + - row + - setDescription + - setEncodingPositions + - setName + - setOption + - setOptions + - 
setType + - setUUID + - toBuilder + - toSchema + - toString + - typesEqual + - withDescription + - withMetadata + - withName + - withNullable + - withOptions + - withType + properties: + - BOOLEAN + - BYTE + - BYTES + - COLLECTION_TYPES + - COMPOSITE_TYPES + - DATETIME + - DATE_TYPES + - DECIMAL + - DOUBLE + - FLOAT + - INT16 + - INT32 + - INT64 + - MAP_TYPES + - NUMERIC_TYPES + - STRING + - STRING_TYPES +SchemaAndRecord: + methods: + - getRecord + - getTableSchema +SchemaBaseBeamTable: + methods: + - getSchema +SchemaCoder: + methods: + - coderForFieldType + - consistentWithEquals + - decode + - encode + - equals + - getEncodedTypeDescriptor + - getFromRowFunction + - getSchema + - getToRowFunction + - hashCode + - of + - overrideEncodingPositions + - toString + - verifyDeterministic +SchemaIOTableProviderWrapper: + methods: + - buildBeamSqlTable + - buildIOReader + - buildIOWriter + - getSchema + - getSchemaIOProvider + - getTableStatistics + - getTableType + - isBounded + - supportsProjects +SchemaLogicalType: + methods: + - getArgumentType + - getBaseType + - getIdentifier + - toBaseType + - toInputType + properties: + - IDENTIFIER +SchemaRegistry: + methods: + - createDefault + - fromRowFunction + - getFromRowFunction + - getSchema + - getSchemaCoder + - getToRowFunction + - registerJavaBean + - registerPOJO + - registerSchemaForClass + - registerSchemaForType + - registerSchemaProvider + - schemaFor + - toRowFunction +SchemaTranslation: + methods: + - rowFromProto + - rowToProto + - schemaFromProto + - schemaToProto +SchemaUtils: + methods: + - mergeWideningNullable + - toLogicalBaseType + - toLogicalInputType +SchemaVerification: + methods: + - verifyFieldValue +SchemaZipFold: + methods: + - accept + - accumulate + - apply + - create + - parent + - path + - withParent + - withPathPart + properties: + - EMPTY +Select: + methods: + - concatFieldNames + - create + - expand + - fieldAccess + - fieldIds + - fieldNames + - flattenedSchema + - keepMostNestedFieldName + - process + - withFieldNameAs + - withOutputSchema +SelectEvent: + methods: + - expand + - processElement +SelectHelpers: + methods: + - allLeavesDescriptor + - getOutputSchema + - getRowSelector + - getRowSelectorOptimized + - select + properties: + - CONCAT_FIELD_NAMES + - KEEP_NESTED_NAME +SellerPrice: + methods: + - decode + - encode + - equals + - hashCode + - sizeInBytes + - structuralValue + - toString + - verifyDeterministic + properties: + - CODER + - seller +SerializableCoder: + methods: + - coderFor + - consistentWithEquals + - decode + - encode + - equals + - getCoderProvider + - getCoderProviders + - getEncodedTypeDescriptor + - getRecordType + - hashCode + - of + - structuralValue + - toString + - verifyDeterministic +SerializableConfiguration: + methods: + - fromMap + - get + - newConfiguration + - newJob + - readExternal + - writeExternal +SerializableFunctions: + methods: + - apply + - clonesOf + - constant + - identity +SerializableIr: + methods: + - fromIr + - ir +SerializableMatchers: + methods: + - allOf + - anyOf + - anything + - arrayContaining + - arrayContainingInAnyOrder + - arrayWithSize + - closeTo + - contains + - containsInAnyOrder + - containsString + - describeMismatch + - describeTo + - empty + - emptyArray + - emptyIterable + - endsWith + - equalTo + - fromSupplier + - get + - greaterThan + - greaterThanOrEqualTo + - hasItem + - hasSize + - isIn + - isOneOf + - iterableWithSize + - kv + - kvWithKey + - kvWithValue + - lessThan + - lessThanOrEqualTo + - matches + - not + - nullValue + - 
startsWith + - toString +SerializableRexFieldAccess: + methods: + - getIndexes +SerializableRexInputRef: + methods: + - getIndex +SerializableRexNode: + methods: + - build + - builder +SerializableThrowable: + methods: + - equals + - getThrowable + - hashCode +SerializableUtils: + methods: + - clone + - deserializeFromByteArray + - ensureSerializable + - ensureSerializableByCoder + - ensureSerializableRoundTrip + - serializeToByteArray +ServerFactory: + methods: + - allocateAddressAndCreate + - create + - createDefault + - createEpollDomainSocket + - createEpollSocket + - createWithPortSupplier + - createWithUrlFactory + - createWithUrlFactoryAndPortSupplier +SessionSideInputJoin: + methods: + - expand + - needsSideInput + - processElement +SessionSideInputJoinModel: + methods: + - simulator +Sessions: + methods: + - assignWindows + - equals + - getDefaultWindowMappingFn + - getGapDuration + - getWindowTypeDescriptor + - hashCode + - isCompatible + - mergeWindows + - populateDisplayData + - verifyCompatibility + - windowCoder + - withGapDuration +SetCoder: + methods: + - getEncodedTypeDescriptor + - of + - verifyDeterministic +Sets: + methods: + - apply + - exceptAll + - exceptDistinct + - expand + - intersectAll + - intersectDistinct + - processElement + - unionAll + - unionDistinct +ShardNameTemplate: + properties: + - DIRECTORY_CONTAINER + - INDEX_OF_MAX +ShardedKey: + methods: + - consistentWithEquals + - decode + - encode + - equals + - getCoderArguments + - getKey + - getKeyCoder + - getShardNumber + - hashCode + - isRegisterByteSizeObserverCheap + - of + - registerByteSizeObserver + - structuralValue + - toString + - verifyDeterministic +ShardedKeyCoder: + methods: + - decode + - encode + - getCoderArguments + - of + - verifyDeterministic +ShardingWritableByteChannel: + methods: + - addChannel + - close + - getChannel + - getNumShards + - isOpen + - write + - writeToShard + properties: + - ALL_SHARDS +ShuffleOperator: + methods: + - getKeyExtractor + - getKeyType + - getWindow +SideInputLoadTest: + methods: + - main + - processElement +SideInputSpec: + methods: + - create +SimpleFunction: + methods: + - apply + - fromSerializableFunctionWithOutputType + - getOutputTypeDescriptor +SingleStoreIO: + methods: + - create + - expand + - finish + - getDataSource + - getInitialRange + - populateDisplayData + - process + - processElement + - read + - readWithPartitions + - run + - splitRange + - withBatchSize + - withConnectionProperties + - withDataSourceConfiguration + - withDatabase + - withOutputParallelization + - withPassword + - withQuery + - withRowMapper + - withStatementPreparator + - withTable + - withUserDataMapper + - withUsername + - write +SingleValueCollector: + methods: + - asContext + - collect + - get + - getCounter + - getHistogram + - getTimer +SingleValueContext: + methods: + - asContext + - collect + - get + - getAndResetValue + - getCounter + - getHistogram + - getTimer +SinkMetrics: + methods: + - bytesWritten + - elementsWritten +SizeEstimator: + methods: + - sizeOf +SketchFrequencies: + methods: + - add + - addInput + - create + - createAccumulator + - decode + - encode + - estimateCount + - expand + - extractOutput + - getAccumulatorCoder + - globally + - isRegisterByteSizeObserverCheap + - mergeAccumulators + - perKey + - populateDisplayData + - withAccuracy + - withConfidence + - withRelativeError +SlidingWindows: + methods: + - assignWindows + - assignsToOneWindow + - equals + - every + - getDefaultWindowMappingFn + - getOffset + - getPeriod + - 
getSideInputWindow + - getSize + - hashCode + - isCompatible + - of + - populateDisplayData + - verifyCompatibility + - windowCoder + - withOffset +SnappyCoder: + methods: + - decode + - encode + - getCoderArguments + - of + - verifyDeterministic +SnowflakeArray: + methods: + - of + - sql +SnowflakeBatchServiceConfig: + methods: + - getCreateDisposition + - getDataSourceProviderFn + - getDatabase + - getFilesList + - getQuery + - getQuotationMark + - getSchema + - getStagingBucketDir + - getStorageIntegrationName + - getTable + - getTableSchema + - getWriteDisposition +SnowflakeBatchServiceImpl: + methods: + - read + - write +SnowflakeBinary: + methods: + - getSize + - of + - setSize + - sql + properties: + - MAX_SIZE +SnowflakeBoolean: + methods: + - of + - sql +SnowflakeChar: + methods: + - of +SnowflakeColumn: + methods: + - getDataType + - getName + - isNullable + - of + - setDataType + - setName + - setNullable + - sql +SnowflakeDate: + methods: + - of + - sql +SnowflakeDateTime: + methods: + - of +SnowflakeDecimal: + methods: + - of +SnowflakeDouble: + methods: + - of +SnowflakeFloat: + methods: + - of + - sql +SnowflakeGeography: + methods: + - of + - sql +SnowflakeIO: + methods: + - addInput + - apply + - buildDatasource + - create + - createAccumulator + - expand + - extractOutput + - finishBundle + - fromQuery + - fromTable + - getAuthenticator + - getConfig + - getDataSource + - getDatabase + - getLoginTimeout + - getOauthToken + - getPassword + - getPortNumber + - getPrivateKey + - getPrivateKeyPassphrase + - getRawPrivateKey + - getRole + - getSchema + - getServerName + - getSsl + - getUrl + - getUsername + - getWarehouse + - mergeAccumulators + - of + - populateDisplayData + - processElement + - read + - setup + - to + - withAuthenticator + - withCoder + - withCreateDisposition + - withCsvMapper + - withDataSourceConfiguration + - withDataSourceProviderFn + - withDatabase + - withDebugMode + - withFileNameTemplate + - withFlushRowLimit + - withFlushTimeLimit + - withKeyPairAuth + - withKeyPairPathAuth + - withKeyPairRawAuth + - withLoginTimeout + - withOAuth + - withPortNumber + - withQueryTransformation + - withQuotationMark + - withRole + - withSchema + - withServerName + - withShardsNumber + - withSnowPipe + - withSnowflakeServices + - withStagingBucketName + - withStorageIntegrationName + - withTableSchema + - withUrl + - withUserDataMapper + - withUsernamePasswordAuth + - withWarehouse + - withWriteDisposition + - write +SnowflakeInteger: + methods: + - of +SnowflakeNumber: + methods: + - getPrecision + - getScale + - of + - setPrecision + - setScale + - sql +SnowflakeNumeric: + methods: + - of +SnowflakeObject: + methods: + - of + - sql +SnowflakeReal: + methods: + - of +SnowflakeServicesImpl: + methods: + - getBatchService + - getStreamingService +SnowflakeStreamingServiceConfig: + methods: + - getFilesList + - getIngestManager + - getStagingBucketDir +SnowflakeStreamingServiceImpl: + methods: + - read + - write +SnowflakeString: + methods: + - of +SnowflakeTableSchema: + methods: + - getColumns + - of + - setColumns + - sql +SnowflakeText: + methods: + - of +SnowflakeTime: + methods: + - of + - sql +SnowflakeTimestamp: + methods: + - of +SnowflakeTimestampLTZ: + methods: + - of + - sql +SnowflakeTimestampNTZ: + methods: + - of + - sql +SnowflakeTimestampTZ: + methods: + - of + - sql +SnowflakeTransformRegistrar: + methods: + - knownBuilderInstances + properties: + - READ_URN + - WRITE_URN +SnowflakeVarBinary: + methods: + - of +SnowflakeVarchar: + methods: + - 
getLength + - of + - setLength + - sql + properties: + - MAX_LENGTH +SnowflakeVariant: + methods: + - of + - sql +SnsCoderProviderRegistrar: + methods: + - getCoderProviders +SnsIO: + methods: + - create + - expand + - processElement + - setup + - tearDown + - test + - withAWSClientsProvider + - withClientConfiguration + - withCoder + - withFullPublishResponse + - withFullPublishResponseWithoutHeaders + - withFullPublishResult + - withFullPublishResultWithoutHeaders + - withPublishRequestBuilder + - withPublishRequestFn + - withResultOutputTag + - withRetryConfiguration + - withSnsClientProvider + - withTopicArn + - withTopicName + - write + - writeAsync +SocketAddressFactory: + methods: + - createFrom +SolrIO: + methods: + - baseUrl + - closeClient + - coreName + - coreUrl + - create + - expand + - finishBundle + - from + - populateDisplayData + - process + - processElement + - read + - readAll + - setup + - startBundle + - test + - to + - withBasicCredentials + - withConnectionConfiguration + - withMaxBatchSize + - withQuery + - withReplicaInfo + - withRetryConfiguration + - write +SortValues: + methods: + - create + - expand + - hasNext + - iterator + - next + - processElement + - remove +SortedMapCoder: + methods: + - consistentWithEquals + - decode + - encode + - getCoderArguments + - getEncodedTypeDescriptor + - getKeyCoder + - getValueCoder + - of + - registerByteSizeObserver + - structuralValue + - verifyDeterministic +Source: + methods: + - advance + - close + - getCurrent + - getCurrentSource + - getCurrentTimestamp + - getDefaultOutputCoder + - getOutputCoder + - populateDisplayData + - start + - validate +SourceMetrics: + methods: + - backlogBytes + - backlogBytesOfSplit + - backlogElements + - backlogElementsOfSplit + - bytesRead + - bytesReadBySplit + - elementsRead + - elementsReadBySplit +SourceRecordJson: + methods: + - mapSourceRecord + - toJson +SourceTestUtils: + methods: + - advance + - assertSourcesEqualReferenceSource + - assertSplitAtFractionBehavior + - assertSplitAtFractionExhaustive + - assertSplitAtFractionFails + - assertSplitAtFractionSucceedsAndConsistent + - assertUnstartedReaderReadsSameAsItsSource + - close + - createReader + - createStructuralValues + - equals + - getCurrent + - getCurrentSource + - getCurrentTimestamp + - getEstimatedSizeBytes + - getFractionConsumed + - getOutputCoder + - getSplitPointsConsumed + - getSplitPointsRemaining + - hashCode + - populateDisplayData + - readFromSource + - readFromSplitsOfSource + - readFromStartedReader + - readFromUnstartedReader + - readNItemsFromStartedReader + - readNItemsFromUnstartedReader + - readRemainingFromReader + - split + - splitAtFraction + - start + - toString + - toUnsplittableSource + - validate + properties: + - numPrimaryItems + - numResidualItems +SpannerAccessor: + methods: + - close + - getBatchClient + - getDatabaseAdminClient + - getDatabaseClient + - getOrCreate +SpannerConfig: + methods: + - build + - create + - getCommitDeadline + - getCommitRetrySettings + - getDatabaseId + - getDatabaseRole + - getEmulatorHost + - getExecuteStreamingSqlRetrySettings + - getHost + - getInstanceId + - getIsLocalChannelProvider + - getMaxCumulativeBackoff + - getProjectId + - getRetryableCodes + - getRpcPriority + - populateDisplayData + - validate + - withCommitDeadline + - withCommitRetrySettings + - withDatabaseId + - withDatabaseRole + - withEmulatorHost + - withExecuteStreamingSqlRetrySettings + - withHost + - withInstanceId + - withIsLocalChannelProvider + - withMaxCumulativeBackoff + - 
withProjectId + - withRetryableCodes + - withRpcPriority +SpannerIO: + methods: + - build + - compareTo + - createTransaction + - expand + - finishBundle + - grouped + - load + - of + - output + - outputWithTimestamp + - populateDisplayData + - processElement + - read + - readAll + - readChangeStream + - setSpannerConfig + - setTimestampBound + - setup + - teardown + - withBatchSizeBytes + - withBatching + - withChangeStreamName + - withColumns + - withCommitDeadline + - withDatabaseId + - withDialectView + - withEmulatorHost + - withFailureMode + - withGroupingFactor + - withHighPriority + - withHost + - withInclusiveEndAt + - withInclusiveStartAt + - withIndex + - withInstanceId + - withKeySet + - withLowPriority + - withMaxCumulativeBackoff + - withMaxNumMutations + - withMaxNumRows + - withMetadataDatabase + - withMetadataInstance + - withMetadataTable + - withPartitionOptions + - withProjectId + - withQuery + - withQueryName + - withReadOperation + - withRpcPriority + - withSchemaReadySignal + - withSpannerConfig + - withTable + - withTimestamp + - withTimestampBound + - withTraceSampleProbability + - withTransaction + - write +SpannerTransformRegistrar: + methods: + - buildExternal + - getReadOperation + - knownBuilderInstances + - setBatching + - setCommitDeadline + - setDatabaseId + - setEmulatorHost + - setGroupingFactor + - setHost + - setInstanceId + - setMaxBatchSizeBytes + - setMaxCumulativeBackoff + - setMaxNumberMutations + - setMaxNumberRows + - setProjectId + - setReadTimestamp + - setSchema + - setSql + - setStaleness + - setTable + - setTimeUnit + - setTimestampBoundMode + properties: + - DELETE_URN + - INSERT_OR_UPDATE_URN + - INSERT_URN + - READ_URN + - REPLACE_URN + - UPDATE_URN +SpannerWriteResult: + methods: + - expand + - finishSpecifyingOutput + - getFailedMutations + - getOutput + - getPipeline +SpannerWriteSchemaTransformProvider: + methods: + - build + - buildTransform + - builder + - expand + - getDatabaseId + - getInstanceId + - getTableId + - identifier + - inputCollectionNames + - outputCollectionNames + - setDatabaseId + - setInstanceId + - setTableId +SparkReceiverIO: + methods: + - expand + - read + - validateTransform + - withGetOffsetFn + - withSparkReceiverBuilder + - withTimestampFn +Split: + methods: + - named + - negative + - of + - output + - positive + - using +SplitResult: + methods: + - getPrimary + - getResidual + - of +SplunkEvent: + methods: + - create + - event + - host + - index + - newBuilder + - source + - sourceType + - time + - withEvent + - withHost + - withIndex + - withSource + - withSourceType + - withTime +SplunkIO: + methods: + - expand + - processElement + - setup + - withBatchCount + - withDisableCertificateValidation + - withEnableBatchLogs + - withEnableGzipHttpCompression + - withParallelism + - withRootCaCertificatePath + - write +SplunkWriteError: + methods: + - create + - newBuilder + - payload + - statusCode + - statusMessage + - withPayload + - withStatusCode + - withStatusMessage +SqlAnalyzer: {} +SqlBoundedSideInputJoin: + methods: + - calciteSqlBoundedSideInputJoin + - expand + - needsSideInput + - zetaSqlBoundedSideInputJoin +SqlCheckConstraint: + methods: + - getOperandList + - getOperator + - unparse +SqlColumnDeclaration: + methods: + - getOperandList + - getOperator + - unparse +SqlConversionException: {} +SqlCreateExternalTable: + methods: + - execute + - getOperandList + - unparse +SqlCreateFunction: + methods: + - execute + - getOperandList + - getOperator + - unparse +SqlDdlNodes: + methods: + - column + - 
dropTable +SqlDropTable: {} +SqlOperators: + methods: + - createStringAggOperator + - createZetaSqlFunction + - getSyntax + properties: + - ARRAY_AGG_FN + - BIT_XOR + - CAST_OP + - CHAR_LENGTH + - CONCAT + - COUNTIF + - DATE_OP + - ENDS_WITH + - LIKE + - LTRIM + - REPLACE + - REVERSE + - RTRIM + - START_WITHS + - SUBSTR + - TIMESTAMP_OP + - TRIM + - VALIDATE_TIMESTAMP + - VALIDATE_TIME_INTERVAL + - ZETASQL_TIMESTAMP_ADD +SqlQuery0: + methods: + - calciteSqlQuery0 + - expand + - processElement + - zetaSqlQuery0 +SqlQuery1: + methods: + - apply + - expand +SqlQuery2: + methods: + - calciteSqlQuery2 + - expand + - zetaSqlQuery2 +SqlQuery3: + methods: + - calciteSqlQuery3 + - expand + - zetaSqlQuery3 +SqlQuery5: + methods: + - expand +SqlQuery7: + methods: + - expand +SqlSetOptionBeam: + methods: + - execute +SqlTransform: + methods: + - expand + - query + - registerUdaf + - registerUdf + - withAutoLoading + - withDdlString + - withDefaultTableProvider + - withErrorsTransformer + - withNamedParameters + - withPositionalParameters + - withQueryPlannerClass + - withTableProvider +SqlTransformRunner: + methods: + - getIdentifiers + - runUsingSqlTransform + - visit +SqlTypes: + properties: + - DATE + - DATETIME + - TIME + - TIMESTAMP +SqsIO: + methods: + - expand + - processElement + - read + - setup + - teardown + - withClientConfiguration + - withCoder + - withMaxNumRecords + - withMaxReadTime + - withQueueUrl + - withSqsClientProvider + - write +SqsMessage: + methods: + - create + - getBody + - getMessageId + - getReceiptHandle + - getRequestTimeStamp + - getTimeStamp +StateBackedIterable: + methods: + - encode + - fromComponents + - getCoderTranslators + - getCoderURNs + - getComponents + - iterator +StateContexts: + methods: + - getPipelineOptions + - nullContext + - sideInput + - window + - windowOnlyContext +StateFetchingIterators: + methods: + - append + - clearAndAppend + - createIterator + - fromValues + - getBlocks + - getContinuationToken + - getWeight + - hasNext + - isReady + - loadPrefetchedResponse + - mutatedBlock + - next + - prefetch + - readAllAndDecodeStartingFrom + - remove + - seekToContinuationToken + - shrink +StateKeySpec: + methods: + - fields +StateSpecs: + methods: + - bag + - bind + - combining + - combiningFromInputInternal + - convertToBagSpecInternal + - convertToMapSpecInternal + - equals + - finishSpecifying + - hashCode + - map + - match + - offerCoders + - orderedList + - rowBag + - rowMap + - rowOrderedList + - rowSet + - rowValue + - set + - value + - watermarkStateInternal +StaticSchemaInference: + methods: + - fieldFromType + - schemaFromClass + - sortBySchema +StorageApiConvertMessages: + methods: + - expand + - onTeardown + - processElement +StorageApiDynamicDestinationsTableRow: + methods: + - getMessageConverter + - getTableSchema + - toMessage + - toTableRow +StorageApiFlushAndFinalizeDoFn: + methods: + - compareTo + - equals + - hashCode + - onTeardown + - process +StorageApiLoads: + methods: + - expand + - expandInconsistent + - expandTriggered + - expandUntriggered + - processElement + - setup +StorageApiWritePayload: + methods: + - getPayload +StorageApiWriteRecordsInconsistent: + methods: + - expand +StorageApiWriteUnshardedRecords: + methods: + - expand + - finishBundle + - output + - outputWithTimestamp + - process + - startBundle + - teardown +StorageApiWritesShardedRecords: + methods: + - expand + - getAllowedTimestampSkew + - onTeardown + - onTimer + - onWindowExpiration + - process + - startBundle + - toString +StreamUtils: + methods: + - 
getBytesWithoutClosing +StreamingInserts: + methods: + - expand + - withExtendedErrorInfo + - withInsertRetryPolicy +StreamingSourceContextImpl: + methods: + - isPreviewEnabled + - registerLineage +StreamingWriteTables: + methods: + - apply + - expand +StringAgg: + methods: + - addInput + - createAccumulator + - extractOutput + - mergeAccumulators +StringDelegateCoder: + methods: + - decode + - encode + - equals + - getEncodedTypeDescriptor + - hashCode + - of + - structuralValue + - toString + - verifyDeterministic +StringFunctions: + methods: + - charLength + - concat + - endsWith + - like + - ltrim + - replace + - reverse + - rtrim + - startsWith + - substr + - trim + properties: + - SUBSTR_PARAMETER_EXCEED_INTEGER +StringUtf8Coder: + methods: + - consistentWithEquals + - decode + - encode + - getEncodedElementByteSize + - getEncodedTypeDescriptor + - of + - verifyDeterministic +StringUtils: + methods: + - byteArrayToJsonString + - getLevenshteinDistance + - jsonStringToByteArray +StringsGenerator: + methods: + - nextExactString + - nextExtra + - nextString +StructuralByteArray: + methods: + - equals + - getValue + - hashCode + - toString +StructuredCoder: + methods: + - equals + - getComponents + - hashCode + - toString +SubscribeTransform: + methods: + - expand + - getReplacementTransform + - mapOutputs + properties: + - V1_READ_OVERRIDE +SubscriberOptions: + methods: + - build + - newBuilder + - setSubscriptionPath + - subscriptionPath + - toBuilder +SubscriptionPartition: {} +SubscriptionPartitionCoder: + methods: + - decode + - encode + - getCoderProvider +SuccessOrFailure: + methods: + - assertionError + - equals + - failure + - hashCode + - isSuccess + - success + - toString +Sum: + methods: + - apply + - doublesGlobally + - doublesPerKey + - equals + - hashCode + - identity + - integersGlobally + - integersPerKey + - longsGlobally + - longsPerKey + - ofDoubles + - ofIntegers + - ofLongs +SumByKey: + methods: + - accumulationMode + - expand + - getValueExtractor + - keyBy + - named + - of + - output + - triggeredBy + - valueBy + - windowBy + - withAllowedLateness + - withOnTimeBehavior + - withTimestampCombiner +SummaryGenerator: + methods: + - generateTable +Sums: + methods: + - apply + - identity + - ofDoubles + - ofFloats + - ofInts + - ofLongs + - valueDesc +SynchronizedStreamObserver: + methods: + - onCompleted + - onError + - onNext + - wrapping +SyntheticBoundedSource: + methods: + - allowsDynamicSplitting + - close + - createReader + - createSourceForSubrange + - getBytesPerOffset + - getCurrent + - getCurrentSource + - getDefaultOutputCoder + - getFractionConsumed + - getMaxEndOffset + - split + - toString + - validate +SyntheticDataPublisher: + methods: + - apply + - main +SyntheticDelay: + methods: + - delay +SyntheticOptions: + methods: + - deserialize + - fromIntegerDistribution + - fromJsonString + - fromRealDistribution + - genKvPair + - getDistribution + - hashFunction + - nextDelay + - sample + - setSeed + - toString + - validate + properties: + - bytesPerRecord + - cpuUtilizationInMixedDelay + - delayType + - hotKeyFraction + - keySizeBytes + - largeKeyFraction + - largeKeySizeBytes + - numHotKeys + - seed + - valueSizeBytes +SyntheticSourceOptions: + methods: + - genRecord + - nextInitializeDelay + - nextProcessingTimeDelay + - validate + properties: + - bundleSizeDistribution + - forceNumInitialBundles + - kv + - numRecords + - progressShape + - sleepMsec + - splitPointFrequencyRecords + - watermarkDriftMillis + - watermarkSearchInAdvanceCount +SyntheticStep: 
+ methods: + - load + - processElement + - startBundle + - validate + properties: + - maxWorkerThroughput + - outputRecordsPerInputRecord + - perBundleDelay + - perBundleDelayType + - preservesInputKeyDistribution + - reportThrottlingMicros +SyntheticUnboundedSource: + methods: + - advance + - close + - createReader + - getCheckpointMark + - getCheckpointMarkCoder + - getCurrent + - getCurrentSource + - getCurrentTimestamp + - getOutputCoder + - getWatermark + - split + - start + - toString + - validate +TDigestQuantiles: + methods: + - addInput + - create + - createAccumulator + - decode + - encode + - expand + - extractOutput + - getAccumulatorCoder + - getDefaultOutputCoder + - globally + - isRegisterByteSizeObserverCheap + - mergeAccumulators + - perKey + - populateDisplayData + - withCompression +TFRecordIO: + methods: + - allowsDynamicSplitting + - apply + - createWriteOperation + - createWriter + - expand + - flush + - from + - getCurrent + - getOutputCoder + - matches + - open + - populateDisplayData + - read + - readFiles + - recordLength + - sink + - to + - toResource + - withCompression + - withCompressionType + - withNoSpilling + - withNumShards + - withShardNameTemplate + - withSuffix + - withoutSharding + - withoutValidation + - write + properties: + - DEFAULT_BYTE_ARRAY_CODER +TVFSlidingWindowFn: + methods: + - assignWindows + - getDefaultWindowMappingFn + - getPeriod + - getSize + - isCompatible + - of + - windowCoder +TVFStreamingUtils: + properties: + - FIXED_WINDOW_TVF + - SESSION_WINDOW_TVF + - SLIDING_WINDOW_TVF + - WINDOW_END + - WINDOW_START +Table: + methods: + - build + - builder + - comment + - getComment + - getLocation + - getName + - getProperties + - getSchema + - getType + - location + - name + - properties + - schema + - toBuilder + - type +TableAndRecord: + methods: + - getRecord + - getTable +TableDestination: + methods: + - equals + - getClustering + - getJsonClustering + - getJsonTimePartitioning + - getTableDescription + - getTableReference + - getTableSpec + - getTableUrn + - getTimePartitioning + - hashCode + - toString + - withTableReference +TableDestinationCoder: + methods: + - decode + - encode + - of + - verifyDeterministic +TableDestinationCoderV2: + methods: + - decode + - encode + - of + - verifyDeterministic +TableDestinationCoderV3: + methods: + - decode + - encode + - of + - verifyDeterministic +TableName: + methods: + - create + - getPath + - getPrefix + - getTableName + - isCompound + - isSimple + - removePrefix +TableNameExtractionUtils: + methods: + - extractTableNamesFromNode +TableResolution: + methods: + - registerTables + - resolveCalciteTable +TableRowJsonCoder: + methods: + - decode + - encode + - getEncodedElementByteSize + - getEncodedTypeDescriptor + - of + - verifyDeterministic +TableRowToStorageApiProto: + methods: + - getDescriptorFromTableSchema + - getFullName + - getName + - getSchemaForField + - getType + - jsonValueFromMessageValue + - messageFromMap + - messageFromTableRow + - modeToProtoMode + - protoModeToJsonMode + - protoSchemaToTableSchema + - protoTableFieldToTableField + - protoTypeToJsonType + - schemaToProtoTableSchema + - tableFieldToProtoTableField + - tableRowFromMessage + - typeToProtoType +TableSchema: + methods: + - array + - arrayElementType + - build + - builder + - columnType + - columns + - defaultType + - defaultValue + - enum16 + - enum8 + - enumValues + - fixedString + - fixedStringSize + - getEquivalentFieldType + - getEquivalentSchema + - materializedOrAlias + - name + - nullable + - of + - 
parse + - parseDefaultExpression + - typeName + - withNullable + properties: + - DATE + - DATETIME + - FLOAT32 + - FLOAT64 + - INT16 + - INT32 + - INT64 + - INT8 + - STRING + - UINT16 + - UINT32 + - UINT64 + - UINT8 +TableSchemaCache: + methods: + - getSchema + - isSatisfied + - putSchemaIfAbsent + - refreshSchema + - refreshThread +TableSchemaJSONLoader: + methods: + - getAllTableNames + - parseTableSchema +TaggedPValue: + methods: + - getTag + - getValue + - of + - ofExpandedValue +TestBigQuery: + methods: + - apply + - assertThatAllRows + - create + - evaluate + - eventually + - getFlatJsonRows + - insertRows + - now + - tableReference + - tableSpec +TestPipeline: + methods: + - apply + - create + - enableAbandonedNodeEnforcement + - enableAutoRunIfMissing + - evaluate + - fromOptions + - getOptions + - isEmpty + - leaveCompositeTransform + - newProvider + - run + - runWithAdditionalOptionArgs + - testingPipelineOptions + - toString + - verifyPAssertsSucceeded + - visitPrimitiveTransform + properties: + - PROPERTY_BEAM_TEST_PIPELINE_OPTIONS +TestPubsub: + methods: + - apply + - assertSubscriptionEventuallyCreated + - assertThatTopicEventuallyReceives + - checkIfAnySubscriptionExists + - create + - evaluate + - fromOptions + - publish + - subscriptionPath + - topicPath + - waitForNMessages +TestPubsubSignal: + methods: + - apply + - create + - evaluate + - expand + - processElement + - signalStart + - signalSuccessWhen + - waitForStart + - waitForSuccess +TestStream: + methods: + - add + - addElements + - advanceBy + - advanceProcessingTime + - advanceTo + - advanceWatermarkTo + - advanceWatermarkToInfinity + - create + - decode + - encode + - equals + - expand + - fromRawEvents + - getCoderArguments + - getElements + - getEvents + - getProcessingTimeAdvance + - getValueCoder + - getWatermark + - hashCode + - of + - verifyDeterministic +TextIO: + methods: + - apply + - expand + - flush + - from + - matches + - open + - populateDisplayData + - read + - readAll + - readFiles + - sink + - skipIfEmpty + - to + - toResource + - watchForNewFiles + - withCompression + - withCompressionType + - withDelimiter + - withEmptyMatchTreatment + - withFooter + - withFormatFunction + - withHeader + - withHintMatchesManyFiles + - withMatchConfiguration + - withNoSpilling + - withNumShards + - withOutputFilenames + - withShardNameTemplate + - withSuffix + - withTempDirectory + - withWindowedWrites + - withWritableByteChannelFactory + - withoutSharding + - write + - writeCustomType +TextJsonTable: {} +TextMessageMapper: + methods: + - apply +TextRowCountEstimator: + methods: + - build + - builder + - estimateRowCount + - getCompression + - getDelimiters + - getDirectoryTreatment + - getEmptyMatchTreatment + - getFilePattern + - getNumSampledBytesPerFile + - getSamplingStrategy + - setCompression + - setDelimiters + - setDirectoryTreatment + - setEmptyMatchTreatment + - setFilePattern + - setNumSampledBytesPerFile + - setSamplingStrategy + - stopSampling +TextSourceBenchmark: + methods: + - benchmarkHadoopLineReader + - benchmarkTextSource + - createFile + - deleteFile + properties: + - length + - path + - pathString +TextTable: + methods: + - buildIOReader + - buildIOWriter + - getFilePattern + - getTableStatistics + - isBounded +TextTableProvider: + methods: + - buildBeamSqlTable + - create + - deadLetterFile + - expand + - getCsvFormat + - getTableType + - processElement + - schema +TextualIntegerCoder: + methods: + - decode + - encode + - getEncodedTypeDescriptor + - of + - verifyDeterministic 
+ThriftCoder: + methods: + - decode + - encode + - of +ThriftIO: + methods: + - build + - close + - expand + - flush + - open + - populateDisplayData + - processElement + - readFiles + - sink + - withProtocol + - write +ThriftPayloadSerializerProvider: + methods: + - getSerializer + - identifier +ThriftSchema: + methods: + - custom + - fieldValueGetters + - fieldValueTypeInformations + - get + - name + - provider + - schemaFor + - schemaTypeCreator + - toString + - typedef +TikaIO: + methods: + - expand + - filepattern + - parse + - parseFiles + - populateDisplayData + - processElement + - setup + - withContentTypeHint + - withInputMetadata + - withTikaConfigPath +Time: + methods: + - getArgument + - getArgumentType + - getBaseType + - getIdentifier + - toBaseType + - toInputType + properties: + - IDENTIFIER +TimeMonitor: + methods: + - processElement +TimeUtil: + methods: + - toJava + - toJoda +TimerEndpoint: + methods: + - create + - getCoder + - getReceiver + - getTimerFamilyId + - getTransformId +TimerSpecs: + methods: + - getTimeDomain + - timer + - timerMap +TimestampEncoding: {} +TimestampExtractTransform: + methods: + - expand + - of + - processElement +TimestampFunctions: + methods: + - timestamp +TimestampPolicy: + methods: + - getBacklogCheckTime + - getMessageBacklog + - getTimestampForRecord + - getWatermark +TimestampPrefixingWindowCoder: + methods: + - consistentWithEquals + - decode + - encode + - getCoderArguments + - getWindowCoder + - isRegisterByteSizeObserverCheap + - of + - registerByteSizeObserver + - verifyDeterministic +TimestampRange: + methods: + - equals + - getFrom + - getTo + - hashCode + - of + - toString +TimestampRangeTracker: + methods: + - checkDone + - currentRestriction + - getProgress + - isBounded + - setTimeSupplier + - tryClaim + - trySplit +TimestampTransform: + methods: + - alignTo + - delay + - getDelay + - getOffset + - getPeriod +TimestampUtils: + methods: + - next + - previous + - toNanos + - toTimestamp +TimestampedValue: + methods: + - atMinimumTimestamp + - decode + - encode + - equals + - getCoderArguments + - getComponents + - getEncodedTypeDescriptor + - getTimestamp + - getValue + - getValueCoder + - hashCode + - of + - structuralValue + - toString + - verifyDeterministic +ToJson: + methods: + - expand + - of + - processElement +ToString: + methods: + - apply + - elements + - expand + - iterables + - kvs +ToStringFnRunner: + methods: + - getPTransformRunnerFactories +Top: + methods: + - addInput + - compare + - createAccumulator + - decode + - encode + - equals + - extractOutput + - getAccumulatorCoder + - getIncompatibleGlobalWindowErrorMessage + - getNameOverride + - hashCode + - isRegisterByteSizeObserverCheap + - largest + - largestDoublesFn + - largestFn + - largestIntsFn + - largestLongsFn + - largestPerKey + - mergeAccumulator + - of + - perKey + - populateDisplayData + - registerByteSizeObserver + - smallest + - smallestDoublesFn + - smallestFn + - smallestIntsFn + - smallestLongsFn + - smallestPerKey + - verifyDeterministic +TopPerKey: + methods: + - accumulationMode + - expand + - getScoreExtractor + - getScoreType + - getValueExtractor + - getValueType + - keyBy + - named + - of + - output + - scoreBy + - triggeredBy + - valueBy + - windowBy + - withAllowedLateness + - withOnTimeBehavior + - withTimestampCombiner +TopicPartitionCoder: + methods: + - decode + - encode + - getCoderArguments + - verifyDeterministic +TpcdsOptionsRegistrar: + methods: + - getPipelineOptions +TpcdsParametersReader: + methods: + - 
getAndCheckDataSize + - getAndCheckQueryNames + - getAndCheckTpcParallel + properties: + - ALL_QUERY_NAMES +TpcdsRun: + methods: + - call +TpcdsRunResult: + methods: + - getDataSize + - getDialect + - getElapsedTime + - getEndDate + - getIsSuccessful + - getJobName + - getPipelineOptions + - getPipelineResult + - getQueryName + - getStartDate +TpcdsSchemas: + methods: + - getCallCenterSchema + - getCatalogPageSchema + - getCatalogReturnsSchema + - getCatalogSalesSchema + - getCustomerAddressSchema + - getCustomerDemographicsSchema + - getCustomerSchema + - getDateDimSchema + - getHouseholdDemographicsSchema + - getIncomeBandSchema + - getInventorySchema + - getItemSchema + - getPromotionSchema + - getReasonSchema + - getShipModeSchema + - getStoreReturnsSchema + - getStoreSalesSchema + - getStoreSchema + - getTimeDimSchema + - getTpcdsSchemas + - getTpcdsSchemasImmutableMap + - getWarehouseSchema + - getWebReturnsSchema + - getWebSalesSchema + - getWebSiteSchema + - getWebpageSchema +TpcdsUtils: {} +TrackerWithProgress: {} +Transaction: + methods: + - create + - transactionId +TransformHierarchy: + methods: + - addComposite + - finishSpecifyingInput + - getCurrent + - getEnclosingNode + - getFullName + - getInputs + - getOutputs + - getTransform + - isCompositeNode + - isRootNode + - popNode + - pushNode + - replaceChild + - replaceNode + - replaceOutputs + - setOutput + - toAppliedPTransform + - toString + - visit +Transport: + methods: + - getJsonFactory + - getTransport + - newStorageClient + properties: + - rootUrl + - servicePath +Trigger: + methods: + - equals + - getContinuationTrigger + - getWatermarkThatGuaranteesFiring + - hashCode + - isCompatible + - mayFinish + - orFinally + - subTriggers + - toString +Triple: + methods: + - equals + - getFirst + - getSecond + - getThird + - hashCode + - of + - toString +TupleTag: + methods: + - equals + - getId + - getOutName + - getTypeDescriptor + - hashCode + - toString +TupleTagList: + methods: + - and + - empty + - get + - getAll + - of + - size + - toString +TypeAware: {} +TypeAwareness: + methods: + - orObjects +TypeCode: + methods: + - equals + - getCode + - hashCode + - toString +TypeDescriptor: + methods: + - equals + - getArgumentTypes + - getClasses + - getComponentType + - getInterfaces + - getRawType + - getSupertype + - getType + - getTypeParameter + - getTypes + - hasUnresolvedParameters + - hashCode + - isArray + - isSubtypeOf + - isSupertypeOf + - of + - resolveType + - toString + - where +TypeDescriptors: + methods: + - bigdecimals + - bigintegers + - booleans + - bytes + - characters + - doubles + - extractFromTypeParameters + - floats + - inputOf + - integers + - iterables + - kvs + - lists + - longs + - maps + - nulls + - outputOf + - rows + - sets + - shorts + - strings + - voids +TypeParameter: + methods: + - equals + - hashCode + - toString +TypeUtils: + methods: + - keyValues + - triplets +TypedCombineFnDelegate: + methods: + - addInput + - apply + - compact + - createAccumulator + - defaultValue + - extractOutput + - getAccumTVariable + - getAccumulatorCoder + - getDefaultOutputCoder + - getIncompatibleGlobalWindowErrorMessage + - getInputTVariable + - getInputType + - getOutputTVariable + - getOutputType + - mergeAccumulators + - populateDisplayData +TypedSchemaTransformProvider: + methods: + - configurationSchema + - dependencies + - from +UdafImpl: + methods: + - getCombineFn + - getImplementor + - getName + - getOrdinal + - getParameters + - getReturnType + - getType + - isOptional 
+UdfImplReflectiveFunctionBase: + methods: + - add + - addMethodParameters + - build + - builder + - getName + - getOrdinal + - getParameters + - getType + - isOptional + properties: + - method + - parameters +UdfTestProvider: + methods: + - addInput + - createAccumulator + - extractOutput + - helloWorld + - increment + - incrementAll + - isNull + - matches + - mergeAccumulators + - notRegistered + - userDefinedAggregateFunctions + - userDefinedScalarFunctions +UnboundedEventSource: + methods: + - advance + - close + - createReader + - getCheckpointMark + - getCheckpointMarkCoder + - getCurrent + - getCurrentSource + - getCurrentTimestamp + - getDefaultOutputCoder + - getSplitBacklogBytes + - getWatermark + - split + - start + - toString + - validate +UnboundedReaderImpl: + methods: + - advance + - close + - getCheckpointMark + - getCurrent + - getCurrentSource + - getCurrentTimestamp + - getSplitBacklogBytes + - getWatermark + - start +UnboundedScheduledExecutorService: + methods: + - awaitTermination + - call + - cancel + - compareTo + - execute + - getDelay + - invokeAll + - invokeAny + - isPeriodic + - isShutdown + - isTerminated + - run + - schedule + - scheduleAtFixedRate + - scheduleWithFixedDelay + - shutdown + - shutdownNow + - submit +UnboundedSource: + methods: + - advance + - createReader + - finalizeCheckpoint + - getCheckpointMark + - getCheckpointMarkCoder + - getCurrentRecordId + - getCurrentSource + - getSplitBacklogBytes + - getTotalBacklogBytes + - getWatermark + - requiresDeduping + - split + - start + properties: + - BACKLOG_UNKNOWN +UnboundedSourceImpl: + methods: + - createReader + - getCheckpointMarkCoder + - getOutputCoder + - split +Union: + methods: + - named + - of + - output +UnionCoder: + methods: + - decode + - encode + - getCoderArguments + - getComponents + - getElementCoders + - isRegisterByteSizeObserverCheap + - of + - registerByteSizeObserver + - verifyDeterministic +UnionTranslator: + methods: + - translate +UnknownLogicalType: + methods: + - getPayload +UnownedInputStream: + methods: + - close + - equals + - hashCode + - mark + - markSupported + - reset + - toString +UnownedOutputStream: + methods: + - close + - equals + - hashCode + - toString + - write +UnsignedOptions: + methods: + - build + - builder + - setUint16Behavior + - setUint32Behavior + - setUint64Behavior + - setUint8Behavior + - toBuilder + - uint16Behavior + - uint32Behavior + - uint64Behavior + - uint8Behavior + - usingHigherBitSize + - usingSameBitSize +UpdateConfiguration: + methods: + - create + - withFindKey + - withIsUpsert + - withUpdateFields + - withUpdateKey +UpdateField: + methods: + - fieldUpdate + - fullUpdate +UpdateSchemaDestination: + methods: + - finishBundle + - onTeardown + - processElement + - startBundle +UploadIdResponseInterceptor: + methods: + - interceptResponse +UserCodeException: + methods: + - wrap + - wrapIf +UserFunctionDefinitions: + methods: + - build + - create + - jarPath + - javaAggregateFunctions + - javaScalarFunctions + - method + - newBuilder + - setJavaAggregateFunctions + - setJavaScalarFunctions + - setSqlScalarFunctions + - setSqlTableValuedFunctions + - sqlScalarFunctions + - sqlTableValuedFunctions +UsesAttemptedMetrics: {} +UsesCounterMetrics: {} +UsesDistributionMetrics: {} +UsesGaugeMetrics: {} +UsesImpulse: {} +UsesOrderedListState: {} +Uuid: + methods: + - of + - random + - value + properties: + - DEFAULT_ATTRIBUTE +UuidCoder: + methods: + - decode + - encode + - getCoderProvider +UuidDeduplicationOptions: + methods: + - build + - 
deduplicate + - newBuilder + - setDeduplicate + - setUuidExtractor + - uuidExtractor + properties: + - DEFAULT_DEDUPLICATE_DURATION + - DEFAULT_TIME_DOMAIN + - DEFAULT_UUID_EXTRACTOR +UuidDeduplicationTransform: + methods: + - expand +UuidLogicalType: + methods: + - getArgument + - getArgumentType + - getBaseType + - getIdentifier + - toBaseType + - toInputType + properties: + - IDENTIFIER + - LEAST_SIGNIFICANT_BITS_FIELD_NAME + - MOST_SIGNIFICANT_BITS_FIELD_NAME + - UUID_SCHEMA +ValueInSingleWindow: + methods: + - decode + - encode + - getCoderArguments + - getComponents + - getPane + - getTimestamp + - getValue + - getWindow + - of + - verifyDeterministic +ValueProviders: + methods: + - updateSerializedOptions +ValueWithRecordId: + methods: + - decode + - encode + - equals + - getCoderArguments + - getId + - getValue + - getValueCoder + - hashCode + - of + - processElement + - toString + - verifyDeterministic +Values: + methods: + - apply + - create + - expand +VarInt: + methods: + - decodeInt + - decodeLong + - encode + - getLength +VarIntCoder: + methods: + - consistentWithEquals + - decode + - encode + - getEncodedTypeDescriptor + - isRegisterByteSizeObserverCheap + - of + - verifyDeterministic +VarLongCoder: + methods: + - consistentWithEquals + - decode + - encode + - getCoderArguments + - getEncodedTypeDescriptor + - isRegisterByteSizeObserverCheap + - of + - verifyDeterministic +VariableBytes: + methods: + - getMaxLength + - getName + - of + - toInputType + - toString + properties: + - IDENTIFIER +VariableString: + methods: + - getMaxLength + - getName + - of + - toInputType + - toString + properties: + - IDENTIFIER +VarianceFn: + methods: + - addInput + - createAccumulator + - extractOutput + - getAccumulatorCoder + - mergeAccumulators + - newPopulation + - newSample +VideoIntelligence: + methods: + - annotateFromBytes + - annotateFromBytesWithContext + - annotateFromURI + - annotateFromUriWithContext + - expand +View: + methods: + - apply + - asIterable + - asList + - asMap + - asMultimap + - asSingleton + - defaultValue + - expand + - finishBundle + - getView + - hasDefaultValue + - identity + - of + - processElement + - withDefaultValue + - withSingletonValues +ViewFn: + methods: + - apply + - getMaterialization + - getTypeDescriptor +VoidAccumulatorProvider: + methods: + - add + - create + - get + - getCounter + - getFactory + - getHistogram + - getTimer + - increment +VoidCoder: + methods: + - decode + - encode + - getEncodedTypeDescriptor + - isRegisterByteSizeObserverCheap + - of + - structuralValue + - verifyDeterministic +Wait: + methods: + - expand + - finishBundle + - on + - process + - startBundle +Watch: + methods: + - afterIterations + - afterTimeSinceNewOutput + - afterTotalOf + - allOf + - canStopPolling + - checkDone + - complete + - currentRestriction + - decode + - eitherOf + - encode + - equals + - expand + - forNewInput + - getCoderArguments + - getCompleted + - getInitialRestriction + - getInitialWatermarkEstimatorState + - getPending + - getPollWatermark + - getRestrictionCoder + - getStateCoder + - getTerminationState + - growthOf + - hashCode + - ignoreInput + - incomplete + - isBounded + - never + - newTracker + - newWatermarkEstimator + - of + - onPollComplete + - onSeenNewOutput + - process + - processElement + - toString + - tryClaim + - trySplit + - verifyDeterministic + - withOutputCoder + - withOutputKeyCoder + - withOutputs + - withPollInterval + - withTerminationPerInput + - withWatermark +WatermarkEstimators: + methods: + - currentWatermark + - 
getState + - getWatermarkAndState + - observeTimestamp + - setWatermark + - threadSafe +WatermarkLatency: + methods: + - main + - process +WatermarkParameters: + methods: + - builder + - create + - toBuilder + - withTimestampFn + - withWatermarkIdleDurationThreshold +WebPathParser: + methods: + - parseDicomWebpath + properties: + - dataset + - dicomStorePath + - instanceId + - location + - project + - seriesId + - storeId + - studyId +WeightedValue: + methods: + - equals + - getValue + - getWeight + - hashCode + - of + - toString +Window: + methods: + - accumulatingFiredPanes + - apply + - configure + - discardingFiredPanes + - expand + - getOutputStrategyInternal + - getWindowFn + - into + - populateDisplayData + - remerge + - triggering + - withAllowedLateness + - withOnTimeBehavior + - withTimestampCombiner +WindowFn: + methods: + - assignWindows + - assignsToOneWindow + - element + - getDefaultWindowMappingFn + - getWindowTypeDescriptor + - isCompatible + - isNonMerging + - merge + - mergeWindows + - populateDisplayData + - timestamp + - verifyCompatibility + - window + - windowCoder + - windows +WindowFnTestUtils: + methods: + - assignedWindows + - assignedWindowsWithValue + - element + - get + - merge + - put + - runWindowFn + - runWindowFnWithValue + - set + - timestamp + - validateGetOutputTimestamps + - validateGetOutputTimestampsWithValue + - window + - windows +WindowMappingFn: + methods: + - getSideInputWindow + - maximumLookback +WindowMappingFnRunner: + methods: + - getPTransformRunnerFactories +WindowMergingFnRunner: + methods: + - getPTransformRunnerFactories + - merge + - windows +WindowTracing: + methods: + - debug + - trace +WindowedValue: + methods: + - decode + - encode + - equals + - explodeWindows + - fromComponents + - getCoderArguments + - getComponents + - getFullCoder + - getPane + - getParamWindowedValueCoder + - getPayload + - getTimestamp + - getValue + - getValueCoder + - getValueOnlyCoder + - getWindow + - getWindowCoder + - getWindows + - getWindowsCoder + - hashCode + - isSingleWindowedValue + - of + - registerByteSizeObserver + - timestampedValueInGlobalWindow + - toString + - valueInGlobalWindow + - verifyDeterministic + - withValue + - withValueCoder +WindowingStrategy: + methods: + - equals + - fixDefaults + - getAllowedLateness + - getClosingBehavior + - getEnvironmentId + - getMode + - getOnTimeBehavior + - getTimestampCombiner + - getTrigger + - getWindowFn + - globalDefault + - hashCode + - isAllowedLatenessSpecified + - isAlreadyMerged + - isModeSpecified + - isTimestampCombinerSpecified + - isTriggerSpecified + - needsMerge + - of + - toString + - withAllowedLateness + - withAlreadyMerged + - withClosingBehavior + - withEnvironmentId + - withMode + - withOnTimeBehavior + - withTimestampCombiner + - withTrigger + - withWindowFn +WinningBids: + methods: + - assignWindows + - decode + - encode + - equals + - expand + - forAuction + - forBid + - getDefaultWindowMappingFn + - hashCode + - isAuctionWindow + - isCompatible + - mergeWindows + - of + - processElement + - structuralValue + - toString + - verifyDeterministic + - windowCoder + properties: + - auction + - isAuctionWindow +WinningBidsSimulator: {} +WithFailures: + methods: + - apply + - element + - exception + - expand + - failures + - failuresTo + - finishSpecifyingOutput + - getPipeline + - of + - output +WithKeys: + methods: + - apply + - expand + - of + - process + - withKeyType +WithTimestamps: + methods: + - expand + - getAllowedTimestampSkew + - of + - processElement + - 
withAllowedTimestampSkew +WordCount: + methods: + - apply + - expand + - main + - processElement +WrappedSupervisor: + methods: + - createBlockGenerator + - getCurrentRateLimit + - isReceiverStopped + - logInfo + - onReceiverStart + - pushArrayBuffer + - pushBytes + - pushIterator + - pushSingle + - reportError +WritableCoder: + methods: + - coderFor + - decode + - encode + - equals + - getCoderArguments + - getCoderProvider + - getCoderProviders + - hashCode + - of + - verifyDeterministic +WriteBuilder: + methods: + - buildExternal + - getCreateDisposition + - getTableSchema + - getWriteDisposition + - setCreateDisposition + - setTableSchema + - setWriteDisposition +WriteFiles: + methods: + - apply + - assignShardKey + - equals + - expand + - finishBundle + - getAdditionalInputs + - getComputeNumShards + - getNumShardsProvider + - getShardingFunction + - getSink + - getWindowedWrites + - hashCode + - populateDisplayData + - process + - processElement + - startBundle + - to + - validate + - withMaxNumWritersPerBundle + - withNoSpilling + - withNumShards + - withRunnerDeterminedSharding + - withSharding + - withShardingFunction + - withSideInputs + - withSkipIfEmpty + - withWindowedWrites + properties: + - CONCRETE_CLASS +WriteFilesResult: + methods: + - expand + - finishSpecifyingOutput + - getPerDestinationOutputFilenames + - getPipeline +WriteJmsResult: + methods: + - expand + - finishSpecifyingOutput + - getFailedMessages + - getPipeline +WriteResult: + methods: + - expand + - finishSpecifyingOutput + - getFailedInserts + - getFailedInsertsWithErr + - getFailedStorageApiInserts + - getPipeline + - getSuccessfulInserts + - getSuccessfulTableLoads +WriteToPulsarDoFn: + methods: + - processElement + - setup + - teardown +XmlIO: + methods: + - apply + - expand + - flush + - from + - matches + - open + - populateDisplayData + - read + - readFiles + - sink + - to + - withCharset + - withCompression + - withCompressionType + - withMinBundleSize + - withRecordClass + - withRecordElement + - withRootElement + - withValidationEventHandler + - write +XmlSource: + methods: + - getCurrent + - getCurrentSource + - getOutputCoder +ZetaSQLQueryPlanner: + methods: + - convertToBeamRel + - createPlanner + - getDefaultTimezone + - getLanguageOptions + - getZetaSqlRuleSets + - parse + - setDefaultTimezone + properties: + - DEFAULT_CALC + - FACTORY +ZetaSqlBeamTranslationUtils: + methods: + - toBeamObject + - toBeamRow + - toBeamType + - toZetaSqlStructType + - toZetaSqlStructValue + - toZetaSqlType + - toZetaSqlValue +ZetaSqlCalciteTranslationUtils: + methods: + - toCalciteType + - toRexNode + - toZetaSqlType + properties: + - ZETASQL_NUMERIC_MAX_VALUE + - ZETASQL_NUMERIC_MIN_VALUE + - ZETASQL_NUMERIC_SCALE +ZetaSqlException: {} +ZetaSqlScalarFunctionImpl: + methods: + - create + properties: + - functionGroup +ZetaSqlUnnest: + methods: + - copy + - create + - deriveUncollectRowType + - explainTerms + properties: + - withOrdinality +ZetaSqlUserDefinedSQLNativeTableValuedFunction: {} +ZipFiles: + methods: + - iterator + - openStream + - toString + - zipDirectory + - zipDirectoryOverwrite +ZstdCoder: + methods: + - consistentWithEquals + - decode + - encode + - equals + - getCoderArguments + - hashCode + - of + - structuralValue + - toString + - verifyDeterministic diff --git a/playground/frontend/playground_components/assets/symbols/python.g.yaml b/playground/frontend/playground_components/assets/symbols/python.g.yaml index e3edda0b3a4d..01d5afbf2d89 100644 --- 
a/playground/frontend/playground_components/assets/symbols/python.g.yaml +++ b/playground/frontend/playground_components/assets/symbols/python.g.yaml @@ -378,11 +378,6 @@ ArtifactRetrievalService: methods: - GetArtifact - ResolveArtifacts -ArtifactRetrievalServiceServicer: - methods: - - GetArtifact - - ResolveArtifacts -ArtifactRetrievalServiceStub: {} Artifacts: properties: - images @@ -392,10 +387,6 @@ ArtifactStagingService: - register_job - resolved_deps - ReverseArtifactRetrievalService -ArtifactStagingServiceServicer: - methods: - - ReverseArtifactRetrievalService -ArtifactStagingServiceStub: {} AsDict: {} AsIter: methods: @@ -556,6 +547,9 @@ AvroTestCoder: properties: - SCHEMA AvroTestRecord: {} +AzureOptions: + methods: + - validate BackgroundCachingJob: methods: - cancel @@ -570,7 +564,11 @@ BagInStateOutputAfterTimer: properties: - EMIT_TIMER - SET_STATE -BagRuntimeState: {} +BagRuntimeState: + methods: + - add + - clear + - read BagStateSpec: methods: - to_runner_api @@ -643,7 +641,6 @@ BatchToElementDoFn: methods: - process_batch BeamAssertException: {} -BeamConstants: {} BeamDataframeDoctestRunner: methods: - fake_pandas_module @@ -657,56 +654,24 @@ BeamFilesystemHandler: methods: - file_reader - file_writer -BeamFnControl: - methods: - - Control - - GetProcessBundleDescriptor BeamFnControlServicer: methods: - Control -BeamFnControlStub: {} -BeamFnData: - methods: - - Data BeamFnDataServicer: methods: - Data - get_conn_by_worker_id -BeamFnDataStub: {} -BeamFnExternalWorkerPool: - methods: - - StartWorker - - StopWorker BeamFnExternalWorkerPoolServicer: methods: - start - StartWorker - StopWorker -BeamFnExternalWorkerPoolStub: {} -BeamFnLogging: - methods: - - Logging BeamFnLoggingServicer: methods: - Logging -BeamFnLoggingStub: {} -BeamFnState: - methods: - - State -BeamFnStateServicer: - methods: - - State -BeamFnStateStub: {} BeamFnStatusServicer: methods: - WorkerStatus -BeamFnWorkerStatus: - methods: - - WorkerStatus -BeamFnWorkerStatusServicer: - methods: - - WorkerStatus -BeamFnWorkerStatusStub: {} BeamIOError: {} BeamJarExpansionService: {} BeamJob: @@ -2184,14 +2149,19 @@ CombiningTriggerDriver: methods: - process_elements - process_timer -CombiningValueRuntimeState: {} +CombiningValueRuntimeState: + methods: + - add + - clear + - commit + - finalize + - read CombiningValueStateSpec: methods: - to_runner_api Command: methods: - run -CommitManifestResponse: {} ComparableValue: methods: - hydrate @@ -2357,7 +2327,9 @@ ConvertToPubSubMessage: - process CopyRequest: {} CorruptMainSessionException: {} -Count: {} +Count: + methods: + - expand Count1: methods: - expand @@ -2370,11 +2342,18 @@ CountAccumulator: CountAndLog: methods: - expand -CountCombineFn: {} +CountCombineFn: + methods: + - add_input + - add_inputs + - create_accumulator + - extract_output + - merge_accumulators Counter: methods: - - dec - - inc + - error + - get + - increment CounterAggregator: methods: - combine @@ -2468,12 +2447,7 @@ CPUTime: - totalMs Create: methods: - - as_read - - expand - - get_output_type - - get_windowing - - infer_output_type - - to_runner_api_parameter + - apply CreateBitbucketServerConfigOperationMetadata: properties: - bitbucketServerConfig @@ -2492,6 +2466,9 @@ CreateDisposition: properties: - CREATE_IF_NEEDED - CREATE_NEVER +CreateFolderFn: + methods: + - process CreateGitHubEnterpriseConfigOperationMetadata: properties: - completeTime @@ -2553,6 +2530,7 @@ CustomCoder: methods: - decode - encode + - is_deterministic CustomCommands: methods: - finalize_options @@ -2662,6 
+2640,7 @@ DataflowPipelineResult: - is_in_terminal_state - job_id - metrics + - monitoring_infos - state - wait_until_finish DataflowProjectsDeleteSnapshotsRequest: @@ -3582,10 +3561,19 @@ Disk: - mountPoint - sizeGb DisplayData: - methods: - - create_from - - create_from_options - - to_proto + properties: + - boolValue + - durationValue + - floatValue + - int64Value + - javaClassValue + - key + - label + - namespace + - shortStrValue + - strValue + - timestampValue + - url DisplayDataItem: methods: - drop_if_default @@ -3701,7 +3689,12 @@ DoFn: - TimestampParam - WatermarkEstimatorParam - WindowParam -DoFnContext: {} +DoFnContext: + methods: + - element + - set_element + - timestamp + - windows DoFnInfo: methods: - create @@ -3791,7 +3784,9 @@ DriverClassName: - MYSQL - ORACLE - POSTGRESQL -DummyClass: {} +DummyClass: + methods: + - func DummyCoder: methods: - decode @@ -3910,20 +3905,24 @@ Entry: - itemCount - predictedLabel Environment: - methods: - - artifacts - - capabilities - - from_options - - from_runner_api - - get_env_cls_from_urn - - register_urn - - register_urn - - register_urn - - register_urn - - register_urn - - resource_hints - - to_runner_api - - to_runner_api_parameter + properties: + - clusterManagerApiService + - dataset + - debugOptions + - experiments + - flexResourceSchedulingGoal + - internalExperiments + - sdkPipelineOptions + - serviceAccountEmail + - serviceKmsKeyName + - serviceOptions + - shuffleMode + - tempStoragePrefix + - userAgent + - version + - workerPools + - workerRegion + - workerZone EOL: properties: - CRLF @@ -4015,14 +4014,9 @@ ExpandStringsProvider: ExpansionAndArtifactRetrievalStub: methods: - artifact_service -ExpansionMethods: {} -ExpansionService: - methods: - - Expand ExpansionServiceServicer: methods: - Expand -ExpansionServiceStub: {} ExpectedSplitOutcome: properties: - MUST_BE_CONSISTENT_IF_SUCCEEDS @@ -4131,6 +4125,7 @@ ExternalTransform: - outer_namespace - replace_named_inputs - replace_named_outputs + - service - to_runner_api_transform - with_output_types ExternalTransformFinder: @@ -4365,6 +4360,7 @@ FileBasedCacheManager: - size - source - write +FileBasedIOTestOptions: {} FileBasedSink: methods: - close @@ -4511,13 +4507,9 @@ FixedWindows: - from_runner_api_parameter - get_window_coder - to_runner_api_parameter -FixedWindowsPayload: {} Flatten: methods: - - expand - - from_runner_api_parameter - - infer_output_type - - to_runner_api_parameter + - apply FlattenAndDouble: methods: - expand @@ -5028,7 +5020,6 @@ GlobalWindows: - windowed_batch - windowed_value - windowed_value_at_end_of_window -GlobalWindowsPayload: {} GoogleCloudOptions: methods: - validate @@ -5059,11 +5050,7 @@ GroupBy: - force_tuple_keys GroupByKey: methods: - - expand - - from_runner_api_parameter - - infer_output_type - - runner_api_requires_keyed_input - - to_runner_api_parameter + - apply GroupingBuffer: methods: - append @@ -5159,8 +5146,9 @@ HdfsUploader: - finish - put Histogram: - methods: - - update + properties: + - bucketCounts + - firstBucketOffset HistogramAggregator: methods: - combine @@ -5714,18 +5702,6 @@ JobServer: - start - stop JobServerOptions: {} -JobService: - methods: - - Cancel - - DescribePipelineOptions - - GetJobMetrics - - GetJobs - - GetMessageStream - - GetPipeline - - GetState - - GetStateStream - - Prepare - - Run JobServiceHandle: methods: - encode_pipeline_options @@ -5734,19 +5710,6 @@ JobServiceHandle: - run - stage - submit -JobServiceServicer: - methods: - - Cancel - - DescribePipelineOptions - - GetJobMetrics - - GetJobs 
- - GetMessageStream - - GetPipeline - - GetState - - GetStateStream - - Prepare - - Run -JobServiceStub: {} JobStatistics: properties: - completionRatio @@ -5838,6 +5801,7 @@ JrhReadPTransformOverride: - matches JsonCoder: methods: + - decode - encode JsonLogFormatter: methods: @@ -5974,24 +5938,6 @@ LeaseWorkItemResponse: properties: - unifiedWorkerResponse - workItems -LegacyArtifactRetrievalService: - methods: - - GetArtifact - - GetManifest -LegacyArtifactRetrievalServiceServicer: - methods: - - GetArtifact - - GetManifest -LegacyArtifactRetrievalServiceStub: {} -LegacyArtifactStagingService: - methods: - - CommitManifest - - PutArtifact -LegacyArtifactStagingServiceServicer: - methods: - - CommitManifest - - PutArtifact -LegacyArtifactStagingServiceStub: {} LengthPrefixCoder: methods: - as_cloud_object @@ -6032,13 +5978,7 @@ LinearRegressionBenchmarkConfig: - starting_point LineSource: methods: - - default_output_coder - - estimate_size - - get_range_tracker - - read - - split - properties: - - TEST_BUNDLE_SIZE + - read_records ListBatchConverter: methods: - combine_batches @@ -6186,7 +6126,6 @@ LogicalTypeRegistry: - get_logical_type_by_language_type - get_logical_type_by_urn - get_urn_by_logial_type -LogicalTypes: {} ManualWatermarkEstimator: methods: - current_watermark @@ -6361,9 +6300,7 @@ MetricResults: Metrics: methods: - counter - - distribution - - gauge - - get_namespace + - histogram MetricsContainer: methods: - get_counter @@ -6390,12 +6327,13 @@ MetricShortId: properties: - metricIndex - shortId +MetricsPublisher: + methods: + - publish MetricsReader: methods: - publish_metrics - publish_values - properties: - - publishers MetricStructuredName: properties: - context @@ -6540,9 +6478,6 @@ MonitorDoFn: - finish_bundle - process - start_bundle -MonitoringInfo: {} -MonitoringInfoSpecs: {} -MonitoringInfoTypeUrns: {} MonitorSuffix: properties: - ELEMENT_COUNTER @@ -6934,12 +6869,28 @@ OffsetRestrictionTracker: - try_split OldClassThatDoesNotImplementLen: {} Operation: - properties: - - done - - error - - metadata - - name - - response + methods: + - add_receiver + - current_element_progress + - execution_time_monitoring_infos + - finalize_bundle + - finish + - get_batching_preference + - get_input_batch_converter + - get_output_batch_converter + - monitoring_infos + - needs_finalization + - output + - pcollection_count_monitoring_infos + - process + - process_batch + - reset + - setup + - start + - str_internal + - teardown + - try_split + - user_monitoring_infos OperationCounters: methods: - do_sample @@ -7146,18 +7097,7 @@ ParamWindowedValueCoderImpl: - get_estimated_size_and_observables ParDo: methods: - - default_type_hints - - display_data - - expand - - from_runner_api_parameter - - get_restriction_coder - - infer_batch_converters - - infer_output_type - - make_fn - - runner_api_requires_keyed_input - - to_runner_api_parameter - - with_exception_handling - - with_outputs + - apply ParDoInstruction: properties: - input @@ -7444,11 +7384,30 @@ PipelineOptionsValidator: - PROJECT_ID_PATTERN - PROJECT_NUMBER_PATTERN - REQUIRED_ENVIRONMENT_OPTIONS +PipelineRenderer: + methods: + - info + - is_leaf + - layout_dot + - page + - page_callback_data + - pcoll_leaf_consumers + - pcoll_leaf_consumers_iter + - render_data + - render_json + - style + - to_dot + - to_dot_iter + - transform_attributes + - transform_node + - transform_to_dot + - update PipelineResult: methods: - cancel - - get - - read + - metrics + - pipeline_state_to_runner_api_state + - 
runner_api_state_to_pipeline_state - state - wait_until_finish PipelineRunner: @@ -7513,10 +7472,10 @@ Point: - value Policy: properties: - - auditConfigs - bindings - etag - - version + - kind + - resourceId PoolOption: properties: - name @@ -7568,6 +7527,9 @@ Position: - key - recordIndex - shufflePosition +Postprocess: + methods: + - process PostProcessor: methods: - process @@ -7582,6 +7544,9 @@ PrefixTransform: - expand - from_runner_api_parameter - to_runner_api_parameter +Preprocess: + methods: + - process PrintFn: methods: - process @@ -7759,13 +7724,6 @@ ProtoPlusMessageB: ProtoPlusMessageWithMap: properties: - field1 -ProvisionService: - methods: - - GetProvisionInfo -ProvisionServiceServicer: - methods: - - GetProvisionInfo -ProvisionServiceStub: {} PTransform: methods: - annotations @@ -8314,9 +8272,9 @@ RecommendationAIIT: - test_predict Record: properties: - - order_id - - product_id - - quantity + - age + - height + - name Recording: methods: - cancel @@ -8363,7 +8321,9 @@ RegressionMetrics: - meanSquaredLogError - medianAbsoluteError - rSquared -Reify: {} +Reify: + methods: + - process ReifyWindowsFn: methods: - process @@ -8373,6 +8333,14 @@ RekeyElements: RemoveBitbucketServerConnectedRepositoryRequest: properties: - connectedRepository +RenderOptions: {} +RenderPipelineResult: + methods: + - monitoring_infos + - wait_until_finish +RenderRunner: + methods: + - run_pipeline Repeatedly: methods: - from_runner_api @@ -8628,6 +8596,7 @@ RunnerIOOperation: {} RunnerResult: methods: - metrics + - monitoring_infos - monitoring_metrics - wait_until_finish RuntimeEnvironment: @@ -8742,12 +8711,19 @@ SampleCombineFn: - setup - teardown SampleOptions: {} +SchemaAwareExternalTransform: + methods: + - discover + - expand SchemaBasedPayloadBuilder: methods: - build SchemaLoadedSqlTransform: methods: - expand +SchemaTransformPayloadBuilder: + methods: + - build SchemaTranslation: methods: - atomic_value_from_runner_api @@ -8937,13 +8913,17 @@ Sessions: SessionsToStringsDoFn: methods: - process -SessionWindowsPayload: {} SetHint: {} SetIamPolicyRequest: properties: - policy - updateMask -SetRuntimeState: {} +SetRuntimeState: + methods: + - add + - clear + - is_modified + - read SetStateSpec: methods: - to_runner_api @@ -9122,7 +9102,6 @@ SlidingWindows: - from_runner_api_parameter - get_window_coder - to_runner_api_parameter -SlidingWindowsPayload: {} SlowCoders: methods: - test_using_slow_impl @@ -9168,11 +9147,9 @@ SortedConcatWithCounters: - merge_accumulators Source: properties: - - baseSpecs - - codec - - doesNotNeedSplitting - - metadata - - spec + - repoSource + - storageSource + - storageSourceManifest SourceBase: methods: - is_bounded @@ -9390,17 +9367,11 @@ StageSummary: - stageId - startTime - state -StandardArtifacts: {} -StandardCoders: {} -StandardDisplayData: {} -StandardEnvironments: {} StandardOptions: properties: - ALL_KNOWN_RUNNERS - DEFAULT_RUNNER - KNOWN_RUNNER_NAMES -StandardProtocols: {} -StandardPTransforms: {} StandardQueryParameters: properties: - access_token @@ -9415,10 +9386,6 @@ StandardQueryParameters: - trace - upload_protocol - uploadType -StandardRequirements: {} -StandardResourceHints: {} -StandardRunnerProtocols: {} -StandardSideInputTypes: {} StandardSqlDataType: properties: - arrayElementType @@ -9431,7 +9398,6 @@ StandardSqlField: StandardSqlStructType: properties: - fields -StandardUserStateTypes: {} StateBackedIterableCoder: methods: - from_runner_api_parameter @@ -10085,6 +10051,9 @@ SynchronousSetRuntimeState: - clear - commit - read 
+SyntheticRecordToStrFn: + methods: + - process SyntheticSDFAsSource: methods: - process @@ -10623,6 +10592,7 @@ TestIamPermissionsRequest: - permissions TestIamPermissionsResponse: properties: + - kind - permissions TestingFileSystem: methods: @@ -10962,18 +10932,11 @@ TestStreamIntegrationTests: - test_basic_execution - test_multiple_outputs - test_multiple_outputs_with_watermark_advancement -TestStreamService: - methods: - - Events TestStreamServiceController: methods: - Events - start - stop -TestStreamServiceServicer: - methods: - - Events -TestStreamServiceStub: {} TestTableReferenceParser: methods: - test_calling_with_all_arguments @@ -11716,12 +11679,9 @@ WatermarkEvent: - to_runner_api WatermarkManager: methods: - - extract_all_timers - - get_watermarks - - update_watermarks - properties: - - WATERMARK_NEG_INF - - WATERMARK_POS_INF + - get_pcoll_node + - get_stage_node + - set_pcoll_watermark WatermarkPolicy: methods: - validate_param @@ -11886,28 +11846,16 @@ WorkerOptions: - validate WorkerPool: properties: - - autoscalingSettings - - dataDisks - - defaultPackageSet - - diskSizeGb - - diskSourceImage - - diskType - - ipConfiguration - - kind - - machineType - - metadata - - network - - numThreadsPerWorker - - numWorkers - - onHostMaintenance - - packages - - poolArgs - - sdkHarnessContainerImages - - subnetwork - - taskrunnerSettings - - teardownPolicy - - workerHarnessContainerImage - - zone + - annotations + - createTime + - deleteTime + - displayName + - etag + - name + - privatePoolV1Config + - state + - uid + - updateTime WorkerSettings: properties: - baseUrl diff --git a/playground/frontend/playground_components/build.gradle.kts b/playground/frontend/playground_components/build.gradle.kts index e231cf7fc904..1af9d6053765 100644 --- a/playground/frontend/playground_components/build.gradle.kts +++ b/playground/frontend/playground_components/build.gradle.kts @@ -137,6 +137,7 @@ tasks.register("generateCode") { tasks.register("extractBeamSymbols") { dependsOn("ensureSymbolsDirectoryExists") dependsOn("extractBeamSymbolsGo") + dependsOn("extractBeamSymbolsJava") dependsOn("extractBeamSymbolsPython") group = "build" @@ -167,6 +168,10 @@ tasks.register("extractBeamSymbolsGo") { } } +tasks.register("extractBeamSymbolsJava") { + dependsOn("tools:extract_symbols_java:buildJava") +} + tasks.register("extractBeamSymbolsPython") { doLast { exec { diff --git a/playground/frontend/playground_components/lib/playground_components.dart b/playground/frontend/playground_components/lib/playground_components.dart index 9e08eaff733f..007a3ec29b64 100644 --- a/playground/frontend/playground_components/lib/playground_components.dart +++ b/playground/frontend/playground_components/lib/playground_components.dart @@ -54,6 +54,8 @@ export 'src/repositories/code_repository.dart'; export 'src/repositories/example_client/grpc_example_client.dart'; export 'src/repositories/example_repository.dart'; +export 'src/router/router_delegate.dart'; + export 'src/services/symbols/loaders/yaml.dart'; export 'src/theme/switch_notifier.dart'; @@ -71,6 +73,9 @@ export 'src/widgets/loading_error.dart'; export 'src/widgets/loading_indicator.dart'; export 'src/widgets/logo.dart'; export 'src/widgets/output/output.dart'; +export 'src/widgets/output/output_area.dart'; +export 'src/widgets/output/output_tab.dart'; +export 'src/widgets/output/output_tabs.dart'; export 'src/widgets/reset_button.dart'; export 'src/widgets/run_or_cancel_button.dart'; export 'src/widgets/shortcut_tooltip.dart'; diff --git 
a/playground/frontend/playground_components/lib/src/assets/assets.gen.dart b/playground/frontend/playground_components/lib/src/assets/assets.gen.dart index 0d02529875f9..2b64543628d7 100644 --- a/playground/frontend/playground_components/lib/src/assets/assets.gen.dart +++ b/playground/frontend/playground_components/lib/src/assets/assets.gen.dart @@ -58,6 +58,9 @@ class $AssetsSymbolsGen { /// File path: assets/symbols/go.g.yaml String get goG => 'assets/symbols/go.g.yaml'; + /// File path: assets/symbols/java.g.yaml + String get javaG => 'assets/symbols/java.g.yaml'; + /// File path: assets/symbols/python.g.yaml String get pythonG => 'assets/symbols/python.g.yaml'; } diff --git a/playground/frontend/playground_components/lib/src/controllers/example_loaders/standard_example_loader.dart b/playground/frontend/playground_components/lib/src/controllers/example_loaders/standard_example_loader.dart index 7a64b8aa818f..5f180589ea3c 100644 --- a/playground/frontend/playground_components/lib/src/controllers/example_loaders/standard_example_loader.dart +++ b/playground/frontend/playground_components/lib/src/controllers/example_loaders/standard_example_loader.dart @@ -51,16 +51,23 @@ class StandardExampleLoader extends ExampleLoader { } Future _load() async { - final example = await _loadExampleBase(); + try { + final example = await _loadExampleBase(); - if (example == null) { - _completer.completeError('Example not found: $descriptor'); + if (example == null) { + _completer.completeError('Example not found: $descriptor'); + return; + } + + _completer.complete( + exampleCache.loadExampleInfo(example), + ); + + // ignore: avoid_catches_without_on_clauses + } catch (ex, trace) { + _completer.completeError(ex, trace); return; } - - _completer.complete( - exampleCache.loadExampleInfo(example), - ); } Future _loadExampleBase() async { diff --git a/playground/frontend/playground_components/lib/src/controllers/playground_controller.dart b/playground/frontend/playground_components/lib/src/controllers/playground_controller.dart index decbd366f93a..463c7607dda3 100644 --- a/playground/frontend/playground_components/lib/src/controllers/playground_controller.dart +++ b/playground/frontend/playground_components/lib/src/controllers/playground_controller.dart @@ -30,6 +30,7 @@ import '../models/example_base.dart'; import '../models/example_loading_descriptors/empty_example_loading_descriptor.dart'; import '../models/example_loading_descriptors/example_loading_descriptor.dart'; import '../models/example_loading_descriptors/examples_loading_descriptor.dart'; +import '../models/example_loading_descriptors/standard_example_loading_descriptor.dart'; import '../models/example_loading_descriptors/user_shared_example_loading_descriptor.dart'; import '../models/intents.dart'; import '../models/outputs.dart'; @@ -173,6 +174,43 @@ class PlaygroundController with ChangeNotifier { ); } + Future setExampleBase(ExampleBase exampleBase) async { + final snippetEditingController = _getOrCreateSnippetEditingController( + exampleBase.sdk, + loadDefaultIfNot: false, + ); + + if (!snippetEditingController.lockExampleLoading()) { + return; + } + + notifyListeners(); + + try { + final example = await exampleCache.loadExampleInfo(exampleBase); + // TODO(alexeyinkin): setCurrentSdk = false when we do + // per-SDK output and run status. + // Now using true to reset the output and run status. 
+ // https://github.com/apache/beam/issues/23248 + final descriptor = StandardExampleLoadingDescriptor( + sdk: example.sdk, + path: example.path, + ); + + setExample( + example, + descriptor: descriptor, + setCurrentSdk: true, + ); + + // ignore: avoid_catches_without_on_clauses + } catch (ex) { + snippetEditingController.releaseExampleLoading(); + notifyListeners(); + rethrow; + } + } + void setExample( Example example, { required ExampleLoadingDescriptor descriptor, @@ -291,6 +329,7 @@ class PlaygroundController with ChangeNotifier { code: controller.codeController.fullText, sdk: controller.sdk, pipelineOptions: parsedPipelineOptions, + datasets: selectedExample?.datasets ?? [], ); _runSubscription = _codeRepository?.runCode(request).listen((event) { _result = event; @@ -413,6 +452,7 @@ class PlaygroundController with ChangeNotifier { ); final sharedExample = Example( + datasets: controller.selectedExample?.datasets ?? [], source: code, name: name, sdk: controller.sdk, diff --git a/playground/frontend/playground_components/lib/src/controllers/snippet_editing_controller.dart b/playground/frontend/playground_components/lib/src/controllers/snippet_editing_controller.dart index 5963837f8c50..052ee41aea18 100644 --- a/playground/frontend/playground_components/lib/src/controllers/snippet_editing_controller.dart +++ b/playground/frontend/playground_components/lib/src/controllers/snippet_editing_controller.dart @@ -27,6 +27,7 @@ import '../models/example_loading_descriptors/content_example_loading_descriptor import '../models/example_loading_descriptors/empty_example_loading_descriptor.dart'; import '../models/example_loading_descriptors/example_loading_descriptor.dart'; import '../models/example_view_options.dart'; +import '../models/loading_status.dart'; import '../models/sdk.dart'; import '../services/symbols/symbols_notifier.dart'; @@ -39,6 +40,7 @@ class SnippetEditingController extends ChangeNotifier { ExampleLoadingDescriptor? _descriptor; String _pipelineOptions = ''; bool _isChanged = false; + LoadingStatus _exampleLoadingStatus = LoadingStatus.done; SnippetEditingController({ required this.sdk, @@ -65,6 +67,29 @@ class SnippetEditingController extends ChangeNotifier { } } + /// Attempts to acquire a lock for asynchronous example loading. + /// + /// This prevents a race condition when quickly switching examples + /// and makes it possible to show a loading indicator. + /// + /// Returns whether the lock was acquired. + bool lockExampleLoading() { + switch (_exampleLoadingStatus) { + case LoadingStatus.loading: + return false; + case LoadingStatus.done: + case LoadingStatus.error: + _exampleLoadingStatus = LoadingStatus.loading; + return true; + } + } + + void releaseExampleLoading() { + _exampleLoadingStatus = LoadingStatus.done; + } + + bool get isLoading => _exampleLoadingStatus == LoadingStatus.loading; + void setExample( Example example, { ExampleLoadingDescriptor? 
descriptor, @@ -73,6 +98,7 @@ class SnippetEditingController extends ChangeNotifier { _selectedExample = example; _pipelineOptions = example.pipelineOptions; _isChanged = false; + releaseExampleLoading(); final viewOptions = example.viewOptions; diff --git a/playground/frontend/playground_components/lib/src/enums/emulator_type.dart b/playground/frontend/playground_components/lib/src/enums/emulator_type.dart new file mode 100644 index 000000000000..74b9916316af --- /dev/null +++ b/playground/frontend/playground_components/lib/src/enums/emulator_type.dart @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +enum EmulatorType { + kafka, +} diff --git a/playground/frontend/playground_components/lib/src/models/dataset.dart b/playground/frontend/playground_components/lib/src/models/dataset.dart new file mode 100644 index 000000000000..5ec79d29112e --- /dev/null +++ b/playground/frontend/playground_components/lib/src/models/dataset.dart @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import '../enums/emulator_type.dart'; + +class Dataset { + final EmulatorType? 
type; + final Map options; + final String datasetPath; + + Dataset({ + required this.type, + required this.options, + required this.datasetPath, + }); +} diff --git a/playground/frontend/playground_components/lib/src/models/example.dart b/playground/frontend/playground_components/lib/src/models/example.dart index f55ee7357151..3f3d89c313b9 100644 --- a/playground/frontend/playground_components/lib/src/models/example.dart +++ b/playground/frontend/playground_components/lib/src/models/example.dart @@ -32,14 +32,15 @@ class Example extends ExampleBase { required super.sdk, required super.type, required super.path, - this.graph, - this.logs, - this.outputs, super.complexity, super.contextLine, + super.datasets, super.description, + this.graph, super.isMultiFile, + this.logs, super.link, + this.outputs, super.pipelineOptions, super.tags, super.viewOptions, @@ -54,6 +55,7 @@ class Example extends ExampleBase { }) : super( complexity: example.complexity, contextLine: example.contextLine, + datasets: example.datasets, description: example.description, isMultiFile: example.isMultiFile, link: example.link, diff --git a/playground/frontend/playground_components/lib/src/models/example_base.dart b/playground/frontend/playground_components/lib/src/models/example_base.dart index 2ca76d107b45..205f60edf2c4 100644 --- a/playground/frontend/playground_components/lib/src/models/example_base.dart +++ b/playground/frontend/playground_components/lib/src/models/example_base.dart @@ -20,6 +20,7 @@ import 'package:equatable/equatable.dart'; import '../enums/complexity.dart'; import '../repositories/example_repository.dart'; +import 'dataset.dart'; import 'example_view_options.dart'; import 'sdk.dart'; @@ -53,6 +54,7 @@ class ExampleBase with Comparable, EquatableMixin { /// Index of the line to focus, 1-based. final int contextLine; + final List datasets; final String description; final bool isMultiFile; final String? 
link; @@ -71,6 +73,7 @@ class ExampleBase with Comparable, EquatableMixin { required this.type, this.complexity, this.contextLine = 1, + this.datasets = const [], this.description = '', this.isMultiFile = false, this.link, @@ -87,4 +90,8 @@ class ExampleBase with Comparable, EquatableMixin { int compareTo(ExampleBase other) { return name.toLowerCase().compareTo(other.name.toLowerCase()); } + + bool get usesEmulatedData => datasets.any( + (dataset) => dataset.type != null, + ); } diff --git a/playground/frontend/playground_components/lib/src/repositories/code_client/grpc_code_client.dart b/playground/frontend/playground_components/lib/src/repositories/code_client/grpc_code_client.dart index 62f0df698bb1..a2ebdf8c1472 100644 --- a/playground/frontend/playground_components/lib/src/repositories/code_client/grpc_code_client.dart +++ b/playground/frontend/playground_components/lib/src/repositories/code_client/grpc_code_client.dart @@ -22,6 +22,7 @@ import '../../api/iis_workaround_channel.dart'; import '../../api/v1/api.pbgrpc.dart' as grpc; import '../../models/sdk.dart'; import '../../util/pipeline_options.dart'; +import '../dataset_grpc_extension.dart'; import '../models/check_status_response.dart'; import '../models/output_response.dart'; import '../models/run_code_error.dart'; @@ -213,10 +214,12 @@ class GrpcCodeClient implements CodeClient { } grpc.RunCodeRequest _grpcRunCodeRequest(RunCodeRequest request) { - return grpc.RunCodeRequest() - ..code = request.code - ..sdk = request.sdk.grpc - ..pipelineOptions = pipelineOptionsToString(request.pipelineOptions); + return grpc.RunCodeRequest( + code: request.code, + sdk: request.sdk.grpc, + pipelineOptions: pipelineOptionsToString(request.pipelineOptions), + datasets: request.datasets.map((e) => e.grpc), + ); } RunCodeStatus _toClientStatus(grpc.Status status) { diff --git a/playground/frontend/playground_components/lib/src/repositories/dataset_grpc_extension.dart b/playground/frontend/playground_components/lib/src/repositories/dataset_grpc_extension.dart new file mode 100644 index 000000000000..ee8b7539662e --- /dev/null +++ b/playground/frontend/playground_components/lib/src/repositories/dataset_grpc_extension.dart @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import '../api/v1/api.pbgrpc.dart' as g; +import '../models/dataset.dart'; +import 'emulator_type_grpc_extension.dart'; + +extension DatasetExtension on Dataset { + g.Dataset get grpc { + return g.Dataset( + type: type?.grpc ?? 
g.EmulatorType.EMULATOR_TYPE_UNSPECIFIED, + options: options, + datasetPath: datasetPath, + ); + } +} + +extension GrpcDatasetExtension on g.Dataset { + Dataset get model { + return Dataset( + type: type.model, + options: options, + datasetPath: datasetPath, + ); + } +} diff --git a/playground/frontend/playground_components/lib/src/repositories/emulator_type_grpc_extension.dart b/playground/frontend/playground_components/lib/src/repositories/emulator_type_grpc_extension.dart new file mode 100644 index 000000000000..7acbd0abd346 --- /dev/null +++ b/playground/frontend/playground_components/lib/src/repositories/emulator_type_grpc_extension.dart @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import '../api/v1/api.pbgrpc.dart' as g; +import '../enums/emulator_type.dart'; + +extension ExampleTypeExtension on EmulatorType { + g.EmulatorType get grpc { + switch (this) { + case EmulatorType.kafka: + return g.EmulatorType.EMULATOR_TYPE_KAFKA; + } + } +} + +extension GrpcExampleTypeExtension on g.EmulatorType { + EmulatorType? get model { + EmulatorType? 
result; + switch (this) { + case g.EmulatorType.EMULATOR_TYPE_KAFKA: + result = EmulatorType.kafka; + break; + case g.EmulatorType.EMULATOR_TYPE_UNSPECIFIED: + result = null; + break; + } + return result; + } +} diff --git a/playground/frontend/playground_components/lib/src/repositories/example_client/grpc_example_client.dart b/playground/frontend/playground_components/lib/src/repositories/example_client/grpc_example_client.dart index f53c14db0ab6..8df2a6b4204c 100644 --- a/playground/frontend/playground_components/lib/src/repositories/example_client/grpc_example_client.dart +++ b/playground/frontend/playground_components/lib/src/repositories/example_client/grpc_example_client.dart @@ -24,6 +24,7 @@ import '../../models/category_with_examples.dart'; import '../../models/example_base.dart'; import '../../models/sdk.dart'; import '../complexity_grpc_extension.dart'; +import '../dataset_grpc_extension.dart'; import '../models/get_default_precompiled_object_request.dart'; import '../models/get_precompiled_object_code_response.dart'; import '../models/get_precompiled_object_request.dart'; @@ -324,17 +325,18 @@ class GrpcExampleClient implements ExampleClient { ExampleBase _toExampleModel(Sdk sdk, grpc.PrecompiledObject example) { return ExampleBase( - sdk: sdk, - name: example.name, - description: example.description, - tags: example.tags, - type: _exampleTypeFromString(example.type), - path: example.cloudPath, + complexity: example.complexity.model, contextLine: example.contextLine, - pipelineOptions: example.pipelineOptions, + datasets: example.datasets.map((e) => e.model).toList(growable: false), + description: example.description, isMultiFile: example.multifile, link: example.link, - complexity: example.complexity.model, + name: example.name, + path: example.cloudPath, + pipelineOptions: example.pipelineOptions, + sdk: sdk, + tags: example.tags, + type: _exampleTypeFromString(example.type), ); } diff --git a/playground/frontend/playground_components/lib/src/repositories/models/run_code_request.dart b/playground/frontend/playground_components/lib/src/repositories/models/run_code_request.dart index 16a1e74df430..d2a49b9ee6d0 100644 --- a/playground/frontend/playground_components/lib/src/repositories/models/run_code_request.dart +++ b/playground/frontend/playground_components/lib/src/repositories/models/run_code_request.dart @@ -16,15 +16,18 @@ * limitations under the License. */ +import '../../models/dataset.dart'; import '../../models/sdk.dart'; class RunCodeRequest { final String code; + final List datasets; final Sdk sdk; final Map pipelineOptions; const RunCodeRequest({ required this.code, + required this.datasets, required this.sdk, required this.pipelineOptions, }); diff --git a/playground/frontend/playground_components/lib/src/repositories/sdk_grpc_extension.dart b/playground/frontend/playground_components/lib/src/repositories/sdk_grpc_extension.dart index d9c1b5863303..e0a3d30dda04 100644 --- a/playground/frontend/playground_components/lib/src/repositories/sdk_grpc_extension.dart +++ b/playground/frontend/playground_components/lib/src/repositories/sdk_grpc_extension.dart @@ -16,7 +16,6 @@ * limitations under the License. */ - import '../api/v1/api.pbgrpc.dart' as g; import '../models/sdk.dart'; @@ -29,8 +28,7 @@ extension SdkExtension on Sdk { }; g.Sdk get grpc => - _idToGrpcEnum[id] ?? - (throw Exception('SDK not supported for GRPS: $id')); + _idToGrpcEnum[id] ?? 
(throw Exception('SDK not supported for GRPS: $id')); } extension GrpcSdkExtension on g.Sdk { diff --git a/playground/frontend/playground_components/lib/src/router/router_delegate.dart b/playground/frontend/playground_components/lib/src/router/router_delegate.dart new file mode 100644 index 000000000000..41857979d9e1 --- /dev/null +++ b/playground/frontend/playground_components/lib/src/router/router_delegate.dart @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:app_state/app_state.dart'; +import 'package:flutter/material.dart'; + +import '../widgets/toasts/toast_listener.dart'; + +/// Wraps [pageStack] in widgets that must be above [Navigator] and can be +/// below [MaterialApp]. +class BeamRouterDelegate extends PageStackRouterDelegate { + BeamRouterDelegate(super.pageStack); + + @override + Widget build(BuildContext context) { + // Overlay: to float toasts. + // ToastListenerWidget: turns notification events into floating toasts. + return Overlay( + initialEntries: [ + OverlayEntry( + builder: (context) => ToastListenerWidget( + child: super.build(context), + ), + ), + ], + ); + } +} diff --git a/playground/frontend/playground_components/lib/src/services/symbols/loaders/map.dart b/playground/frontend/playground_components/lib/src/services/symbols/loaders/map.dart index 44c03811a3b9..5c5281ebcf02 100644 --- a/playground/frontend/playground_components/lib/src/services/symbols/loaders/map.dart +++ b/playground/frontend/playground_components/lib/src/services/symbols/loaders/map.dart @@ -17,20 +17,32 @@ */ import 'package:highlight/languages/go.dart'; +import 'package:highlight/languages/java.dart'; import 'package:highlight/languages/python.dart'; +import 'package:highlight/languages/scala.dart'; import '../../../assets/assets.gen.dart'; import '../../../playground_components.dart'; import 'yaml.dart'; +final _javaLoader = YamlSymbolsLoader( + path: Assets.symbols.javaG, + package: PlaygroundComponents.packageName, + ); + final symbolLoadersByMode = { + // go: YamlSymbolsLoader( path: Assets.symbols.goG, package: PlaygroundComponents.packageName, ), + java: _javaLoader, + python: YamlSymbolsLoader( path: Assets.symbols.pythonG, package: PlaygroundComponents.packageName, ), + + scala: _javaLoader, }; diff --git a/playground/frontend/playground_components/lib/src/theme/theme.dart b/playground/frontend/playground_components/lib/src/theme/theme.dart index 287eef0a14f3..1ebe89cb89b2 100644 --- a/playground/frontend/playground_components/lib/src/theme/theme.dart +++ b/playground/frontend/playground_components/lib/src/theme/theme.dart @@ -163,7 +163,7 @@ final kLightTheme = ThemeData( fontSize: codeFontSize, ), codeTheme: CodeThemeData( - styles: { + styles: const { 'root': TextStyle( backgroundColor: 
BeamLightThemeColors.primaryBackground, color: BeamLightThemeColors.text, @@ -194,8 +194,8 @@ final kLightTheme = ThemeData( 'symbol': TextStyle(color: BeamLightThemeColors.code2), 'bullet': TextStyle(color: BeamLightThemeColors.code2), 'link': TextStyle(color: BeamLightThemeColors.code2), - 'emphasis': const TextStyle(fontStyle: FontStyle.italic), - 'strong': const TextStyle(fontWeight: FontWeight.bold), + 'emphasis': TextStyle(fontStyle: FontStyle.italic), + 'strong': TextStyle(fontWeight: FontWeight.bold), }, ), ), @@ -239,7 +239,7 @@ final kDarkTheme = ThemeData( fontSize: codeFontSize, ), codeTheme: CodeThemeData( - styles: { + styles: const { 'root': TextStyle( backgroundColor: BeamDarkThemeColors.primaryBackground, color: BeamDarkThemeColors.text, @@ -270,8 +270,8 @@ final kDarkTheme = ThemeData( 'symbol': TextStyle(color: BeamDarkThemeColors.code2), 'bullet': TextStyle(color: BeamDarkThemeColors.code2), 'link': TextStyle(color: BeamDarkThemeColors.code2), - 'emphasis': const TextStyle(fontStyle: FontStyle.italic), - 'strong': const TextStyle(fontWeight: FontWeight.bold), + 'emphasis': TextStyle(fontStyle: FontStyle.italic), + 'strong': TextStyle(fontWeight: FontWeight.bold), }, ), ), diff --git a/playground/frontend/playground_components/lib/src/widgets/output/output.dart b/playground/frontend/playground_components/lib/src/widgets/output/output.dart index 194b5d754937..81a42f2a794e 100644 --- a/playground/frontend/playground_components/lib/src/widgets/output/output.dart +++ b/playground/frontend/playground_components/lib/src/widgets/output/output.dart @@ -79,11 +79,13 @@ class _OutputWidgetState extends State Row( mainAxisAlignment: MainAxisAlignment.spaceBetween, children: [ - TabHeader( - tabController: tabController, - tabsWidget: OutputTabs( - playgroundController: widget.playgroundController, + Flexible( + child: TabHeader( tabController: tabController, + tabsWidget: OutputTabs( + playgroundController: widget.playgroundController, + tabController: tabController, + ), ), ), if (widget.trailing != null) widget.trailing!, diff --git a/playground/frontend/playground_components/lib/src/widgets/split_view.dart b/playground/frontend/playground_components/lib/src/widgets/split_view.dart index 904d03788d61..6291161f135d 100644 --- a/playground/frontend/playground_components/lib/src/widgets/split_view.dart +++ b/playground/frontend/playground_components/lib/src/widgets/split_view.dart @@ -48,9 +48,9 @@ class _SplitViewState extends State { double _ratio = defaultRatio; double _maxSize = 0; - get _sizeFirst => _ratio * _maxSize; + int get _sizeFirst => (_ratio * _maxSize).toInt(); - get _sizeSecond => (1 - _ratio) * _maxSize; + int get _sizeSecond => ((1 - _ratio) * _maxSize).toInt(); get _isHorizontal => widget.direction == Axis.horizontal; @@ -78,13 +78,13 @@ class _SplitViewState extends State { width: constraints.maxWidth, child: Row( children: [ - SizedBox( - width: _sizeFirst, + Expanded( + flex: _sizeFirst, child: widget.first, ), _buildSeparator(context), - SizedBox( - width: _sizeSecond, + Expanded( + flex: _sizeSecond, child: widget.second, ), ], @@ -98,13 +98,13 @@ class _SplitViewState extends State { height: constraints.maxHeight, child: Column( children: [ - SizedBox( - height: _sizeFirst, + Expanded( + flex: _sizeFirst, child: widget.first, ), _buildSeparator(context), - SizedBox( - height: _sizeSecond, + Expanded( + flex: _sizeSecond, child: widget.second, ), ], diff --git a/playground/frontend/playground_components/lib/src/widgets/toasts/toast_listener.dart 
b/playground/frontend/playground_components/lib/src/widgets/toasts/toast_listener.dart index 38b76d06008c..9a83f0938976 100644 --- a/playground/frontend/playground_components/lib/src/widgets/toasts/toast_listener.dart +++ b/playground/frontend/playground_components/lib/src/widgets/toasts/toast_listener.dart @@ -27,6 +27,7 @@ import '../../models/toast.dart'; import '../../services/toast_notifier.dart'; import 'toast.dart'; +/// Turns events from [ToastNotifier] into floating [ToastWidget]s. class ToastListenerWidget extends StatefulWidget { final Widget child; diff --git a/playground/frontend/playground_components/pubspec.yaml b/playground/frontend/playground_components/pubspec.yaml index 419b8ceec863..e7c2943c9b70 100644 --- a/playground/frontend/playground_components/pubspec.yaml +++ b/playground/frontend/playground_components/pubspec.yaml @@ -26,6 +26,7 @@ environment: dependencies: aligned_dialog: ^0.0.6 + app_state: ^0.8.4 collection: ^1.16.0 easy_localization: ^3.0.1 easy_localization_ext: ^0.1.1 @@ -33,7 +34,7 @@ dependencies: enum_map: ^0.2.1 equatable: ^2.0.5 flutter: { sdk: flutter } - flutter_code_editor: ^0.2.1 + flutter_code_editor: ^0.2.4 flutter_markdown: ^0.6.12 flutter_svg: ^1.0.3 fluttertoast: ^8.1.1 @@ -67,6 +68,7 @@ flutter: - assets/png/ - assets/svg/ - assets/symbols/go.g.yaml + - assets/symbols/java.g.yaml - assets/symbols/python.g.yaml - assets/translations/en.yaml diff --git a/playground/frontend/playground_components/test/src/common/examples.dart b/playground/frontend/playground_components/test/src/common/examples.dart index b12a9bb3bc8d..50032f0c1127 100644 --- a/playground/frontend/playground_components/test/src/common/examples.dart +++ b/playground/frontend/playground_components/test/src/common/examples.dart @@ -22,47 +22,47 @@ import 'package:playground_components/src/models/example_base.dart'; import 'package:playground_components/src/models/sdk.dart'; const exampleMock1 = Example( + complexity: Complexity.basic, + description: 'description', + name: 'Example X1', + path: 'SDK_PYTHON/Category/Name1', sdk: Sdk.python, source: 'ex1', - name: 'Example X1', tags: ['tag1'], type: ExampleType.example, - description: 'description', - path: 'SDK_PYTHON/Category/Name1', - complexity: Complexity.basic, ); const exampleMock2 = Example( + complexity: Complexity.basic, + description: 'description', + name: 'Kata', + path: 'SDK_PYTHON/Category/Name2', sdk: Sdk.python, source: 'ex2', - name: 'Kata', tags: ['tag2'], type: ExampleType.kata, - description: 'description', - path: 'SDK_PYTHON/Category/Name2', - complexity: Complexity.basic, ); const exampleWithoutSourceMock = ExampleBase( - sdk: Sdk.python, - name: 'Test example', - type: ExampleType.example, + complexity: Complexity.basic, description: 'description', + name: 'Test example', path: 'SDK_PYTHON/Category/Name', - complexity: Complexity.basic, + sdk: Sdk.python, + type: ExampleType.example, ); const exampleWithAllAdditionsMock = Example( - sdk: Sdk.python, - name: 'Test example', - type: ExampleType.example, + complexity: Complexity.basic, description: 'description', + graph: 'test outputs', + logs: 'test outputs', + name: 'Test example', + outputs: 'test outputs', path: 'SDK_PYTHON/Category/Name', + sdk: Sdk.python, source: 'test outputs', - outputs: 'test outputs', - logs: 'test outputs', - graph: 'test outputs', - complexity: Complexity.basic, + type: ExampleType.example, ); const exampleGoPipelineOptions = Example( @@ -79,11 +79,11 @@ const exampleGoPipelineOptions = Example( ); const exampleMockGo = Example( + 
complexity: Complexity.medium, + description: 'description', + name: 'Example', + path: 'SDK_GO/Category/Name', sdk: Sdk.go, source: 'ex1', - name: 'Example', type: ExampleType.example, - description: 'description', - path: 'SDK_GO/Category/Name', - complexity: Complexity.medium, ); diff --git a/playground/frontend/playground_components/test/src/controllers/example_loaders/common.dart b/playground/frontend/playground_components/test/src/controllers/example_loaders/common.dart index 8763c8c0d8a8..324ff9c1e006 100644 --- a/playground/frontend/playground_components/test/src/controllers/example_loaders/common.dart +++ b/playground/frontend/playground_components/test/src/controllers/example_loaders/common.dart @@ -17,7 +17,6 @@ */ import 'package:playground_components/playground_components.dart'; - import 'package:playground_components/src/controllers/example_loaders/example_loader.dart'; import 'package:playground_components/src/controllers/example_loaders/example_loader_factory.dart'; diff --git a/playground/frontend/playground_components/test/src/controllers/example_loaders/examples_loader_test.mocks.dart b/playground/frontend/playground_components/test/src/controllers/example_loaders/examples_loader_test.mocks.dart index e20ace76e455..7bcc1204ebd7 100644 --- a/playground/frontend/playground_components/test/src/controllers/example_loaders/examples_loader_test.mocks.dart +++ b/playground/frontend/playground_components/test/src/controllers/example_loaders/examples_loader_test.mocks.dart @@ -3,7 +3,7 @@ // Do not manually edit this file. // ignore_for_file: no_leading_underscores_for_library_prefixes -import 'dart:async' as _i14; +import 'dart:async' as _i13; import 'dart:ui' as _i15; import 'package:mockito/mockito.dart' as _i1; @@ -19,7 +19,7 @@ import 'package:playground_components/src/models/category_with_examples.dart' import 'package:playground_components/src/models/example.dart' as _i9; import 'package:playground_components/src/models/example_base.dart' as _i8; import 'package:playground_components/src/models/example_loading_descriptors/example_loading_descriptor.dart' - as _i13; + as _i14; import 'package:playground_components/src/models/example_loading_descriptors/examples_loading_descriptor.dart' as _i7; import 'package:playground_components/src/models/example_loading_descriptors/user_shared_example_loading_descriptor.dart' @@ -193,9 +193,19 @@ class MockPlaygroundController extends _i1.Mock returnValueForMissingStub: null, ); @override + _i13.Future setExampleBase(_i8.ExampleBase? exampleBase) => + (super.noSuchMethod( + Invocation.method( + #setExampleBase, + [exampleBase], + ), + returnValue: Future.value(), + returnValueForMissingStub: Future.value(), + ) as _i13.Future); + @override void setExample( _i9.Example? example, { - _i13.ExampleLoadingDescriptor? descriptor, + _i14.ExampleLoadingDescriptor? descriptor, bool? setCurrentSdk, }) => super.noSuchMethod( @@ -288,14 +298,14 @@ class MockPlaygroundController extends _i1.Mock returnValueForMissingStub: null, ); @override - _i14.Future cancelRun() => (super.noSuchMethod( + _i13.Future cancelRun() => (super.noSuchMethod( Invocation.method( #cancelRun, [], ), returnValue: Future.value(), returnValueForMissingStub: Future.value(), - ) as _i14.Future); + ) as _i13.Future); @override void filterOutput(_i11.OutputType? 
type) => super.noSuchMethod( Invocation.method( @@ -305,7 +315,7 @@ class MockPlaygroundController extends _i1.Mock returnValueForMissingStub: null, ); @override - _i14.Future<_i6.UserSharedExampleLoadingDescriptor> saveSnippet() => + _i13.Future<_i6.UserSharedExampleLoadingDescriptor> saveSnippet() => (super.noSuchMethod( Invocation.method( #saveSnippet, @@ -313,7 +323,7 @@ class MockPlaygroundController extends _i1.Mock ), returnValue: Future<_i6.UserSharedExampleLoadingDescriptor>.value( _FakeUserSharedExampleLoadingDescriptor_4()), - ) as _i14.Future<_i6.UserSharedExampleLoadingDescriptor>); + ) as _i13.Future<_i6.UserSharedExampleLoadingDescriptor>); @override _i7.ExamplesLoadingDescriptor getLoadingDescriptor() => (super.noSuchMethod( Invocation.method( @@ -389,10 +399,10 @@ class MockExampleCache extends _i1.Mock implements _i2.ExampleCache { returnValueForMissingStub: null, ); @override - _i14.Future get allExamplesFuture => (super.noSuchMethod( + _i13.Future get allExamplesFuture => (super.noSuchMethod( Invocation.getter(#allExamplesFuture), returnValue: Future.value(), - ) as _i14.Future); + ) as _i13.Future); @override _i17.LoadingStatus get catalogStatus => (super.noSuchMethod( Invocation.getter(#catalogStatus), @@ -404,14 +414,14 @@ class MockExampleCache extends _i1.Mock implements _i2.ExampleCache { returnValue: false, ) as bool); @override - _i14.Future loadAllPrecompiledObjectsIfNot() => (super.noSuchMethod( + _i13.Future loadAllPrecompiledObjectsIfNot() => (super.noSuchMethod( Invocation.method( #loadAllPrecompiledObjectsIfNot, [], ), returnValue: Future.value(), returnValueForMissingStub: Future.value(), - ) as _i14.Future); + ) as _i13.Future); @override List<_i16.CategoryWithExamples> getCategories(_i12.Sdk? sdk) => (super.noSuchMethod( @@ -422,7 +432,7 @@ class MockExampleCache extends _i1.Mock implements _i2.ExampleCache { returnValue: <_i16.CategoryWithExamples>[], ) as List<_i16.CategoryWithExamples>); @override - _i14.Future<_i8.ExampleBase> getPrecompiledObject( + _i13.Future<_i8.ExampleBase> getPrecompiledObject( String? path, _i12.Sdk? sdk, ) => @@ -435,17 +445,17 @@ class MockExampleCache extends _i1.Mock implements _i2.ExampleCache { ], ), returnValue: Future<_i8.ExampleBase>.value(_FakeExampleBase_6()), - ) as _i14.Future<_i8.ExampleBase>); + ) as _i13.Future<_i8.ExampleBase>); @override - _i14.Future<_i9.Example> loadSharedExample(String? id) => (super.noSuchMethod( + _i13.Future<_i9.Example> loadSharedExample(String? id) => (super.noSuchMethod( Invocation.method( #loadSharedExample, [id], ), returnValue: Future<_i9.Example>.value(_FakeExample_7()), - ) as _i14.Future<_i9.Example>); + ) as _i13.Future<_i9.Example>); @override - _i14.Future saveSnippet({ + _i13.Future saveSnippet({ List<_i18.SharedFile>? files, _i12.Sdk? sdk, String? pipelineOptions, @@ -461,16 +471,16 @@ class MockExampleCache extends _i1.Mock implements _i2.ExampleCache { }, ), returnValue: Future.value(''), - ) as _i14.Future); + ) as _i13.Future); @override - _i14.Future<_i9.Example> loadExampleInfo(_i8.ExampleBase? example) => + _i13.Future<_i9.Example> loadExampleInfo(_i8.ExampleBase? example) => (super.noSuchMethod( Invocation.method( #loadExampleInfo, [example], ), returnValue: Future<_i9.Example>.value(_FakeExample_7()), - ) as _i14.Future<_i9.Example>); + ) as _i13.Future<_i9.Example>); @override void setSelectorOpened(bool? 
value) => super.noSuchMethod( Invocation.method( @@ -480,41 +490,41 @@ class MockExampleCache extends _i1.Mock implements _i2.ExampleCache { returnValueForMissingStub: null, ); @override - _i14.Future<_i9.Example?> getDefaultExampleBySdk(_i12.Sdk? sdk) => + _i13.Future<_i9.Example?> getDefaultExampleBySdk(_i12.Sdk? sdk) => (super.noSuchMethod( Invocation.method( #getDefaultExampleBySdk, [sdk], ), returnValue: Future<_i9.Example?>.value(), - ) as _i14.Future<_i9.Example?>); + ) as _i13.Future<_i9.Example?>); @override - _i14.Future loadDefaultPrecompiledObjects() => (super.noSuchMethod( + _i13.Future loadDefaultPrecompiledObjects() => (super.noSuchMethod( Invocation.method( #loadDefaultPrecompiledObjects, [], ), returnValue: Future.value(), returnValueForMissingStub: Future.value(), - ) as _i14.Future); + ) as _i13.Future); @override - _i14.Future loadDefaultPrecompiledObjectsIfNot() => (super.noSuchMethod( + _i13.Future loadDefaultPrecompiledObjectsIfNot() => (super.noSuchMethod( Invocation.method( #loadDefaultPrecompiledObjectsIfNot, [], ), returnValue: Future.value(), returnValueForMissingStub: Future.value(), - ) as _i14.Future); + ) as _i13.Future); @override - _i14.Future<_i8.ExampleBase?> getCatalogExampleByPath(String? path) => + _i13.Future<_i8.ExampleBase?> getCatalogExampleByPath(String? path) => (super.noSuchMethod( Invocation.method( #getCatalogExampleByPath, [path], ), returnValue: Future<_i8.ExampleBase?>.value(), - ) as _i14.Future<_i8.ExampleBase?>); + ) as _i13.Future<_i8.ExampleBase?>); @override void addListener(_i15.VoidCallback? listener) => super.noSuchMethod( Invocation.method( diff --git a/playground/frontend/playground_components/test/src/controllers/example_loaders/user_shared_example_loader_test.dart b/playground/frontend/playground_components/test/src/controllers/example_loaders/user_shared_example_loader_test.dart index b3f29988a394..a69303b7924e 100644 --- a/playground/frontend/playground_components/test/src/controllers/example_loaders/user_shared_example_loader_test.dart +++ b/playground/frontend/playground_components/test/src/controllers/example_loaders/user_shared_example_loader_test.dart @@ -16,7 +16,6 @@ * limitations under the License. 
*/ -import 'package:easy_localization/easy_localization.dart'; import 'package:flutter_test/flutter_test.dart'; import 'package:playground_components/playground_components.dart'; import 'package:playground_components/src/controllers/example_loaders/user_shared_example_loader.dart'; @@ -24,7 +23,7 @@ import 'package:playground_components/src/controllers/example_loaders/user_share import '../../common/example_cache.dart'; void main() async { - await EasyLocalization.ensureInitialized(); + TestWidgetsFlutterBinding.ensureInitialized(); group('UserSharedExampleLoader', () { testWidgets('non-existent', (WidgetTester wt) async { diff --git a/playground/frontend/playground_components/test/src/repositories/code_repository_test.dart b/playground/frontend/playground_components/test/src/repositories/code_repository_test.dart index 757e4f57ba3f..7615fb115d78 100644 --- a/playground/frontend/playground_components/test/src/repositories/code_repository_test.dart +++ b/playground/frontend/playground_components/test/src/repositories/code_repository_test.dart @@ -34,6 +34,7 @@ const kRequestMock = RunCodeRequest( code: 'code', sdk: Sdk.java, pipelineOptions: {}, + datasets: [], ); const kPipelineUuid = '1234'; diff --git a/playground/frontend/playground_components/test/src/repositories/dataset_grpc_extension_test.dart b/playground/frontend/playground_components/test/src/repositories/dataset_grpc_extension_test.dart new file mode 100644 index 000000000000..ddca310c033b --- /dev/null +++ b/playground/frontend/playground_components/test/src/repositories/dataset_grpc_extension_test.dart @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import 'package:flutter_test/flutter_test.dart'; +import 'package:playground_components/src/api/v1/api.pbgrpc.dart' as g; +import 'package:playground_components/src/repositories/dataset_grpc_extension.dart'; + +void main() { + final datasets = [ + // + g.Dataset( + datasetPath: 'mockPath1', + options: {'key1': 'value1'}, + type: g.EmulatorType.EMULATOR_TYPE_KAFKA, + ), + + g.Dataset( + datasetPath: 'mockPath2', + options: {'key2': 'value2'}, + type: g.EmulatorType.EMULATOR_TYPE_UNSPECIFIED, + ), + ]; + + group('Dataset extensions test.', () { + for (final dataset in datasets) { + test('Dataset with type ${dataset.type.name} converts to the same value', + () { + expect(dataset.model.grpc, dataset); + }); + } + }); +} diff --git a/playground/frontend/playground_components/test/src/repositories/emulator_type_grpc_extension_test.dart b/playground/frontend/playground_components/test/src/repositories/emulator_type_grpc_extension_test.dart new file mode 100644 index 000000000000..627ca11a620b --- /dev/null +++ b/playground/frontend/playground_components/test/src/repositories/emulator_type_grpc_extension_test.dart @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:flutter_test/flutter_test.dart'; +import 'package:playground_components/src/api/v1/api.pbgrpc.dart' as g; +import 'package:playground_components/src/repositories/emulator_type_grpc_extension.dart'; + +void main() { + group('Emulator type extensions test', () { + for (final value in g.EmulatorType.values) { + test('Emulator type ${value.name} converts to the same value', () { + expect( + value.model?.grpc ?? g.EmulatorType.EMULATOR_TYPE_UNSPECIFIED, + value, + ); + }); + } + }); +} diff --git a/playground/frontend/playground_components/test/tools/extract_symbols_java/extract_symbols_java_test.dart b/playground/frontend/playground_components/test/tools/extract_symbols_java/extract_symbols_java_test.dart new file mode 100644 index 000000000000..d944441f10a5 --- /dev/null +++ b/playground/frontend/playground_components/test/tools/extract_symbols_java/extract_symbols_java_test.dart @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'dart:io'; + +import 'package:flutter_test/flutter_test.dart'; + +import '../common.dart'; + +const _lang = 'java'; +const _dependenciesDir = 'test/tools/extract_symbols_$_lang/dependencies'; + +void main() { + test('Extract SDK Symbols. $_lang', () async { + final classPath = await _buildClassPath(); + await _compileClasses(classPath); + await testExtractSymbols( + language: _lang, + executables: ['java'], + arguments: [ + '-classpath', + classPath, + 'com.playground.extract_symbols.Main', + '../../test/tools/extract_symbols_$_lang/sdk_mock', + ], + ); + }); +} + +Future _buildClassPath() async { + const dependencies = [ + 'https://repo1.maven.org/maven2/com/github/javaparser/javaparser-core/3.24.9/javaparser-core-3.24.9.jar', + 'https://repo1.maven.org/maven2/com/esotericsoftware/yamlbeans/yamlbeans/1.15/yamlbeans-1.15.jar', + ]; + + await _downloadDependenciesIfNeed(dependencies); + + final workingDirectory = Directory.current.path; + + return [ + '$workingDirectory/tools/extract_symbols_$_lang/build/classes/java/main', + ...dependencies.map( + (f) => '$workingDirectory/$_dependenciesDir/${f.split('/').last}', + ), + ].join(':'); +} + +Future _downloadDependenciesIfNeed(List dependencies) async { + for (final dependency in dependencies) { + final fileName = dependency.split('/').last; + final file = File('$_dependenciesDir/$fileName'); + if (!file.existsSync()) { + final request = await HttpClient().getUrl(Uri.parse(dependency)); + final response = await request.close(); + await file.create(recursive: true); + await response.pipe(file.openWrite()); + } + } +} + +Future _compileClasses(String classPath) async { + await Process.run( + 'mkdir', + ['-p', 'build/classes/java/main'], + workingDirectory: 'tools/extract_symbols_$_lang', + ); + await Process.run( + 'javac', + [ + '-d', + 'build/classes/java/main/', + '-classpath', + classPath, + 'src/main/java/com/playground/extract_symbols/Main.java', + 'src/main/java/com/playground/extract_symbols/ClassInfo.java', + ], + workingDirectory: 'tools/extract_symbols_$_lang', + ); +} diff --git a/playground/frontend/playground_components/test/tools/extract_symbols_java/java.golden.yaml b/playground/frontend/playground_components/test/tools/extract_symbols_java/java.golden.yaml new file mode 100644 index 000000000000..d881f0c68044 --- /dev/null +++ b/playground/frontend/playground_components/test/tools/extract_symbols_java/java.golden.yaml @@ -0,0 +1,13 @@ +ExtendedPublicClass: + methods: + - extendedPublicMethod + properties: + - extendedPublicField +PublicClass: + methods: + - anotherPublicMethod + - publicMethod + properties: + - anotherPublicField + - publicField +Test: {} diff --git a/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/DefaultClass.java b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/DefaultClass.java new file mode 100644 index 000000000000..62c016e83a36 --- /dev/null +++ b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/DefaultClass.java @@ -0,0 +1,19 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +class DefaultClass {} diff --git a/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/KotlinClass.kt b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/KotlinClass.kt new file mode 100644 index 000000000000..54d56cc4f27e --- /dev/null +++ b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/KotlinClass.kt @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +public class KotlinClass() { + public fun publicMethod() {} +} diff --git a/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/PrivateClass.java b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/PrivateClass.java new file mode 100644 index 000000000000..bc45d6127006 --- /dev/null +++ b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/PrivateClass.java @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +private class PrivateClass { + public void publicMethod() {} +} diff --git a/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/PublicClass.java b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/PublicClass.java new file mode 100644 index 000000000000..978345eaacc6 --- /dev/null +++ b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/PublicClass.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +public class PublicClass { + int defaultField; + public int publicField; + protected int protectedField; + private int privateField; + + void defaultMethod() {} + public void publicMethod() {} + protected void protectedMethod() {} + private void privateMethod() {} +} diff --git a/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/Test.java b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/Test.java new file mode 100644 index 000000000000..92029db1cb39 --- /dev/null +++ b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/Test.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This test file is in the result because it is not in .../test/... . +public class Test {} diff --git a/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/directory/ExtendedPublicClass.java b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/directory/ExtendedPublicClass.java new file mode 100644 index 000000000000..924f91cbac2b --- /dev/null +++ b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/directory/ExtendedPublicClass.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +public class ExtendedPublicClass extends PublicClass { + int extendedDefaultField; + public int extendedPublicField; + protected int protectedField; + private int privateField; + + public void extendedPublicMethod() {} +} diff --git a/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/directory/PublicClass.java b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/directory/PublicClass.java new file mode 100644 index 000000000000..5f9eae157e50 --- /dev/null +++ b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/directory/PublicClass.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +public class PublicClass { + int defaultField; + public int publicField; + public int anotherPublicField; + protected int protectedField; + private int privateField; + + void defaultMethod() {} + public void publicMethod() {} + public void publicMethod(int i) {} + public void anotherPublicMethod() {} + protected void protectedMethod() {} + private void privateMethod() {} +} diff --git a/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/test/TestFolderPublicClass.java b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/test/TestFolderPublicClass.java new file mode 100644 index 000000000000..6ad8cd789497 --- /dev/null +++ b/playground/frontend/playground_components/test/tools/extract_symbols_java/sdk_mock/test/TestFolderPublicClass.java @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This class is skipped because it is in .../test/... . +public class TestFolderPublicClass { + public int publicField; + + public void publicMethod() {} +} diff --git a/playground/frontend/playground_components/tools/extract_symbols_java/build.gradle b/playground/frontend/playground_components/tools/extract_symbols_java/build.gradle new file mode 100644 index 000000000000..a71f7d4d2539 --- /dev/null +++ b/playground/frontend/playground_components/tools/extract_symbols_java/build.gradle @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * License); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id 'java' +} + +group 'com.playground.extract_symbols' + +repositories { + mavenCentral() +} + +ext { + javaMainClass = "com.playground.extract_symbols.Main" +} + +dependencies { + implementation group: 'com.github.javaparser', name: 'javaparser-core', version: '3.23.1' + implementation group: 'com.esotericsoftware.yamlbeans', name: 'yamlbeans', version: '1.15' +} + +tasks.register("buildJava") { + dependsOn "build" + doLast { + exec { + executable "java" + args "-classpath", sourceSets.main.runtimeClasspath.getAsPath(), javaMainClass, "../../../../../sdks/java" + standardOutput = new FileOutputStream("playground/frontend/playground_components/assets/symbols/java.g.yaml") + } + } +} diff --git a/playground/frontend/playground_components/tools/extract_symbols_java/src/main/java/com/playground/extract_symbols/ClassInfo.java b/playground/frontend/playground_components/tools/extract_symbols_java/src/main/java/com/playground/extract_symbols/ClassInfo.java new file mode 100644 index 000000000000..be1dd88a9b2d --- /dev/null +++ b/playground/frontend/playground_components/tools/extract_symbols_java/src/main/java/com/playground/extract_symbols/ClassInfo.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + package com.playground.extract_symbols; + +import java.util.*; +import java.util.stream.Collectors; + +public class ClassInfo { + final Set<String> publicMethods = new HashSet<>(); + final Set<String> publicFields = new HashSet<>(); + + Map<String, List<String>> toMap() { + Map<String, List<String>> map = new HashMap<>(); + if (!publicMethods.isEmpty()) { + map.put("methods", publicMethods.stream().sorted().collect(Collectors.toList())); + } + if (!publicFields.isEmpty()) { + map.put("properties", publicFields.stream().sorted().collect(Collectors.toList())); + } + return map; + } +} diff --git a/playground/frontend/playground_components/tools/extract_symbols_java/src/main/java/com/playground/extract_symbols/Main.java b/playground/frontend/playground_components/tools/extract_symbols_java/src/main/java/com/playground/extract_symbols/Main.java new file mode 100644 index 000000000000..7d6f81597fd8 --- /dev/null +++ b/playground/frontend/playground_components/tools/extract_symbols_java/src/main/java/com/playground/extract_symbols/Main.java @@ -0,0 +1,124 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.playground.extract_symbols; + +import com.esotericsoftware.yamlbeans.YamlConfig; +import com.esotericsoftware.yamlbeans.YamlException; +import com.esotericsoftware.yamlbeans.YamlWriter; +import com.github.javaparser.ParseProblemException; +import com.github.javaparser.StaticJavaParser; +import com.github.javaparser.ast.CompilationUnit; +import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration; +import com.github.javaparser.ast.body.FieldDeclaration; +import com.github.javaparser.ast.body.MethodDeclaration; + +import java.io.File; +import java.io.IOException; +import java.io.StringWriter; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.*; + +public class Main { + public static void main(String[] args) throws IOException { + final String sdkPath = args[0]; + final HashMap classInfoMap = getDirSymbols(sdkPath); + final String yamlString = buildYamlString(classInfoMap); + System.out.print(yamlString); + } + + private static HashMap getDirSymbols(String sdkPathString) throws IOException { + final HashMap classInfoMap = new HashMap<>(); + final Path sdkPath = new File(sdkPathString).toPath().toAbsolutePath(); + Files.walk(sdkPath).forEach(path -> { + String stringPath = path.toString(); + final String relativePath = sdkPath.relativize(path).toString(); + if (isJavaNonTestFile(relativePath)) { + String fileName = stringPath.substring(stringPath.lastIndexOf("/") + 1).replace(".java", ""); + try { + CompilationUnit unit = StaticJavaParser.parse(path); + if (unit.getClassByName(fileName).isPresent()) { + addClassSymbols(classInfoMap, unit.getClassByName(fileName).get()); + } + } catch (IOException | ParseProblemException ignored) { + } + } + }); + + return classInfoMap; + } + + static boolean isJavaNonTestFile(String stringPath) { + final boolean isInTestFolder = stringPath.contains("/test/") || stringPath.startsWith("test/"); + return stringPath.endsWith(".java") && !isInTestFolder; + } + + private static void addClassSymbols(HashMap classInfoList, ClassOrInterfaceDeclaration cl) { + if (!cl.isPublic()) { + return; + } + + ClassInfo classInfo; + if (classInfoList.containsKey(cl.getNameAsString())) { + classInfo = classInfoList.get(cl.getNameAsString()); + } else { + classInfo = new ClassInfo(); + classInfoList.put(cl.getNameAsString(), classInfo); + } + + cl.findAll(MethodDeclaration.class).forEach(method -> { + if (method.isPublic()) { + classInfo.publicMethods.add(method.getNameAsString()); + } + }); + cl.findAll(FieldDeclaration.class).forEach(field -> { + if (field.isPublic()) { + classInfo.publicFields.add(field.getVariable(0).getNameAsString()); + } + }); + } + + private static String buildYamlString(HashMap classInfoMap) throws YamlException { + final StringWriter stringWriter = new StringWriter(); + final YamlWriter yamlWriter = new YamlWriter(stringWriter); + yamlWriter.getConfig().writeConfig.setIndentSize(2); + yamlWriter.getConfig().writeConfig.setWriteClassname(YamlConfig.WriteClassName.NEVER); + final LinkedHashMap>> yamlMap = new LinkedHashMap<>(); + + classInfoMap.forEach((key, value) -> yamlMap.put(key, value.toMap())); + final LinkedHashMap>> sortedMap = sortMap(yamlMap); + + yamlWriter.write(sortedMap); + + yamlWriter.close(); + return stringWriter.toString(); + } + + private static LinkedHashMap>> sortMap(HashMap>> yamlMap) { + final Comparator> comparator = Comparator.comparing(Map.Entry::getKey); + final ArrayList>>> array = new ArrayList<>(yamlMap.entrySet()); + array.sort(comparator); + + final LinkedHashMap>> sortedMap 
= new LinkedHashMap<>(); + for (Map.Entry>> entry : array) { + sortedMap.put(entry.getKey(), entry.getValue()); + } + return sortedMap; + } +} diff --git a/playground/frontend/playground_components/CHANGELOG.md b/playground/frontend/playground_components_dev/README.md similarity index 83% rename from playground/frontend/playground_components/CHANGELOG.md rename to playground/frontend/playground_components_dev/README.md index 504fa05fe23c..cf1f2678a28d 100644 --- a/playground/frontend/playground_components/CHANGELOG.md +++ b/playground/frontend/playground_components_dev/README.md @@ -17,6 +17,7 @@ under the License. --> -## 0.0.1 +# playground_components_dev -* TODO: Describe initial release. +This is a non-pub.dev Flutter package that contains +helpers for testing [playground_components](../playground_components) package. diff --git a/playground/frontend/playground_components_dev/analysis_options.yaml b/playground/frontend/playground_components_dev/analysis_options.yaml new file mode 100644 index 000000000000..fe2e0e8eb952 --- /dev/null +++ b/playground/frontend/playground_components_dev/analysis_options.yaml @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +include: package:total_lints/app.yaml diff --git a/playground/frontend/playground_components_dev/lib/playground_components_dev.dart b/playground/frontend/playground_components_dev/lib/playground_components_dev.dart new file mode 100644 index 000000000000..19a653be0add --- /dev/null +++ b/playground/frontend/playground_components_dev/lib/playground_components_dev.dart @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +export 'src/common_finders.dart'; +export 'src/example_names.dart'; +export 'src/example_outputs.dart'; +export 'src/example_paths.dart'; +export 'src/examples.dart'; +export 'src/expect.dart'; +export 'src/finder.dart'; +export 'src/string.dart'; +export 'src/widget_tester.dart'; diff --git a/playground/frontend/playground_components_dev/lib/src/code.dart b/playground/frontend/playground_components_dev/lib/src/code.dart new file mode 100644 index 000000000000..2f85ac9f029d --- /dev/null +++ b/playground/frontend/playground_components_dev/lib/src/code.dart @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:flutter_code_editor/flutter_code_editor.dart'; +import 'package:highlight/highlight_core.dart'; + +String foldLicenseAndImports(String text, Mode language) { + final controller = CodeController( + text: text, + language: language, + ); + + controller.foldCommentAtLineZero(); + controller.foldImports(); + + return controller.text; +} diff --git a/playground/frontend/playground_components_dev/lib/src/common_finders.dart b/playground/frontend/playground_components_dev/lib/src/common_finders.dart new file mode 100644 index 000000000000..4180ae02a0d1 --- /dev/null +++ b/playground/frontend/playground_components_dev/lib/src/common_finders.dart @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import 'package:flutter/material.dart'; +import 'package:flutter_code_editor/flutter_code_editor.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:playground_components/playground_components.dart'; + +extension CommonFindersExtension on CommonFinders { + Finder codeField() { + return byType(CodeField); + } + + Finder graphTab() { + // TODO(alexeyinkin): Use keys when output tabs get to use enum, https://github.com/apache/beam/issues/22663 + return widgetWithText(OutputTab, 'Graph'); + } + + Finder outputArea() { + return byType(OutputArea); + } + + Finder outputSelectableText() { + final outputArea = find.outputArea(); + return find.descendant( + of: outputArea, + matching: find.byType(SelectableText), + ); + } + + Finder outputWidget() { + return byType(OutputWidget); + } + + Finder resultTab() { + // TODO(alexeyinkin): Use keys when output tabs get to use enum, https://github.com/apache/beam/issues/22663 + return widgetWithText(OutputTab, 'Result'); + } + + Finder runOrCancelButton() { + return byType(RunOrCancelButton); + } + + Finder splitView() { + return byType(SplitView); + } + + Finder toggleThemeButton() { + return byType(ToggleThemeButton); + } +} diff --git a/playground/frontend/playground_components_dev/lib/src/example_names.dart b/playground/frontend/playground_components_dev/lib/src/example_names.dart new file mode 100644 index 000000000000..204a6578ecd5 --- /dev/null +++ b/playground/frontend/playground_components_dev/lib/src/example_names.dart @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +class ExampleNames { + static const aggregationMax = 'AggregationMax'; + static const aggregationMean = 'AggregationMean'; +} diff --git a/playground/frontend/playground_components_dev/lib/src/example_outputs.dart b/playground/frontend/playground_components_dev/lib/src/example_outputs.dart new file mode 100644 index 000000000000..5a548be683c8 --- /dev/null +++ b/playground/frontend/playground_components_dev/lib/src/example_outputs.dart @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +class ExampleOutputs { + static const javaAggregationMaxTail = 'INFO: 10\n'; + + static const pythonAggregationMeanContains = + '16 [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]'; + + static const pythonWordCountWithMetricsTail = 'average word length: 4\n'; +} diff --git a/playground/frontend/playground_components_dev/lib/src/example_paths.dart b/playground/frontend/playground_components_dev/lib/src/example_paths.dart new file mode 100644 index 000000000000..f0f554c79226 --- /dev/null +++ b/playground/frontend/playground_components_dev/lib/src/example_paths.dart @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +class ExamplePaths { + static const javaAggregationMax = + '/learning/katas/java/Common Transforms/Aggregation/Max/src/org/apache/beam/learning/katas/commontransforms/aggregation/max/Task.java'; + static const javaMinimalWordCount = + '/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java'; + + static const pythonAggregationMean = + '/learning/katas/python/Common Transforms/Aggregation/Mean/task.py'; + static const pythonMinimalWordCountWithMetrics = + '/sdks/python/apache_beam/examples/wordcount_with_metrics.py'; +} diff --git a/playground/frontend/playground_components_dev/lib/src/examples.dart b/playground/frontend/playground_components_dev/lib/src/examples.dart new file mode 100644 index 000000000000..c558133d742e --- /dev/null +++ b/playground/frontend/playground_components_dev/lib/src/examples.dart @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import 'package:highlight/highlight_core.dart'; +import 'package:http/http.dart' as http; + +import 'code.dart'; + +class Examples { + static const _repoAndBranch = 'apache/beam/master'; + + static Future getVisibleTextByPath(String path, Mode language) async { + final uri = + Uri.parse('https://raw.githubusercontent.com/$_repoAndBranch$path'); + final response = await http.get(uri); + final content = response.body; + + return foldLicenseAndImports(content, language); + } +} diff --git a/playground/frontend/playground_components_dev/lib/src/expect.dart b/playground/frontend/playground_components_dev/lib/src/expect.dart new file mode 100644 index 000000000000..34e338dbec7b --- /dev/null +++ b/playground/frontend/playground_components_dev/lib/src/expect.dart @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:flutter_test/flutter_test.dart'; + +import 'widget_tester.dart'; + +void expectOutput(String text, WidgetTester wt) { + final actualText = wt.findOutputText(); + expect(actualText, text); +} + +void expectOutputContains(String text, WidgetTester wt) { + final actualText = wt.findOutputText(); + expect(actualText, contains(text)); +} + +void expectOutputEndsWith(String text, WidgetTester wt) { + final actualText = wt.findOutputText(); + expect(actualText, endsWith(text)); +} diff --git a/playground/frontend/playground_components_dev/lib/src/finder.dart b/playground/frontend/playground_components_dev/lib/src/finder.dart new file mode 100644 index 000000000000..72d2dd86a389 --- /dev/null +++ b/playground/frontend/playground_components_dev/lib/src/finder.dart @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import 'package:flutter/widgets.dart'; +import 'package:flutter_test/flutter_test.dart'; + +extension FinderExtension on Finder { + // TODO(alexeyinkin): Push to Flutter or wait for them to make their own, https://github.com/flutter/flutter/issues/117675 + Finder and(Finder another) { + return _AndFinder(this, another); + } +} + +class _AndFinder extends ChainedFinder { + _AndFinder(super.parent, this.another); + + final Finder another; + + @override + String get description => '${parent.description} AND ${another.description}'; + + @override + Iterable filter(Iterable parentCandidates) { + return another.apply(parentCandidates); + } +} diff --git a/playground/frontend/playground_components_dev/lib/src/string.dart b/playground/frontend/playground_components_dev/lib/src/string.dart new file mode 100644 index 000000000000..7f300aeeb980 --- /dev/null +++ b/playground/frontend/playground_components_dev/lib/src/string.dart @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:flutter/widgets.dart'; +import 'package:flutter_code_editor/flutter_code_editor.dart'; + +extension StringExtension on String { + /// Whether this is different from [another] only by cutting a single range + /// of zero or more characters. + bool isAsIfCutFrom(String another) { + final range = getChangedRange( + another, + attributeChangeTo: TextAffinity.downstream, + ); + + return range.isCollapsed; + } +} diff --git a/playground/frontend/playground_components_dev/lib/src/widget_tester.dart b/playground/frontend/playground_components_dev/lib/src/widget_tester.dart new file mode 100644 index 000000000000..6a833f0bcb8c --- /dev/null +++ b/playground/frontend/playground_components_dev/lib/src/widget_tester.dart @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import 'package:flutter/material.dart'; +import 'package:flutter_code_editor/flutter_code_editor.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:playground_components/playground_components.dart'; +import 'package:provider/provider.dart'; + +import 'common_finders.dart'; + +extension WidgetTesterExtension on WidgetTester { + CodeController findOneCodeController() { + final codeField = find.codeField(); + expect(codeField, findsOneWidget); + + return widget<CodeField>(codeField).controller; + } + + TabController findOutputTabController() { + final outputTabs = find.byType(OutputTabs); + expect(outputTabs, findsOneWidget); + + return widget<OutputTabs>(outputTabs).tabController; + } + + String? findOutputText() { + final selectableText = find.outputSelectableText(); + expect(selectableText, findsOneWidget); + + return widget<SelectableText>(selectableText).data; + } + + PlaygroundController findPlaygroundController() { + final context = element(find.codeField()); + return context.read(); + } +} diff --git a/playground/frontend/playground_components_dev/pubspec.yaml b/playground/frontend/playground_components_dev/pubspec.yaml new file mode 100644 index 000000000000..a4998c7c1bfd --- /dev/null +++ b/playground/frontend/playground_components_dev/pubspec.yaml @@ -0,0 +1,35 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +name: playground_components_dev +description: Helpers for testing playground_components package +version: 0.0.1 +publish_to: none + +environment: + sdk: '>=2.18.1 <4.0.0' + flutter: '>=3.3.2' + +dependencies: + flutter: { sdk: flutter } + flutter_code_editor: ^0.2.4 + flutter_test: { sdk: flutter } + highlight: ^0.7.0 + http: ^0.13.5 + playground_components: { path: ../playground_components } + provider: ^6.0.3 + total_lints: ^2.18.0 diff --git a/playground/frontend/pubspec.lock b/playground/frontend/pubspec.lock index af95acc0757f..db8ae40f1114 100644 --- a/playground/frontend/pubspec.lock +++ b/playground/frontend/pubspec.lock @@ -35,14 +35,14 @@ packages: name: app_state url: "https://pub.dartlang.org" source: hosted - version: "0.8.3" + version: "0.8.4" archive: dependency: transitive description: name: archive url: "https://pub.dartlang.org" source: hosted - version: "3.3.1" + version: "3.3.0" args: dependency: transitive description: @@ -287,12 +287,17 @@ packages: source: sdk version: "0.0.0" flutter_code_editor: - dependency: transitive + dependency: "direct dev" description: name: flutter_code_editor url: "https://pub.dartlang.org" source: hosted - version: "0.2.1" + version: "0.2.4" + flutter_driver: + dependency: transitive + description: flutter + source: sdk + version: "0.0.0" flutter_highlight: dependency: transitive description: @@ -357,6 +362,11 @@ packages: url: "https://pub.dartlang.org" source: hosted version: "2.1.3" + fuchsia_remote_debug_protocol: + dependency: transitive + description: flutter + source: sdk + version: "0.0.0" get_it: dependency: "direct main" description: @@ -448,6 +458,11 @@ packages: url: "https://pub.dartlang.org" source: hosted version: "4.0.1" + integration_test: + dependency: "direct dev" + description: flutter + source: sdk + version: "0.0.0" intl: dependency: "direct main" description: @@ -651,6 +666,13 @@ packages: relative: true source: path version: "0.0.1" + playground_components_dev: + dependency: "direct dev" + description: + path: playground_components_dev + relative: true + source: path + version: "0.0.1" plugin_platform_interface: dependency: transitive description: @@ -831,6 +853,13 @@ packages: url: "https://pub.dartlang.org" source: hosted version: "1.1.1" + sync_http: + dependency: transitive + description: + name: sync_http + url: "https://pub.dartlang.org" + source: hosted + version: "0.3.1" term_glyph: dependency: transitive description: @@ -852,6 +881,13 @@ packages: url: "https://pub.dartlang.org" source: hosted version: "1.0.0" + total_lints: + dependency: transitive + description: + name: total_lints + url: "https://pub.dartlang.org" + source: hosted + version: "2.18.0" tuple: dependency: transitive description: @@ -957,6 +993,13 @@ packages: url: "https://pub.dartlang.org" source: hosted version: "2.1.2" + vm_service: + dependency: transitive + description: + name: vm_service + url: "https://pub.dartlang.org" + source: hosted + version: "9.0.0" watcher: dependency: transitive description: @@ -978,6 +1021,13 @@ packages: url: "https://pub.dartlang.org" source: hosted version: "2.2.0" + webdriver: + dependency: transitive + description: + name: webdriver + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.0" win32: dependency: transitive description: diff --git a/playground/frontend/pubspec.yaml b/playground/frontend/pubspec.yaml index 655948c14426..10b92e26371f 100644 --- a/playground/frontend/pubspec.yaml +++ b/playground/frontend/pubspec.yaml @@ -27,7 +27,7 @@ environment: dependencies: 
akvelon_flutter_issue_106664_workaround: ^0.1.2 aligned_dialog: ^0.0.6 - app_state: ^0.8.3 + app_state: ^0.8.4 collection: ^1.15.0 easy_localization: ^3.0.1 easy_localization_ext: ^0.1.1 @@ -53,9 +53,12 @@ dependencies: dev_dependencies: build_runner: ^2.1.4 fake_async: ^1.3.0 + flutter_code_editor: ^0.2.4 flutter_lints: ^2.0.1 flutter_test: { sdk: flutter } + integration_test: { sdk: flutter } mockito: ^5.0.16 + playground_components_dev: { path: playground_components_dev } flutter: assets: @@ -64,3 +67,6 @@ flutter: generate: true uses-material-design: true + +flutter_gen: + output: lib/src/assets/ diff --git a/playground/frontend/test_driver/integration_test.dart b/playground/frontend/test_driver/integration_test.dart new file mode 100644 index 000000000000..6b59b37dd129 --- /dev/null +++ b/playground/frontend/test_driver/integration_test.dart @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import 'package:integration_test/integration_test_driver.dart'; + +Future main() => integrationDriver(); diff --git a/playground/infrastructure/checker.py b/playground/infrastructure/checker.py index 6543a2bda5be..5d235b415742 100644 --- a/playground/infrastructure/checker.py +++ b/playground/infrastructure/checker.py @@ -30,7 +30,7 @@ from api.v1.api_pb2 import Sdk from config import Config -from helper import get_tag +from helper import get_tag, load_supported_categories def parse_args() -> argparse.Namespace: @@ -93,6 +93,11 @@ def main(): root_dir = os.getenv("BEAM_ROOT_DIR") if root_dir is None: raise KeyError("BEAM_ROOT_DIR environment variable should be specified in os") + categories_file = os.getenv("BEAM_EXAMPLE_CATEGORIES") + if categories_file is None: + raise KeyError("BEAM_EXAMPLE_CATEGORIES environment variable should be specified in os") + + load_supported_categories(categories_file) logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARNING) diff --git a/runners/google-cloud-dataflow-java/build.gradle b/runners/google-cloud-dataflow-java/build.gradle index 4e1e7ebae24d..4b7d11e99f58 100644 --- a/runners/google-cloud-dataflow-java/build.gradle +++ b/runners/google-cloud-dataflow-java/build.gradle @@ -429,8 +429,6 @@ createCrossLanguageValidatesRunnerTask( "--project=${dataflowProject}", "--region=${dataflowRegion}", "--sdk_harness_container_image_overrides=.*java.*,${dockerJavaImageContainer}:${dockerTag}", - // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved - "--experiments=shuffle_mode=appliance", ], javaPipelineOptions: [ "--runner=TestDataflowRunner", @@ -439,8 +437,6 @@ createCrossLanguageValidatesRunnerTask( "--tempRoot=${dataflowValidatesTempRoot}", "--sdkContainerImage=${dockerJavaImageContainer}:${dockerTag}", 
"--sdkHarnessContainerImageOverrides=.*python.*,${dockerPythonImageContainer}:${dockerTag}", - // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved. - "--experiments=shuffle_mode=appliance", ], pytestOptions: [ "--capture=no", @@ -463,8 +459,7 @@ task validatesRunnerV2 { description = "Runs the ValidatesRunner tests on Dataflow Runner V2" dependsOn(createRunnerV2ValidatesRunnerTest( name: 'validatesRunnerV2Test', - // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved. - pipelineOptions: runnerV2PipelineOptions + ['--experiments=shuffle_mode=appliance'], + pipelineOptions: runnerV2PipelineOptions, excludedCategories: [ 'org.apache.beam.sdk.testing.UsesOnWindowExpiration', 'org.apache.beam.sdk.testing.UsesStatefulParDo', diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java index 05d6a43739ec..e06a3fb8324c 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java @@ -667,11 +667,7 @@ public static StreamingDataflowWorker fromDataflowWorkerHarnessOptions( this.isDoneFuture = new CompletableFuture<>(); this.threadFactory = - r -> { - Thread t = new Thread(r); - t.setDaemon(true); - return t; - }; + new ThreadFactoryBuilder().setNameFormat("DataflowWorkUnits-%d").setDaemon(true).build(); this.workUnitExecutor = new BoundedQueueExecutor( chooseMaximumNumberOfThreads(), @@ -691,7 +687,7 @@ public static StreamingDataflowWorker fromDataflowWorkerHarnessOptions( memoryMonitorThread.setName("MemoryMonitor"); dispatchThread = - threadFactory.newThread( + new Thread( new Runnable() { @Override public void run() { @@ -704,11 +700,12 @@ public void run() { LOG.info("Dispatch done"); } }); + dispatchThread.setDaemon(true); dispatchThread.setPriority(Thread.MIN_PRIORITY); dispatchThread.setName("DispatchThread"); commitThread = - threadFactory.newThread( + new Thread( new Runnable() { @Override public void run() { @@ -719,6 +716,7 @@ public void run() { } } }); + commitThread.setDaemon(true); commitThread.setPriority(Thread.MAX_PRIORITY); commitThread.setName("CommitThread"); diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/options/StreamingDataflowWorkerOptions.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/options/StreamingDataflowWorkerOptions.java index 8df42ea42ff5..908221973fae 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/options/StreamingDataflowWorkerOptions.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/options/StreamingDataflowWorkerOptions.java @@ -121,6 +121,15 @@ public interface StreamingDataflowWorkerOptions extends DataflowWorkerHarnessOpt void setWindmillServiceStreamingRpcHealthCheckPeriodMs(int value); + @Description( + "If positive, the number of messages to send on streaming rpc before checking isReady." 
+ + "Higher values reduce cost of output overhead at the cost of more memory used in grpc " + + "buffers.") + @Default.Integer(10) + int getWindmillMessagesBetweenIsReadyChecks(); + + void setWindmillMessagesBetweenIsReadyChecks(int value); + /** * Factory for creating local Windmill address. Reads from system propery 'windmill.hostport' for * backwards compatibility. diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/DirectStreamObserver.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/DirectStreamObserver.java index 0646aba9c116..b2e9ec925153 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/DirectStreamObserver.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/DirectStreamObserver.java @@ -28,7 +28,7 @@ import org.slf4j.LoggerFactory; /** - * A {@link StreamObserver} which uses synchronization on the underlying {@link CallStreamObserver} + * A {@link StreamObserver} which synchronizes access to the underlying {@link CallStreamObserver} * to provide thread safety. * *
<p>
Flow control with the underlying {@link CallStreamObserver} is handled with a {@link Phaser} @@ -41,45 +41,66 @@ public final class DirectStreamObserver implements StreamObserver { private static final Logger LOG = LoggerFactory.getLogger(DirectStreamObserver.class); private final Phaser phaser; - @GuardedBy("outboundObserver") + private final Object lock = new Object(); + + @GuardedBy("lock") private final CallStreamObserver outboundObserver; private final long deadlineSeconds; + private final int messagesBetweenIsReadyChecks; - @GuardedBy("outboundObserver") - private boolean firstMessage = true; + @GuardedBy("lock") + private int messagesSinceReady = 0; public DirectStreamObserver( - Phaser phaser, CallStreamObserver outboundObserver, long deadlineSeconds) { + Phaser phaser, + CallStreamObserver outboundObserver, + long deadlineSeconds, + int messagesBetweenIsReadyChecks) { this.phaser = phaser; this.outboundObserver = outboundObserver; this.deadlineSeconds = deadlineSeconds; + // We always let the first message pass through without blocking because it is performed under + // the StreamPool synchronized block and single header message isn't going to cause memory + // issues due to excessive buffering within grpc. + this.messagesBetweenIsReadyChecks = Math.max(1, messagesBetweenIsReadyChecks); } @Override public void onNext(T value) { - final int phase = phaser.getPhase(); + int awaitPhase = -1; long totalSecondsWaited = 0; long waitSeconds = 1; while (true) { try { - synchronized (outboundObserver) { - // We let the first message passthrough without blocking because it is performed under the - // StreamPool synchronized block and single message isn't going to cause memory issues due - // to excessive buffering within grpc. - if (firstMessage || outboundObserver.isReady()) { - firstMessage = false; + synchronized (lock) { + // We only check isReady periodically to effectively allow for increasing the outbound + // buffer periodically. This reduces the overhead of blocking while still restricting + // memory because there is a limited # of streams, and we have a max messages size of 2MB. + if (++messagesSinceReady <= messagesBetweenIsReadyChecks) { + outboundObserver.onNext(value); + return; + } + // If we awaited previously and timed out, wait for the same phase. Otherwise we're + // careful to observe the phase before observing isReady. + if (awaitPhase < 0) { + awaitPhase = phaser.getPhase(); + } + if (outboundObserver.isReady()) { + messagesSinceReady = 0; outboundObserver.onNext(value); return; } } - // A callback has been registered to advance the phaser whenever the observer transitions to - // is ready. Since we are waiting for a phase observed before the outboundObserver.isReady() - // returned false, we expect it to advance after the channel has become ready. This doesn't - // always seem to be the case (despite documentation stating otherwise) so we poll - // periodically and enforce an overall timeout related to the stream deadline. - phaser.awaitAdvanceInterruptibly(phase, waitSeconds, TimeUnit.SECONDS); - synchronized (outboundObserver) { + // A callback has been registered to advance the phaser whenever the observer + // transitions to is ready. Since we are waiting for a phase observed before the + // outboundObserver.isReady() returned false, we expect it to advance after the + // channel has become ready. 
This doesn't always seem to be the case (despite + // documentation stating otherwise) so we poll periodically and enforce an overall + // timeout related to the stream deadline. + phaser.awaitAdvanceInterruptibly(awaitPhase, waitSeconds, TimeUnit.SECONDS); + synchronized (lock) { + messagesSinceReady = 0; outboundObserver.onNext(value); return; } @@ -88,33 +109,33 @@ public void onNext(T value) { if (totalSecondsWaited > deadlineSeconds) { LOG.error( "Exceeded timeout waiting for the outboundObserver to become ready meaning " - + "that the streamdeadline was not respected."); + + "that the stream deadline was not respected."); throw new RuntimeException(e); } + if (totalSecondsWaited > 30) { + LOG.info( + "Output channel stalled for {}s, outbound thread {}.", + totalSecondsWaited, + Thread.currentThread().getName()); + } waitSeconds = waitSeconds * 2; } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } - if (totalSecondsWaited > 30) { - LOG.info( - "Output channel stalled for {}s, outbound thread {}.", - totalSecondsWaited, - Thread.currentThread().getName()); - } } } @Override public void onError(Throwable t) { - synchronized (outboundObserver) { + synchronized (lock) { outboundObserver.onError(t); } } @Override public void onCompleted() { - synchronized (outboundObserver) { + synchronized (lock) { outboundObserver.onCompleted(); } } diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/GrpcWindmillServer.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/GrpcWindmillServer.java index 6a5e608f5b8f..d7ae8b2b7347 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/GrpcWindmillServer.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/GrpcWindmillServer.java @@ -625,7 +625,8 @@ private static long uniqueId() { */ private abstract class AbstractWindmillStream implements WindmillStream { private final StreamObserverFactory streamObserverFactory = - StreamObserverFactory.direct(streamDeadlineSeconds * 2); + StreamObserverFactory.direct( + streamDeadlineSeconds * 2, options.getWindmillMessagesBetweenIsReadyChecks()); private final Function, StreamObserver> clientFactory; private final Executor executor = Executors.newSingleThreadExecutor( diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/StreamObserverFactory.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/StreamObserverFactory.java index e3f344c1fb52..fe8878f8f52f 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/StreamObserverFactory.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/StreamObserverFactory.java @@ -28,8 +28,9 @@ * to use. 
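Condensed sketch of the flow-control pattern the DirectStreamObserver change above implements: instead of polling isReady() on every message (previously only the very first message skipped the check), up to windmillMessagesBetweenIsReadyChecks messages (the new pipeline option, default 10) are written without polling, and only then does the observer check readiness and, if the channel is not ready, wait on the Phaser that the onReady callback advances. The class and method shape below are simplified stand-ins, not the actual Beam classes; the real code wraps this in a retry loop with doubling waits, stall logging after 30s, and a failure once the stream deadline is exceeded.

    import java.util.concurrent.Phaser;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;
    import io.grpc.stub.CallStreamObserver;

    /** Simplified sketch of periodic-isReady flow control; not the actual Beam class. */
    final class ThrottledStreamObserver<T> {
      private final Object lock = new Object();
      private final Phaser phaser; // advanced by an onReady callback registered elsewhere
      private final CallStreamObserver<T> outboundObserver;
      private final int messagesBetweenIsReadyChecks;
      private int messagesSinceReady = 0;

      ThrottledStreamObserver(
          Phaser phaser, CallStreamObserver<T> outboundObserver, int messagesBetweenIsReadyChecks) {
        this.phaser = phaser;
        this.outboundObserver = outboundObserver;
        // Let at least one message through so the header message never blocks.
        this.messagesBetweenIsReadyChecks = Math.max(1, messagesBetweenIsReadyChecks);
      }

      void onNext(T value) throws InterruptedException, TimeoutException {
        int awaitPhase;
        synchronized (lock) {
          // Within the allowance: send without polling isReady() at all.
          if (++messagesSinceReady <= messagesBetweenIsReadyChecks) {
            outboundObserver.onNext(value);
            return;
          }
          // Observe the phase before isReady() so a ready -> not-ready transition is not missed.
          awaitPhase = phaser.getPhase();
          if (outboundObserver.isReady()) {
            messagesSinceReady = 0;
            outboundObserver.onNext(value);
            return;
          }
        }
        // Not ready: wait (bounded) for the onReady callback to advance the phaser, then send.
        phaser.awaitAdvanceInterruptibly(awaitPhase, 1, TimeUnit.SECONDS);
        synchronized (lock) {
          messagesSinceReady = 0;
          outboundObserver.onNext(value);
        }
      }
    }

The cadence is plumbed from GrpcWindmillServer through StreamObserverFactory.direct(...) in the hunks above. Per the option's javadoc, higher values reduce per-message overhead at the cost of more data buffered inside gRPC, bounded by the limited number of streams and the 2MB maximum message size; the option should be settable like any other pipeline option (for example --windmillMessagesBetweenIsReadyChecks=40, via Beam's standard getter-to-flag mapping).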
*/ public abstract class StreamObserverFactory { - public static StreamObserverFactory direct(long deadlineSeconds) { - return new Direct(deadlineSeconds); + public static StreamObserverFactory direct( + long deadlineSeconds, int messagesBetweenIsReadyChecks) { + return new Direct(deadlineSeconds, messagesBetweenIsReadyChecks); } public abstract StreamObserver from( @@ -38,9 +39,11 @@ public abstract StreamObserver from( private static class Direct extends StreamObserverFactory { private final long deadlineSeconds; + private final int messagesBetweenIsReadyChecks; - Direct(long deadlineSeconds) { + Direct(long deadlineSeconds, int messagesBetweenIsReadyChecks) { this.deadlineSeconds = deadlineSeconds; + this.messagesBetweenIsReadyChecks = messagesBetweenIsReadyChecks; } @Override @@ -53,7 +56,8 @@ public StreamObserver from( clientFactory.apply( new ForwardingClientResponseObserver( inboundObserver, phaser::arrive, phaser::forceTermination)); - return new DirectStreamObserver<>(phaser, outboundObserver, deadlineSeconds); + return new DirectStreamObserver<>( + phaser, outboundObserver, deadlineSeconds, messagesBetweenIsReadyChecks); } } } diff --git a/sdks/go.mod b/sdks/go.mod index f483a2db846b..487bdd345338 100644 --- a/sdks/go.mod +++ b/sdks/go.mod @@ -32,7 +32,7 @@ require ( github.com/aws/aws-sdk-go-v2/config v1.18.7 github.com/aws/aws-sdk-go-v2/credentials v1.13.7 github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.46 - github.com/aws/aws-sdk-go-v2/service/s3 v1.29.6 + github.com/aws/aws-sdk-go-v2/service/s3 v1.30.0 github.com/aws/smithy-go v1.13.5 github.com/docker/go-connections v0.4.0 github.com/dustin/go-humanize v1.0.0 @@ -49,11 +49,11 @@ require ( github.com/xitongsys/parquet-go v1.6.2 github.com/xitongsys/parquet-go-source v0.0.0-20220315005136-aec0fe3e777c go.mongodb.org/mongo-driver v1.11.1 - golang.org/x/net v0.4.0 + golang.org/x/net v0.5.0 golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783 golang.org/x/sync v0.1.0 - golang.org/x/sys v0.3.0 - golang.org/x/text v0.5.0 + golang.org/x/sys v0.4.0 + golang.org/x/text v0.6.0 google.golang.org/api v0.106.0 google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef google.golang.org/grpc v1.51.0 @@ -80,7 +80,7 @@ require ( github.com/Microsoft/hcsshim v0.9.4 // indirect github.com/apache/arrow/go/arrow v0.0.0-20200730104253-651201b0f516 // indirect github.com/apache/thrift v0.14.2 // indirect - github.com/aws/aws-sdk-go v1.30.19 // indirect + github.com/aws/aws-sdk-go v1.33.0 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.21 // indirect github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.27 // indirect diff --git a/sdks/go.sum b/sdks/go.sum index f7b4c203522e..dea9d0bb4e87 100644 --- a/sdks/go.sum +++ b/sdks/go.sum @@ -113,8 +113,9 @@ github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5 github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= github.com/aws/aws-sdk-go v1.17.4/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aws/aws-sdk-go v1.30.19 h1:vRwsYgbUvC25Cb3oKXTyTYk3R5n1LRVk8zbvL4inWsc= github.com/aws/aws-sdk-go v1.30.19/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= +github.com/aws/aws-sdk-go v1.33.0 
h1:Bq5Y6VTLbfnJp1IV8EL/qUU5qO1DYHda/zis/sqevkY= +github.com/aws/aws-sdk-go v1.33.0/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= github.com/aws/aws-sdk-go-v2 v1.7.1/go.mod h1:L5LuPC1ZgDr2xQS7AmIec/Jlc7O/Y1u2KxJyNVab250= github.com/aws/aws-sdk-go-v2 v1.17.3 h1:shN7NlnVzvDUgPQ+1rLMSxY8OWRNDRYtiqe0p/PgrhY= github.com/aws/aws-sdk-go-v2 v1.17.3/go.mod h1:uzbQtefpm44goOPmdKyAlXSNcwlRgF3ePWVW6EtJvvw= @@ -153,8 +154,9 @@ github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.5.1/go.mod h1:6EQZIwNN github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.21 h1:vY5siRXvW5TrOKm2qKEf9tliBfdLxdfy0i02LOcmqUo= github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.21/go.mod h1:WZvNXT1XuH8dnJM0HvOlvk+RNn7NbAPvA/ACO0QarSc= github.com/aws/aws-sdk-go-v2/service/s3 v1.11.1/go.mod h1:XLAGFrEjbvMCLvAtWLLP32yTv8GpBquCApZEycDLunI= -github.com/aws/aws-sdk-go-v2/service/s3 v1.29.6 h1:W8pLcSn6Uy0eXgDBUUl8M8Kxv7JCoP68ZKTD04OXLEA= github.com/aws/aws-sdk-go-v2/service/s3 v1.29.6/go.mod h1:L2l2/q76teehcW7YEsgsDjqdsDTERJeX3nOMIFlgGUE= +github.com/aws/aws-sdk-go-v2/service/s3 v1.30.0 h1:wddsyuESfviaiXk3w9N6/4iRwTg/a3gktjODY6jYQBo= +github.com/aws/aws-sdk-go-v2/service/s3 v1.30.0/go.mod h1:L2l2/q76teehcW7YEsgsDjqdsDTERJeX3nOMIFlgGUE= github.com/aws/aws-sdk-go-v2/service/sso v1.3.1/go.mod h1:J3A3RGUvuCZjvSuZEcOpHDnzZP/sKbhDWV2T1EOzFIM= github.com/aws/aws-sdk-go-v2/service/sso v1.11.28 h1:gItLq3zBYyRDPmqAClgzTH8PBjDQGeyptYGHIwtYYNA= github.com/aws/aws-sdk-go-v2/service/sso v1.11.28/go.mod h1:wo/B7uUm/7zw/dWhBJ4FXuw1sySU5lyIhVg1Bu2yL9A= @@ -972,8 +974,8 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210825183410-e898025ed96a/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.4.0 h1:Q5QPcMlvfxFTAPV0+07Xz/MpK9NTXu2VDUuy0FeMfaU= -golang.org/x/net v0.4.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= +golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw= +golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -1070,8 +1072,8 @@ golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20211116061358-0a5406a5449c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ= -golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text 
v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1081,8 +1083,8 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM= -golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k= +golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= diff --git a/sdks/go/pkg/beam/core/metrics/metrics.go b/sdks/go/pkg/beam/core/metrics/metrics.go index 678a34a647d3..93dd9d0070e0 100644 --- a/sdks/go/pkg/beam/core/metrics/metrics.go +++ b/sdks/go/pkg/beam/core/metrics/metrics.go @@ -49,6 +49,7 @@ package metrics import ( "context" "fmt" + "hash" "hash/fnv" "sort" "sync" @@ -221,28 +222,33 @@ func newName(ns, n string) name { // We hash the name to a uint64 so we avoid using go's native string hashing for // every use of a metrics. uint64s have faster lookup than strings as a result. // Collisions are possible, but statistically unlikely as namespaces and names -// are usually short enough to avoid this. +// are usually short enough to avoid this. A sync.Pool is used because it can provide +// goroutine-local values that reduce contention and profiling shows hashName from NewCounter +// can be a contention hotspot. See parallel benches metrics_test.go:BenchmarkMetrics/* var ( - hasherMu sync.Mutex - hasher = fnv.New64a() + hashPool = sync.Pool{ + New: func() interface{} { + return fnv.New64a() + }, + } ) func hashName(ns, n string) nameHash { - hasherMu.Lock() + hasher := hashPool.Get().(hash.Hash64) hasher.Reset() var buf [64]byte b := buf[:] - hashString(ns, b) - hashString(n, b) + hashString(hasher, ns, b) + hashString(hasher, n, b) h := hasher.Sum64() - hasherMu.Unlock() + hashPool.Put(hasher) return nameHash(h) } // hashString hashes a string with the package level hasher // and requires posession of the hasherMu lock. The byte // slice is assumed to be backed by a [64]byte. -func hashString(s string, b []byte) { +func hashString(hasher hash.Hash64, s string, b []byte) { l := len(s) i := 0 for len(s)-i > 64 { diff --git a/sdks/go/pkg/beam/core/metrics/metrics_test.go b/sdks/go/pkg/beam/core/metrics/metrics_test.go index 75b483184ab2..33d1b2c7ddd1 100644 --- a/sdks/go/pkg/beam/core/metrics/metrics_test.go +++ b/sdks/go/pkg/beam/core/metrics/metrics_test.go @@ -617,21 +617,23 @@ func TestPcolQueryResult(t *testing.T) { } } -// Run on @lostluck's desktop (2020/01/21) go1.13.4 +// Run on @shanemhansen's desktop (2022/01/03) go1.20 RC1 after changing hashName to use a pool of hashers +// sync.Pool can return thread-local results eliminating the need for a lock and increasing throughput. 
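The metrics.go hunk above is Go: it swaps the mutex-guarded package-level fnv hasher for a sync.Pool so each goroutine can grab its own reusable hasher, removing the lock from the hot hashName path. For readers more familiar with the JVM side of this PR, the same contention-avoidance idea looks roughly like the Java sketch below; this is only an analogue (ThreadLocal rather than sync.Pool, CRC32 standing in for FNV-1a), not part of the change.

    import java.nio.charset.StandardCharsets;
    import java.util.zip.CRC32;

    /** Analogue only: per-thread reusable hashers instead of one globally locked hasher. */
    final class NameHasher {
      // A single shared hasher guarded by a lock makes every counter lookup contend;
      // giving each thread its own reusable instance removes that contention entirely.
      private static final ThreadLocal<CRC32> HASHER = ThreadLocal.withInitial(CRC32::new);

      static long hashName(String namespace, String name) {
        CRC32 hasher = HASHER.get();
        hasher.reset(); // clear state left over from the previous call on this thread
        hasher.update(namespace.getBytes(StandardCharsets.UTF_8));
        hasher.update(name.getBytes(StandardCharsets.UTF_8));
        return hasher.getValue();
      }
    }

The benchmark comment that continues below quantifies the effect of the Go change on the inplace counter/distribution/gauge paths.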
+// There are users in the wild who create an excessive number of Counters so a 4x improvement in throughput at the expense of +// creating GOMAXPROCS hasher values seems reasonable. // -// Allocs & bytes should be consistent within go versions, but ns/op is relative to the running machine. -// -// BenchmarkMetrics/counter_inplace-12 6054129 208 ns/op 48 B/op 1 allocs/op -// BenchmarkMetrics/distribution_inplace-12 5707147 228 ns/op 48 B/op 1 allocs/op -// BenchmarkMetrics/gauge_inplace-12 4742331 259 ns/op 48 B/op 1 allocs/op -// BenchmarkMetrics/counter_predeclared-12 90147133 12.7 ns/op 0 B/op 0 allocs/op -// BenchmarkMetrics/distribution_predeclared-12 55396678 21.6 ns/op 0 B/op 0 allocs/op -// BenchmarkMetrics/gauge_predeclared-12 18535839 60.5 ns/op 0 B/op 0 allocs/op -// BenchmarkMetrics/counter_raw-12 159581343 7.18 ns/op 0 B/op 0 allocs/op -// BenchmarkMetrics/distribution_raw-12 82724314 14.7 ns/op 0 B/op 0 allocs/op -// BenchmarkMetrics/gauge_raw-12 23292386 55.2 ns/op 0 B/op 0 allocs/op -// BenchmarkMetrics/getStore-12 309361303 3.78 ns/op 0 B/op 0 allocs/op -// BenchmarkMetrics/getCounterSet-12 287720998 3.98 ns/op 0 B/op 0 allocs/op +// name old time/op new time/op delta +// Metrics/counter_inplace-12 376ns ±17% 88ns ± 7% -76.66% (p=0.008 n=5+5) +// Metrics/distribution_inplace-12 394ns ± 3% 153ns ± 8% -61.17% (p=0.008 n=5+5) +// Metrics/gauge_inplace-12 371ns ± 4% 258ns ± 1% -30.37% (p=0.008 n=5+5) +// Metrics/counter_predeclared-12 16.9ns ± 6% 17.0ns ± 3% ~ (p=0.595 n=5+5) +// Metrics/distribution_predeclared-12 83.2ns ± 2% 84.9ns ± 1% ~ (p=0.056 n=5+5) +// Metrics/gauge_predeclared-12 105ns ± 6% 110ns ± 5% +4.81% (p=0.032 n=5+5) +// Metrics/counter_raw-12 10.8ns ± 4% 12.0ns ±28% ~ (p=0.151 n=5+5) +// Metrics/distribution_raw-12 77.6ns ± 7% 78.8ns ± 5% ~ (p=0.841 n=5+5) +// Metrics/gauge_raw-12 78.9ns ± 1% 77.3ns ± 4% ~ (p=0.151 n=5+5) +// Metrics/getStore-12 0.27ns ± 3% 0.27ns ± 2% ~ (p=0.841 n=5+5) +// Metrics/getCounterSet-12 0.32ns ± 3% 0.31ns ± 0% -1.28% (p=0.048 n=5+4) func BenchmarkMetrics(b *testing.B) { pt, c, d, g := "bench.bundle.data", "counter", "distribution", "gauge" aBundleID := "benchBID" @@ -664,9 +666,11 @@ func BenchmarkMetrics(b *testing.B) { } for _, test := range tests { b.Run(test.name, func(b *testing.B) { - for i := 0; i < b.N; i++ { - test.call() - } + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + test.call() + } + }) }) } } diff --git a/sdks/go/pkg/beam/transforms/sql/sql_test.go b/sdks/go/pkg/beam/transforms/sql/sql_test.go new file mode 100644 index 000000000000..58d801f45f0b --- /dev/null +++ b/sdks/go/pkg/beam/transforms/sql/sql_test.go @@ -0,0 +1,96 @@ +// Licensed to the Apache Software Foundation (ASF) under one or more +// contributor license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright ownership. +// The ASF licenses this file to You under the Apache License, Version 2.0 +// (the "License"); you may not use this file except in compliance with +// the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package sql + +import ( + "github.com/apache/beam/sdks/v2/go/pkg/beam" + "github.com/apache/beam/sdks/v2/go/pkg/beam/transforms/sql/sqlx" + "reflect" + "testing" +) + +func TestOptions_Add(t *testing.T) { + test := struct { + opt sqlx.Option + }{ + opt: sqlx.Option{ + Urn: "this is a string", + Payload: []byte{1, 2, 3, 4}, + }, + } + + o := options{} + o.Add(test.opt) + if o.customs == nil || !reflect.DeepEqual(o.customs[len(o.customs)-1], test.opt) { + t.Errorf("options.Add(%v) failed. For the customs field in options, got %v, want %v", test.opt, o.customs, test.opt) + } +} + +func TestInput(t *testing.T) { + test := struct { + inputName string + inputIn beam.PCollection + }{ + inputName: "this is a string", + inputIn: beam.PCollection{}, + } + + o := &options{inputs: make(map[string]beam.PCollection)} + option := Input(test.inputName, test.inputIn) + if option == nil { + t.Errorf("Input(%v, %v) = %v, want not nil", test.inputName, test.inputIn, option) + } + option(o) + if o.inputs == nil || !reflect.DeepEqual(o.inputs[test.inputName], test.inputIn) { + t.Errorf("The function that Input(%v, %v) returned did not work correctly. For the inputs field in options, got %v, want %v", test.inputName, test.inputIn, o.inputs, test.inputIn) + } +} + +func TestDialect(t *testing.T) { + test := struct { + dialect string + }{ + dialect: "this is a string", + } + + o := &options{} + option := Dialect(test.dialect) + if option == nil { + t.Errorf("Dialect(%v) = %v, want not nil", test.dialect, option) + } + option(o) + if !reflect.DeepEqual(o.dialect, test.dialect) { + t.Errorf("The function that Input(%v) returned did not work correctly. For the dialect field in options, got %v, want %v", test.dialect, o.dialect, test.dialect) + } +} + +func TestExpansionAddr(t *testing.T) { + test := struct { + addr string + }{ + addr: "this is a string", + } + + o := &options{} + option := ExpansionAddr(test.addr) + if option == nil { + t.Errorf("ExpansionAddr(%v) = %v, want not nil", test.addr, option) + } + option(o) + if !reflect.DeepEqual(o.expansionAddr, test.addr) { + t.Errorf("The function that ExpansionAddr(%v) returned did not work correctly. 
For the expansionAddr field in options, got %v, want %v", test.addr, o.expansionAddr, test.addr) + } +} diff --git a/sdks/java/container/Dockerfile b/sdks/java/container/Dockerfile index c29b7f7910be..b941aee453d4 100644 --- a/sdks/java/container/Dockerfile +++ b/sdks/java/container/Dockerfile @@ -23,6 +23,9 @@ ARG pull_licenses ADD target/slf4j-api.jar /opt/apache/beam/jars/ ADD target/slf4j-jdk14.jar /opt/apache/beam/jars/ +ADD target/jcl-over-slf4j.jar /opt/apache/beam/jars/ +ADD target/log4j-over-slf4j.jar /opt/apache/beam/jars/ +ADD target/log4j-to-slf4j.jar /opt/apache/beam/jars/ ADD target/beam-sdks-java-harness.jar /opt/apache/beam/jars/ # Required to run cross-language pipelines with KafkaIO diff --git a/sdks/java/container/boot.go b/sdks/java/container/boot.go index 5fa85c77dd5b..63dd1176d370 100644 --- a/sdks/java/container/boot.go +++ b/sdks/java/container/boot.go @@ -137,6 +137,9 @@ func main() { cp := []string{ filepath.Join(jarsDir, "slf4j-api.jar"), filepath.Join(jarsDir, "slf4j-jdk14.jar"), + filepath.Join(jarsDir, "jcl-over-slf4j.jar"), + filepath.Join(jarsDir, "log4j-over-slf4j.jar"), + filepath.Join(jarsDir, "log4j-to-slf4j.jar"), filepath.Join(jarsDir, "beam-sdks-java-harness.jar"), filepath.Join(jarsDir, "beam-sdks-java-io-kafka.jar"), filepath.Join(jarsDir, "kafka-clients.jar"), diff --git a/sdks/java/container/build.gradle b/sdks/java/container/build.gradle index 552cc8de98cd..98a626595603 100644 --- a/sdks/java/container/build.gradle +++ b/sdks/java/container/build.gradle @@ -38,6 +38,9 @@ configurations { dependencies { dockerDependency library.java.slf4j_api dockerDependency library.java.slf4j_jdk14 + dockerDependency library.java.jcl_over_slf4j + dockerDependency library.java.log4j_over_slf4j + dockerDependency library.java.log4j2_to_slf4j dockerDependency project(path: ":sdks:java:harness", configuration: "shadow") // For executing KafkaIO, e.g. 
as an external transform dockerDependency project(":sdks:java:io:kafka") diff --git a/sdks/java/container/common.gradle b/sdks/java/container/common.gradle index 265d14fbe9c7..1ec0da0098b5 100644 --- a/sdks/java/container/common.gradle +++ b/sdks/java/container/common.gradle @@ -48,6 +48,9 @@ task copyDockerfileDependencies(type: Copy) { from configurations.dockerDependency rename 'slf4j-api.*', 'slf4j-api.jar' rename 'slf4j-jdk14.*', 'slf4j-jdk14.jar' + rename 'jcl-over-slf4j.*', 'jcl-over-slf4j.jar' + rename 'log4j-over-slf4j.*', 'log4j-over-slf4j.jar' + rename 'log4j-to-slf4j.*', 'log4j-to-slf4j.jar' if (imageJavaVersion == "11" || imageJavaVersion == "17") { rename 'beam-sdks-java-container-agent.*.jar', 'open-module-agent.jar' } @@ -80,8 +83,10 @@ task copyGolangLicenses(type: Copy) { task copyJdkOptions(type: Copy) { if (imageJavaVersion == "17" || imageJavaVersion == "11") { from "option-jamm.json" - into "build/target/options" } + from "java${imageJavaVersion}-security.properties" + from "option-java${imageJavaVersion}-security.json" + into "build/target/options" } task skipPullLicenses(type: Exec) { @@ -129,4 +134,4 @@ dockerPrepare.dependsOn copySdkHarnessLauncher dockerPrepare.dependsOn copyDockerfileDependencies dockerPrepare.dependsOn ":sdks:java:container:downloadCloudProfilerAgent" dockerPrepare.dependsOn copyJdkOptions -dockerPrepare.dependsOn validateJavaHome \ No newline at end of file +dockerPrepare.dependsOn validateJavaHome diff --git a/sdks/java/container/java11/java11-security.properties b/sdks/java/container/java11/java11-security.properties new file mode 100644 index 000000000000..caf64592c400 --- /dev/null +++ b/sdks/java/container/java11/java11-security.properties @@ -0,0 +1,47 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Java 11 java.security properties file override for JVM +# base properties derived from: +# openjdk version "11.0.16" 2022-07-19 +# OpenJDK Runtime Environment 18.9 (build 11.0.16+8) +# OpenJDK 64-Bit Server VM 18.9 (build 11.0.16+8, mixed mode, sharing) + +# Java has now disabled TLSv1 and TLSv1.1. We specifically put it in the +# legacy algorithms list to allow it to be used if something better is not +# available (e.g. TLSv1.2). This will prevent breakages for existing users +# (for example JDBC with MySQL). See +# https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8202343 +# for additional details. 
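These java.security overrides are wired in by the option-java11-security.json file added below, which points the JVM's java.security.properties system property at this file (with a single '=', entries in the named file override the matching defaults rather than replacing the whole java.security file). A quick, hedged way to confirm the effect from inside the harness JVM is sketched here; the class name is illustrative, and the new SdkHarnessEnvironmentTest later in this diff performs essentially the same assertions.

    import java.security.Security;
    import java.util.Arrays;
    import javax.net.ssl.SSLContext;

    /** Illustrative check that the security-properties override took effect. */
    public class TlsPropertiesCheck {
      public static void main(String[] args) throws Exception {
        // With the override, TLSv1/TLSv1.1 should be absent here...
        System.out.println("disabled: " + Security.getProperty("jdk.tls.disabledAlgorithms"));
        // ...and present here, so they remain usable when nothing better can be negotiated.
        System.out.println("legacy:   " + Security.getProperty("jdk.tls.legacyAlgorithms"));

        // Default SSL parameters list the protocols JSSE will actually offer by default.
        SSLContext context = SSLContext.getInstance("TLS");
        context.init(null, null, null);
        System.out.println(Arrays.toString(context.getDefaultSSLParameters().getProtocols()));
      }
    }

The disabled/legacy property values that the file sets follow immediately below.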
+jdk.tls.disabledAlgorithms=SSLv3, RC4, DES, MD5withRSA, \ + DH keySize < 1024, EC keySize < 224, 3DES_EDE_CBC, anon, NULL, \ + include jdk.disabled.namedCurves + +jdk.tls.legacyAlgorithms= \ + K_NULL, C_NULL, M_NULL, \ + DH_anon, ECDH_anon, \ + RC4_128, RC4_40, DES_CBC, DES40_CBC, \ + 3DES_EDE_CBC, TLSv1, TLSv1.1 + +# /dev/random blocks in virtualized environments due to lack of +# good entropy sources, which makes SecureRandom use impractical. +# In particular, that affects the performance of HTTPS that relies +# on SecureRandom. +# +# Due to that, /dev/urandom is used as the default. +# +# See http://www.2uo.de/myths-about-urandom/ for some background +# on security of /dev/urandom on Linux. +securerandom.source=file:/dev/./urandom \ No newline at end of file diff --git a/sdks/java/container/java11/option-java11-security.json b/sdks/java/container/java11/option-java11-security.json new file mode 100644 index 000000000000..a8ad9672a3fc --- /dev/null +++ b/sdks/java/container/java11/option-java11-security.json @@ -0,0 +1,9 @@ +{ + "name": "java-security", + "enabled": true, + "options": { + "properties": { + "java.security.properties": "/opt/apache/beam/options/java11-security.properties" + } + } +} diff --git a/sdks/java/container/java17/java17-security.properties b/sdks/java/container/java17/java17-security.properties new file mode 100644 index 000000000000..ec2a5c039cb9 --- /dev/null +++ b/sdks/java/container/java17/java17-security.properties @@ -0,0 +1,47 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Java 17 java.security properties file override for JVM +# base properties derived from: +# openjdk version "17.0.2" 2022-01-18 +# OpenJDK Runtime Environment (build 17.0.2+8-86) +# OpenJDK 64-Bit Server VM (build 17.0.2+8-86, mixed mode, sharing) + +# Java has now disabled TLSv1 and TLSv1.1. We specifically put it in the +# legacy algorithms list to allow it to be used if something better is not +# available (e.g. TLSv1.2). This will prevent breakages for existing users +# (for example JDBC with MySQL). See +# https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8202343 +# for additional details. +jdk.tls.disabledAlgorithms=SSLv3, RC4, DES, MD5withRSA, \ + DH keySize < 1024, EC keySize < 224, 3DES_EDE_CBC, anon, NULL + +# The raw value from 17.0.2 for legacyAlgorithms is +# NULL, anon, RC4, DES, 3DES_EDE_CBC +# Because these values are in disabledAlgorithms, it is erroneous to include +# them in legacy (they are disabled in Java 8 and Java 11 as well). Here we +# only include TLSv1 and TLSv1.1 which were removed from disabledAlgorithms +jdk.tls.legacyAlgorithms=TLSv1, TLSv1.1 + +# /dev/random blocks in virtualized environments due to lack of +# good entropy sources, which makes SecureRandom use impractical. 
+# In particular, that affects the performance of HTTPS that relies +# on SecureRandom. +# +# Due to that, /dev/urandom is used as the default. +# +# See http://www.2uo.de/myths-about-urandom/ for some background +# on security of /dev/urandom on Linux. +securerandom.source=file:/dev/./urandom \ No newline at end of file diff --git a/sdks/java/container/java17/option-java17-security.json b/sdks/java/container/java17/option-java17-security.json new file mode 100644 index 000000000000..979d4be90d1e --- /dev/null +++ b/sdks/java/container/java17/option-java17-security.json @@ -0,0 +1,9 @@ +{ + "name": "java-security", + "enabled": true, + "options": { + "properties": { + "java.security.properties": "/opt/apache/beam/options/java17-security.properties" + } + } +} diff --git a/sdks/java/container/java8/java8-security.properties b/sdks/java/container/java8/java8-security.properties new file mode 100644 index 000000000000..f637d3ef7567 --- /dev/null +++ b/sdks/java/container/java8/java8-security.properties @@ -0,0 +1,47 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Java 8 java.security properties file override for JVM +# base properties derived from: +# openjdk version "1.8.0_342" +# OpenJDK Runtime Environment (build 1.8.0_342-b07) +# OpenJDK 64-Bit Server VM (build 25.342-b07, mixed mode) + +# Java has now disabled TLSv1 and TLSv1.1. We specifically put it in the +# legacy algorithms list to allow it to be used if something better is not +# available (e.g. TLSv1.2). This will prevent breakages for existing users +# (for example JDBC with MySQL). See +# https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8202343 +# for additional details. +jdk.tls.disabledAlgorithms=SSLv3, RC4, DES, MD5withRSA, \ + DH keySize < 1024, EC keySize < 224, 3DES_EDE_CBC, anon, NULL, \ + include jdk.disabled.namedCurves + +jdk.tls.legacyAlgorithms= \ + K_NULL, C_NULL, M_NULL, \ + DH_anon, ECDH_anon, \ + RC4_128, RC4_40, DES_CBC, DES40_CBC, \ + 3DES_EDE_CBC, TLSv1, TLSv1.1 + +# /dev/random blocks in virtualized environments due to lack of +# good entropy sources, which makes SecureRandom use impractical. +# In particular, that affects the performance of HTTPS that relies +# on SecureRandom. +# +# Due to that, /dev/urandom is used as the default. +# +# See http://www.2uo.de/myths-about-urandom/ for some background +# on security of /dev/urandom on Linux. 
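The securerandom.source setting that follows applies the /dev/urandom guidance above. A small, hedged way to see that the override matters (illustrative class name; assumes a Linux JVM where the SUN provider consults securerandom.source for seed material):

    import java.security.SecureRandom;
    import java.security.Security;

    /** Illustrative check that seeding uses the configured, non-blocking source. */
    public class SeedSourceCheck {
      public static void main(String[] args) {
        System.out.println("securerandom.source = " + Security.getProperty("securerandom.source"));
        SecureRandom random = new SecureRandom();
        long start = System.nanoTime();
        byte[] seed = random.generateSeed(32); // reads from the seed source; should not stall on /dev/random
        System.out.printf("generateSeed(32) -> %d bytes in %d ms (algorithm %s)%n",
            seed.length, (System.nanoTime() - start) / 1_000_000, random.getAlgorithm());
      }
    }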
+securerandom.source=file:/dev/./urandom \ No newline at end of file diff --git a/sdks/java/container/java8/option-java8-security.json b/sdks/java/container/java8/option-java8-security.json new file mode 100644 index 000000000000..47f2938bf7cd --- /dev/null +++ b/sdks/java/container/java8/option-java8-security.json @@ -0,0 +1,9 @@ +{ + "name": "java-security", + "enabled": true, + "options": { + "properties": { + "java.security.properties": "/opt/apache/beam/options/java8-security.properties" + } + } +} diff --git a/sdks/java/core/build.gradle b/sdks/java/core/build.gradle index 890d44a28e19..2e172ec50c07 100644 --- a/sdks/java/core/build.gradle +++ b/sdks/java/core/build.gradle @@ -26,11 +26,13 @@ applyJavaNature( ], shadowClosure: { dependencies { - include(dependency("org.apache.commons:.*")) + include(dependency(library.java.commons_compress)) + include(dependency(library.java.commons_lang3)) include(dependency(library.java.antlr_runtime)) } relocate "com.google.thirdparty", getJavaRelocatedPath("com.google.thirdparty") - relocate "org.apache.commons", getJavaRelocatedPath("org.apache.commons") + relocate "org.apache.commons.compress", getJavaRelocatedPath("org.apache.commons.compress") + relocate "org.apache.commons.lang3", getJavaRelocatedPath("org.apache.commons.lang3") relocate "org.antlr.v4", getJavaRelocatedPath("org.antlr.v4") }, ) @@ -114,6 +116,9 @@ dependencies { shadowTest library.java.quickcheck_generators shadowTest library.java.avro_tests shadowTest library.java.zstd_jni + shadowTest library.java.commons_logging + shadowTest library.java.log4j + shadowTest library.java.log4j2_api shadowTest library.java.jamm testRuntimeOnly library.java.slf4j_jdk14 } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/Schema.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/Schema.java index df370b4f8542..bf0b537ad935 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/Schema.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/Schema.java @@ -999,6 +999,13 @@ public final String toString() { builder.append(getMapValueType().toString()); builder.append(">"); break; + case LOGICAL_TYPE: + builder.append("LOGICAL_TYPE<"); + if (getLogicalType() != null) { + builder.append(getLogicalType().getIdentifier()); + } + builder.append(">"); + break; default: builder.append(getTypeName().toString()); } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java index f79db31bf7ec..0a9680185626 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java @@ -56,6 +56,7 @@ import org.apache.beam.sdk.util.SerializableUtils; import org.apache.beam.sdk.values.Row; import org.apache.beam.vendor.grpc.v1p48p1.com.google.protobuf.ByteString; +import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables; @@ -63,6 +64,8 @@ import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.ByteStreams; import org.apache.commons.lang3.ClassUtils; import org.checkerframework.checker.nullness.qual.Nullable; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; /** Utility methods for translating schemas. */ @Experimental(Kind.SCHEMAS) @@ -71,6 +74,7 @@ "rawtypes" }) public class SchemaTranslation { + private static final Logger LOG = LoggerFactory.getLogger(SchemaTranslation.class); private static final String URN_BEAM_LOGICAL_DECIMAL = FixedPrecisionNumeric.BASE_IDENTIFIER; private static final String URN_BEAM_LOGICAL_JAVASDK = "beam:logical_type:javasdk:v1"; @@ -124,8 +128,8 @@ private static SchemaApi.Field fieldToProto( .build(); } - private static SchemaApi.FieldType fieldTypeToProto( - FieldType fieldType, boolean serializeLogicalType) { + @VisibleForTesting + static SchemaApi.FieldType fieldTypeToProto(FieldType fieldType, boolean serializeLogicalType) { SchemaApi.FieldType.Builder builder = SchemaApi.FieldType.newBuilder(); switch (fieldType.getTypeName()) { case ROW: @@ -297,7 +301,8 @@ private static Field fieldFromProto(SchemaApi.Field protoField) { .withDescription(protoField.getDescription()); } - private static FieldType fieldTypeFromProto(SchemaApi.FieldType protoFieldType) { + @VisibleForTesting + static FieldType fieldTypeFromProto(SchemaApi.FieldType protoFieldType) { FieldType fieldType = fieldTypeFromProtoWithoutNullable(protoFieldType); if (protoFieldType.getNullable()) { @@ -426,26 +431,32 @@ private static FieldType fieldTypeFromProtoWithoutNullable(SchemaApi.FieldType p return FieldType.DATETIME; } else if (urn.equals(URN_BEAM_LOGICAL_DECIMAL)) { return FieldType.DECIMAL; - } else if (urn.equals(URN_BEAM_LOGICAL_JAVASDK)) { - return FieldType.logicalType( - (LogicalType) - SerializableUtils.deserializeFromByteArray( - logicalType.getPayload().toByteArray(), "logicalType")); - } else { - @Nullable FieldType argumentType = null; - @Nullable Object argumentValue = null; - if (logicalType.hasArgumentType()) { - argumentType = fieldTypeFromProto(logicalType.getArgumentType()); - argumentValue = fieldValueFromProto(argumentType, logicalType.getArgument()); + } else if (urn.startsWith("beam:logical_type:")) { + try { + return FieldType.logicalType( + (LogicalType) + SerializableUtils.deserializeFromByteArray( + logicalType.getPayload().toByteArray(), "logicalType")); + } catch (IllegalArgumentException e) { + LOG.warn( + "Unable to deserialize the logical type {} from proto. 
Mark as UnknownLogicalType.", + urn); } - return FieldType.logicalType( - new UnknownLogicalType( - urn, - logicalType.getPayload().toByteArray(), - argumentType, - argumentValue, - fieldTypeFromProto(logicalType.getRepresentation()))); } + // assemble an UnknownLogicalType + @Nullable FieldType argumentType = null; + @Nullable Object argumentValue = null; + if (logicalType.hasArgumentType()) { + argumentType = fieldTypeFromProto(logicalType.getArgumentType()); + argumentValue = fieldValueFromProto(argumentType, logicalType.getArgument()); + } + return FieldType.logicalType( + new UnknownLogicalType( + urn, + logicalType.getPayload().toByteArray(), + argumentType, + argumentValue, + fieldTypeFromProto(logicalType.getRepresentation()))); default: throw new IllegalArgumentException( "Unexpected type_info: " + protoFieldType.getTypeInfoCase()); diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/SdkHarnessEnvironmentTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/SdkHarnessEnvironmentTest.java index dd2d469fd4be..d2735265a391 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/SdkHarnessEnvironmentTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/SdkHarnessEnvironmentTest.java @@ -17,10 +17,24 @@ */ package org.apache.beam.sdk; +import static org.apache.beam.sdk.testing.ExpectedLogs.verifyLogged; +import static org.apache.beam.sdk.testing.ExpectedLogs.verifyNotLogged; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasItemInArray; +import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertNotNull; +import java.security.Security; +import java.util.logging.Level; +import java.util.logging.LogManager; +import javax.net.ssl.SSLContext; import org.apache.beam.sdk.coders.StringUtf8Coder; +import org.apache.beam.sdk.options.SdkHarnessOptions; +import org.apache.beam.sdk.options.SdkHarnessOptions.LogLevel; +import org.apache.beam.sdk.options.SdkHarnessOptions.SdkHarnessLogLevelOverrides; +import org.apache.beam.sdk.testing.ExpectedLogs; +import org.apache.beam.sdk.testing.ExpectedLogs.LogSaver; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.testing.UsesSdkHarnessEnvironment; @@ -66,4 +80,120 @@ public void testJammAgentAvailable() throws Exception { PAssert.that(output).containsInAnyOrder("measured"); p.run().waitUntilFinish(); } + + /** {@link DoFn} used to validate that TLS was enabled as part of java security properties. */ + private static class TLSDoFn extends DoFn { + @ProcessElement + public void processElement(ProcessContext c) throws Exception { + String[] disabledAlgorithms = + Security.getProperty("jdk.tls.disabledAlgorithms").trim().split("\\s*,\\s*"); + String[] legacyAlgorithms = + Security.getProperty("jdk.tls.legacyAlgorithms").trim().split("\\s*,\\s*"); + assertThat(disabledAlgorithms, not(hasItemInArray("TLSv1"))); + assertThat(disabledAlgorithms, not(hasItemInArray("TLSv1.1"))); + assertThat(legacyAlgorithms, hasItemInArray("TLSv1")); + assertThat(legacyAlgorithms, hasItemInArray("TLSv1.1")); + + // getDefaultSSLParameters() shows all protocols that JSSE implements that are allowed. + // getSupportedSSLParameters() shows all protocols that JSSE implements including those that + // are disabled. 
+ SSLContext context = SSLContext.getInstance("TLS"); + context.init(null, null, null); + assertNotNull(context); + String[] defaultProtocols = context.getDefaultSSLParameters().getProtocols(); + assertThat(defaultProtocols, hasItemInArray("TLSv1")); + assertThat(defaultProtocols, hasItemInArray("TLSv1.1")); + + c.output("TLSv1-TLSv1.1 enabled"); + } + } + + @Test + @Category({ValidatesRunner.class, UsesSdkHarnessEnvironment.class}) + public void testTlsAvailable() throws Exception { + PCollection input = p.apply(Create.of("TLS").withCoder(StringUtf8Coder.of())); + + PCollection output = input.apply(ParDo.of(new TLSDoFn())); + + PAssert.that(output).containsInAnyOrder("TLSv1-TLSv1.1 enabled"); + + p.run().waitUntilFinish(); + } + + private static class LoggingDoFn extends DoFn { + @ProcessElement + public void processElement(@Element String element, OutputReceiver output) { + LogSaver logSaver = new LogSaver(); + LogManager.getLogManager().getLogger("").addHandler(logSaver); + + try { + Exception fooException = new RuntimeException("a.Foo-RuntimeException"); + // Test the different log levels for various named loggers. + final org.slf4j.Logger fooLogger = org.slf4j.LoggerFactory.getLogger("a.Foo"); + fooLogger.trace("a.Foo-Trace"); + fooLogger.debug("a.Foo-Debug"); + fooLogger.info("a.Foo-Info"); + fooLogger.warn("a.Foo-Warn"); + fooLogger.error("a.Foo-Error", fooException); + + Exception barException = new RuntimeException("a.b.Bar-RuntimeException"); + final org.slf4j.Logger barLogger = org.slf4j.LoggerFactory.getLogger("a.b.Bar"); + barLogger.trace("a.b.Bar-Trace"); + barLogger.debug("a.b.Bar-Debug"); + barLogger.info("a.b.Bar-Info"); + barLogger.warn("a.b.Bar-Warn"); + barLogger.error("a.b.Bar-Error", barException); + + // Test the different types of loggers (e.g. 
slf4j, jcl, jul, log4j, log4jc) + final org.slf4j.Logger slf4jLogger = org.slf4j.LoggerFactory.getLogger("logger.slf4j"); + slf4jLogger.info("SLF4J log messages work"); + final org.apache.commons.logging.Log jclLogger = + org.apache.commons.logging.LogFactory.getLog("logger.jcl"); + jclLogger.info("JCL log messages work"); + final java.util.logging.Logger julLogger = java.util.logging.Logger.getLogger("logger.jul"); + julLogger.info("JUL log messages work"); + final org.apache.log4j.Logger log4jLogger = + org.apache.log4j.Logger.getLogger("logger.log4j"); + log4jLogger.info("Log4j log messages work"); + final org.apache.logging.log4j.Logger log4j2Logger = + org.apache.logging.log4j.LogManager.getLogger("logger.log4j2"); + log4j2Logger.info("Log4j2 log messages work"); + + verifyNotLogged(ExpectedLogs.matcher(Level.FINEST, "a.Foo-Trace"), logSaver); + verifyLogged(ExpectedLogs.matcher(Level.FINE, "a.Foo-Debug"), logSaver); + verifyLogged(ExpectedLogs.matcher(Level.INFO, "a.Foo-Info"), logSaver); + verifyLogged(ExpectedLogs.matcher(Level.WARNING, "a.Foo-Warn"), logSaver); + verifyLogged(ExpectedLogs.matcher(Level.SEVERE, "a.Foo-Error", fooException), logSaver); + + verifyNotLogged(ExpectedLogs.matcher(Level.FINEST, "a.Foo-Trace"), logSaver); + verifyNotLogged(ExpectedLogs.matcher(Level.FINE, "a.b.Bar-Debug"), logSaver); + verifyNotLogged(ExpectedLogs.matcher(Level.INFO, "a.b.Bar-Info"), logSaver); + verifyLogged(ExpectedLogs.matcher(Level.WARNING, "a.b.Bar-Warn"), logSaver); + verifyLogged(ExpectedLogs.matcher(Level.SEVERE, "a.b.Bar-Error", barException), logSaver); + + verifyLogged(ExpectedLogs.matcher(Level.INFO, "SLF4J log messages work"), logSaver); + verifyLogged(ExpectedLogs.matcher(Level.INFO, "JCL log messages work"), logSaver); + verifyLogged(ExpectedLogs.matcher(Level.INFO, "JUL log messages work"), logSaver); + verifyLogged(ExpectedLogs.matcher(Level.INFO, "Log4j log messages work"), logSaver); + verifyLogged(ExpectedLogs.matcher(Level.INFO, "Log4j2 log messages work"), logSaver); + output.output(element); + } finally { + LogManager.getLogManager().getLogger("").removeHandler(logSaver); + } + } + } + + @Test + @Category({ValidatesRunner.class, UsesSdkHarnessEnvironment.class}) + public void testLogging() throws Exception { + p.getOptions().as(SdkHarnessOptions.class).setDefaultSdkHarnessLogLevel(LogLevel.DEBUG); + p.getOptions() + .as(SdkHarnessOptions.class) + .setSdkHarnessLogLevelOverrides( + new SdkHarnessLogLevelOverrides().addOverrideForName("a.b.Bar", LogLevel.WARN)); + PCollection input = p.apply(Create.of("Logging Works").withCoder(StringUtf8Coder.of())); + PCollection output = input.apply(ParDo.of(new LoggingDoFn())); + PAssert.that(output).containsInAnyOrder("Logging Works"); + p.run().waitUntilFinish(); + } } diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/SchemaTranslationTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/SchemaTranslationTest.java index 2c1ed474a076..a648e5d662ef 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/SchemaTranslationTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/SchemaTranslationTest.java @@ -40,9 +40,16 @@ import org.apache.beam.sdk.schemas.Schema.FieldType; import org.apache.beam.sdk.schemas.logicaltypes.DateTime; import org.apache.beam.sdk.schemas.logicaltypes.FixedBytes; +import org.apache.beam.sdk.schemas.logicaltypes.FixedPrecisionNumeric; +import org.apache.beam.sdk.schemas.logicaltypes.FixedString; import 
org.apache.beam.sdk.schemas.logicaltypes.MicrosInstant; +import org.apache.beam.sdk.schemas.logicaltypes.NanosDuration; +import org.apache.beam.sdk.schemas.logicaltypes.NanosInstant; import org.apache.beam.sdk.schemas.logicaltypes.PythonCallable; import org.apache.beam.sdk.schemas.logicaltypes.SchemaLogicalType; +import org.apache.beam.sdk.schemas.logicaltypes.SqlTypes; +import org.apache.beam.sdk.schemas.logicaltypes.VariableBytes; +import org.apache.beam.sdk.schemas.logicaltypes.VariableString; import org.apache.beam.sdk.values.Row; import org.apache.beam.vendor.grpc.v1p48p1.com.google.protobuf.ByteString; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Charsets; @@ -395,6 +402,45 @@ public void typeInfoNotSet() { } } + /** Test schema translation of logical types. */ + @RunWith(Parameterized.class) + public static class LogicalTypesTest { + @Parameters(name = "{index}: {0}") + public static Iterable data() { + return ImmutableList.builder() + .add(FieldType.logicalType(SqlTypes.DATE)) + .add(FieldType.logicalType(SqlTypes.TIME)) + .add(FieldType.logicalType(SqlTypes.DATETIME)) + .add(FieldType.logicalType(SqlTypes.TIMESTAMP)) + .add(FieldType.logicalType(new NanosInstant())) + .add(FieldType.logicalType(new NanosDuration())) + .add(FieldType.logicalType(FixedBytes.of(10))) + .add(FieldType.logicalType(VariableBytes.of(10))) + .add(FieldType.logicalType(FixedString.of(10))) + .add(FieldType.logicalType(VariableString.of(10))) + .add(FieldType.logicalType(FixedPrecisionNumeric.of(10))) + .build(); + } + + @Parameter(0) + public Schema.FieldType fieldType; + + @Test + public void testPortableLogicalTypeSerializeDeserilizeCorrectly() { + SchemaApi.FieldType proto = SchemaTranslation.fieldTypeToProto(fieldType, true); + Schema.FieldType translated = SchemaTranslation.fieldTypeFromProto(proto); + + assertThat( + translated.getLogicalType().getClass(), equalTo(fieldType.getLogicalType().getClass())); + assertThat( + translated.getLogicalType().getArgumentType(), + equalTo(fieldType.getLogicalType().getArgumentType())); + assertThat( + translated.getLogicalType().getArgument(), + equalTo(fieldType.getLogicalType().getArgument())); + } + } + /** A simple logical type that has no argument. */ private static class NullArgumentLogicalType implements Schema.LogicalType { public static final String IDENTIFIER = "beam:logical_type:null_argument:v1"; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/ExpectedLogs.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/ExpectedLogs.java index ad976531620a..1e11d6ac77ec 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/ExpectedLogs.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/ExpectedLogs.java @@ -162,7 +162,7 @@ public void verifyError(String substring, Throwable t) { * @param substring The message to match against. 
*/ public void verifyNotLogged(String substring) { - verifyNotLogged(matcher(substring)); + verifyNotLogged(matcher(substring), logSaver); } /** @@ -187,10 +187,10 @@ public void verifyLogRecords(Matcher> matcher) { } private void verify(final Level level, final String substring) { - verifyLogged(matcher(level, substring)); + verifyLogged(matcher(level, substring), logSaver); } - private TypeSafeMatcher matcher(final String substring) { + public static TypeSafeMatcher matcher(final String substring) { return new TypeSafeMatcher() { @Override public void describeTo(Description description) { @@ -204,7 +204,7 @@ protected boolean matchesSafely(LogRecord item) { }; } - private TypeSafeMatcher matcher(final Level level, final String substring) { + public static TypeSafeMatcher matcher(final Level level, final String substring) { return new TypeSafeMatcher() { @Override public void describeTo(Description description) { @@ -220,14 +220,14 @@ protected boolean matchesSafely(LogRecord item) { } private void verify(final Level level, final String substring, final Throwable throwable) { - verifyLogged(matcher(level, substring, throwable)); + verifyLogged(matcher(level, substring, throwable), logSaver); } private void verifyNo(final Level level, final String substring, final Throwable throwable) { - verifyNotLogged(matcher(level, substring, throwable)); + verifyNotLogged(matcher(level, substring, throwable), logSaver); } - private TypeSafeMatcher matcher( + public static TypeSafeMatcher matcher( final Level level, final String substring, final Throwable throwable) { return new TypeSafeMatcher() { @Override @@ -249,7 +249,7 @@ protected boolean matchesSafely(LogRecord item) { }; } - private void verifyLogged(Matcher matcher) { + public static void verifyLogged(Matcher matcher, LogSaver logSaver) { for (LogRecord record : logSaver.getLogs()) { if (matcher.matches(record)) { return; @@ -259,17 +259,18 @@ private void verifyLogged(Matcher matcher) { fail(String.format("Missing match for [%s]", matcher)); } - private void verifyNotLogged(Matcher matcher) { + public static void verifyNotLogged(Matcher matcher, LogSaver logSaver) { // Don't use Matchers.everyItem(Matchers.not(matcher)) because it doesn't format the logRecord for (LogRecord record : logSaver.getLogs()) { if (matcher.matches(record)) { - fail(String.format("Unexpected match of [%s]: [%s]", matcher, logFormatter.format(record))); + fail( + String.format("Unexpected match of [%s]: [%s]", matcher, LOG_FORMATTER.format(record))); } } } @Override - protected void before() throws Throwable { + protected void before() { previousLevel = log.getLevel(); log.setLevel(Level.ALL); log.addHandler(logSaver); @@ -282,9 +283,9 @@ protected void after() { logSaver.reset(); } + private static final Formatter LOG_FORMATTER = new SimpleFormatter(); private final Logger log; private final LogSaver logSaver; - private final Formatter logFormatter = new SimpleFormatter(); private Level previousLevel; private ExpectedLogs(String name) { @@ -294,7 +295,7 @@ private ExpectedLogs(String name) { /** A JUL logging {@link Handler} that records all logging events that are passed to it. 
*/ @ThreadSafe - private static class LogSaver extends Handler { + public static class LogSaver extends Handler { private final Collection logRecords = new ConcurrentLinkedDeque<>(); @Override diff --git a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-5/build.gradle b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-5/build.gradle index f6da77aa8e80..affc9db08828 100644 --- a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-5/build.gradle +++ b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-5/build.gradle @@ -27,18 +27,8 @@ enableJavaPerformanceTesting() description = "Apache Beam :: SDKs :: Java :: IO :: Elasticsearch-Tests :: 5.x" ext.summary = "Tests of ElasticsearchIO on Elasticsearch 5.x" -def log4j_version = "2.17.1" def elastic_search_version = "5.6.3" -configurations.all { - resolutionStrategy { - // Make sure the log4j versions for api and core match instead of taking the default - // Gradle rule of using the latest. - force "org.apache.logging.log4j:log4j-core:$log4j_version" - force "org.apache.logging.log4j:log4j-api:$log4j_version" - } -} - dependencies { testImplementation project(path: ":sdks:java:io:elasticsearch-tests:elasticsearch-tests-common", configuration: "testRuntimeMigration") testImplementation library.java.testcontainers_elasticsearch @@ -50,8 +40,8 @@ dependencies { testImplementation library.java.hamcrest testImplementation library.java.junit testImplementation "org.elasticsearch.client:elasticsearch-rest-client:$elastic_search_version" - testRuntimeOnly "org.apache.logging.log4j:log4j-api:$log4j_version" - testRuntimeOnly "org.apache.logging.log4j:log4j-core:$log4j_version" + testRuntimeOnly library.java.log4j2_api + testRuntimeOnly library.java.log4j2_core testRuntimeOnly library.java.slf4j_jdk14 testRuntimeOnly project(path: ":runners:direct-java", configuration: "shadow") } diff --git a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-6/build.gradle b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-6/build.gradle index 69c48ad4ff4b..4b8f457ad500 100644 --- a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-6/build.gradle +++ b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-6/build.gradle @@ -27,18 +27,8 @@ enableJavaPerformanceTesting() description = "Apache Beam :: SDKs :: Java :: IO :: Elasticsearch-Tests :: 6.x" ext.summary = "Tests of ElasticsearchIO on Elasticsearch 6.x" -def log4j_version = "2.17.1" def elastic_search_version = "6.4.0" -configurations.all { - resolutionStrategy { - // Make sure the log4j versions for api and core match instead of taking the default - // Gradle rule of using the latest. 
- force "org.apache.logging.log4j:log4j-core:$log4j_version" - force "org.apache.logging.log4j:log4j-api:$log4j_version" - } -} - dependencies { testImplementation project(path: ":sdks:java:io:elasticsearch-tests:elasticsearch-tests-common", configuration: "testRuntimeMigration") testImplementation library.java.testcontainers_elasticsearch @@ -49,8 +39,8 @@ dependencies { testImplementation library.java.hamcrest testImplementation library.java.junit testImplementation "org.elasticsearch.client:elasticsearch-rest-client:$elastic_search_version" - testRuntimeOnly "org.apache.logging.log4j:log4j-api:$log4j_version" - testRuntimeOnly "org.apache.logging.log4j:log4j-core:$log4j_version" + testRuntimeOnly library.java.log4j2_api + testRuntimeOnly library.java.log4j2_core testRuntimeOnly library.java.slf4j_jdk14 testRuntimeOnly project(path: ":runners:direct-java", configuration: "shadow") } \ No newline at end of file diff --git a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-7/build.gradle b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-7/build.gradle index 90239086f155..325bc0442817 100644 --- a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-7/build.gradle +++ b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-7/build.gradle @@ -27,18 +27,8 @@ enableJavaPerformanceTesting() description = "Apache Beam :: SDKs :: Java :: IO :: Elasticsearch-Tests :: 7.x" ext.summary = "Tests of ElasticsearchIO on Elasticsearch 7.x" -def log4j_version = "2.17.1" def elastic_search_version = "7.13.4" -configurations.all { - resolutionStrategy { - // Make sure the log4j versions for api and core match instead of taking the default - // Gradle rule of using the latest. - force "org.apache.logging.log4j:log4j-core:$log4j_version" - force "org.apache.logging.log4j:log4j-api:$log4j_version" - } -} - dependencies { testImplementation project(path: ":sdks:java:io:elasticsearch-tests:elasticsearch-tests-common", configuration: "testRuntimeMigration") testImplementation library.java.testcontainers_elasticsearch @@ -50,8 +40,8 @@ dependencies { testImplementation library.java.hamcrest testImplementation library.java.junit testImplementation "org.elasticsearch.client:elasticsearch-rest-client:$elastic_search_version" - testRuntimeOnly "org.apache.logging.log4j:log4j-api:$log4j_version" - testRuntimeOnly "org.apache.logging.log4j:log4j-core:$log4j_version" + testRuntimeOnly library.java.log4j2_api + testRuntimeOnly library.java.log4j2_core testRuntimeOnly library.java.slf4j_jdk14 testRuntimeOnly project(path: ":runners:direct-java", configuration: "shadow") } diff --git a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-8/build.gradle b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-8/build.gradle index c46be1038506..b90bc0b2ef4f 100644 --- a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-8/build.gradle +++ b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-8/build.gradle @@ -27,18 +27,8 @@ enableJavaPerformanceTesting() description = "Apache Beam :: SDKs :: Java :: IO :: Elasticsearch-Tests :: 8.x" ext.summary = "Tests of ElasticsearchIO on Elasticsearch 8.x" -def log4j_version = "2.17.1" def elastic_search_version = "8.0.0" -configurations.all { - resolutionStrategy { - // Make sure the log4j versions for api and core match instead of taking the default - // Gradle rule of using the latest. 
- force "org.apache.logging.log4j:log4j-core:$log4j_version" - force "org.apache.logging.log4j:log4j-api:$log4j_version" - } -} - test { maxParallelForks = 1 } @@ -54,8 +44,8 @@ dependencies { testImplementation library.java.hamcrest testImplementation library.java.junit testImplementation "org.elasticsearch.client:elasticsearch-rest-client:$elastic_search_version" - testRuntimeOnly "org.apache.logging.log4j:log4j-api:$log4j_version" - testRuntimeOnly "org.apache.logging.log4j:log4j-core:$log4j_version" + testRuntimeOnly library.java.log4j2_api + testRuntimeOnly library.java.log4j2_core testRuntimeOnly library.java.slf4j_jdk14 testRuntimeOnly project(path: ":runners:direct-java", configuration: "shadow") } \ No newline at end of file diff --git a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-common/build.gradle b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-common/build.gradle index ba899173811a..e5be6ca079b2 100644 --- a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-common/build.gradle +++ b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-common/build.gradle @@ -25,18 +25,8 @@ applyJavaNature( description = "Apache Beam :: SDKs :: Java :: IO :: Elasticsearch-Tests :: Common" ext.summary = "Common test classes for ElasticsearchIO" -def log4j_version = "2.17.1" def elastic_search_version = "7.9.2" -configurations.all { - resolutionStrategy { - // Make sure the log4j versions for api and core match instead of taking the default - // Gradle rule of using the latest. - force "org.apache.logging.log4j:log4j-core:$log4j_version" - force "org.apache.logging.log4j:log4j-api:$log4j_version" - } -} - dependencies { testImplementation library.java.jackson_databind testImplementation project(path: ":sdks:java:core", configuration: "shadow") @@ -49,8 +39,8 @@ dependencies { testImplementation "org.elasticsearch.client:elasticsearch-rest-high-level-client:${elastic_search_version}" testImplementation library.java.testcontainers_elasticsearch - testRuntimeOnly "org.apache.logging.log4j:log4j-api:$log4j_version" - testRuntimeOnly "org.apache.logging.log4j:log4j-core:$log4j_version" + testRuntimeOnly library.java.log4j2_api + testRuntimeOnly library.java.log4j2_core testRuntimeOnly library.java.slf4j_jdk14 testRuntimeOnly project(path: ":runners:direct-java", configuration: "shadow") } diff --git a/sdks/python/apache_beam/testing/benchmarks/cloudml/__init__.py b/sdks/python/apache_beam/testing/benchmarks/cloudml/__init__.py new file mode 100644 index 000000000000..cce3acad34a4 --- /dev/null +++ b/sdks/python/apache_beam/testing/benchmarks/cloudml/__init__.py @@ -0,0 +1,16 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/sdks/python/apache_beam/testing/benchmarks/cloudml/cloudml_benchmark_constants_lib.py b/sdks/python/apache_beam/testing/benchmarks/cloudml/cloudml_benchmark_constants_lib.py new file mode 100644 index 000000000000..ad41dcc504ec --- /dev/null +++ b/sdks/python/apache_beam/testing/benchmarks/cloudml/cloudml_benchmark_constants_lib.py @@ -0,0 +1,29 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +"""A common file for CloudML benchmarks. + +This file contains constants for pipeline paths, dependency locations and +test data paths. +""" + +INPUT_CRITEO_SMALL = 'train10.tsv' +INPUT_CRITEO_SMALL_100MB = '100mb/train.txt' +INPUT_CRITEO_10GB = '10gb/train.txt' + +FREQUENCY_THRESHOLD = '5' +ENABLE_SHUFFLE = True diff --git a/sdks/python/apache_beam/testing/benchmarks/cloudml/cloudml_benchmark_test.py b/sdks/python/apache_beam/testing/benchmarks/cloudml/cloudml_benchmark_test.py new file mode 100644 index 000000000000..1561f3b17919 --- /dev/null +++ b/sdks/python/apache_beam/testing/benchmarks/cloudml/cloudml_benchmark_test.py @@ -0,0 +1,90 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import unittest +import uuid + +import pytest + +try: + import apache_beam.testing.benchmarks.cloudml.cloudml_benchmark_constants_lib as lib + from apache_beam.testing.benchmarks.cloudml.pipelines import workflow + from apache_beam.testing.test_pipeline import TestPipeline +except ImportError: # pylint: disable=bare-except + raise unittest.SkipTest('Dependencies are not installed') + +_INPUT_GCS_BUCKET_ROOT = 'gs://apache-beam-ml/datasets/cloudml/criteo' +_CRITEO_FEATURES_FILE = 'testdata/criteo/expected/features.tfrecord.gz' +_OUTPUT_GCS_BUCKET_ROOT = 'gs://temp-storage-for-end-to-end-tests/tft/' + + +@pytest.mark.uses_tft +class CloudMLTFTBenchmarkTest(unittest.TestCase): + def test_cloudml_benchmark_criteo_small(self): + test_pipeline = TestPipeline(is_integration_test=True) + extra_opts = {} + extra_opts['input'] = os.path.join( + _INPUT_GCS_BUCKET_ROOT, lib.INPUT_CRITEO_SMALL) + extra_opts['benchmark_type'] = 'tft' + extra_opts['classifier'] = 'criteo' + extra_opts['frequency_threshold'] = 0 + extra_opts['output'] = os.path.join( + _OUTPUT_GCS_BUCKET_ROOT, uuid.uuid4().hex) + workflow.run(test_pipeline.get_full_options_as_args(**extra_opts)) + + def test_cloudml_benchmark_criteo_no_shuffle_10GB(self): + test_pipeline = TestPipeline(is_integration_test=True) + extra_opts = {} + extra_opts['input'] = os.path.join( + _INPUT_GCS_BUCKET_ROOT, lib.INPUT_CRITEO_10GB) + extra_opts['benchmark_type'] = 'tft' + extra_opts['classifier'] = 'criteo' + extra_opts['frequency_threshold'] = 0 + extra_opts['output'] = os.path.join( + _OUTPUT_GCS_BUCKET_ROOT, uuid.uuid4().hex) + extra_opts['shuffle'] = False + workflow.run(test_pipeline.get_full_options_as_args(**extra_opts)) + + def test_cloudml_benchmark_criteo_10GB(self): + test_pipeline = TestPipeline(is_integration_test=True) + extra_opts = {} + extra_opts['input'] = os.path.join( + _INPUT_GCS_BUCKET_ROOT, lib.INPUT_CRITEO_10GB) + extra_opts['benchmark_type'] = 'tft' + extra_opts['classifier'] = 'criteo' + extra_opts['frequency_threshold'] = 0 + extra_opts['output'] = os.path.join( + _OUTPUT_GCS_BUCKET_ROOT, uuid.uuid4().hex) + workflow.run(test_pipeline.get_full_options_as_args(**extra_opts)) + + def test_cloud_ml_benchmark_criteo_fixed_workers_10GB(self): + test_pipeline = TestPipeline(is_integration_test=True) + extra_opts = {} + extra_opts['input'] = os.path.join( + _INPUT_GCS_BUCKET_ROOT, lib.INPUT_CRITEO_10GB) + extra_opts['benchmark_type'] = 'tft' + extra_opts['classifier'] = 'criteo' + extra_opts['frequency_threshold'] = 0 + extra_opts['output'] = os.path.join( + _OUTPUT_GCS_BUCKET_ROOT, uuid.uuid4().hex) + extra_opts['num_workers'] = 50 + extra_opts['machine_type'] = 'n1-standard-4' + workflow.run(test_pipeline.get_full_options_as_args(**extra_opts)) + + +if __name__ == '__main__': + unittest.main() diff --git a/sdks/python/apache_beam/testing/benchmarks/cloudml/criteo_tft/__init__.py b/sdks/python/apache_beam/testing/benchmarks/cloudml/criteo_tft/__init__.py new file mode 100644 index 000000000000..cce3acad34a4 --- /dev/null +++ b/sdks/python/apache_beam/testing/benchmarks/cloudml/criteo_tft/__init__.py @@ -0,0 +1,16 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License.
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/sdks/python/apache_beam/testing/benchmarks/cloudml/criteo_tft/criteo.py b/sdks/python/apache_beam/testing/benchmarks/cloudml/criteo_tft/criteo.py new file mode 100644 index 000000000000..cd14bd9e659f --- /dev/null +++ b/sdks/python/apache_beam/testing/benchmarks/cloudml/criteo_tft/criteo.py @@ -0,0 +1,158 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +"""Schema and transform definition for the Criteo dataset.""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import tensorflow as tf +import tensorflow_transform as tft + + +def _get_raw_categorical_column_name(column_idx): + return 'categorical-feature-{}'.format(column_idx) + + +def get_transformed_categorical_column_name(column_name_or_id): + if isinstance(column_name_or_id, bytes): + # assume the input is column name + column_name = column_name_or_id + else: + # assume the input is column id + column_name = _get_raw_categorical_column_name(column_name_or_id) + return column_name + '_id' + + +_INTEGER_COLUMN_NAMES = [ + 'int-feature-{}'.format(column_idx) for column_idx in range(1, 14) +] +_CATEGORICAL_COLUMN_NAMES = [ + _get_raw_categorical_column_name(column_idx) + for column_idx in range(14, 40) +] +DEFAULT_DELIMITER = '\t' +# Number of buckets for integer columns. +_NUM_BUCKETS = 10 + +# Schema annotations aren't supported in this build. +tft.common.IS_ANNOTATIONS_PB_AVAILABLE = False + + +def make_ordered_column_names(include_label=True): + """Returns the column names in the dataset in the order in which they appear. + + Args: + include_label: Indicates whether the label feature should be included. + Returns: + A list of column names in the dataset. + """ + result = ['clicked'] if include_label else [] + for name in _INTEGER_COLUMN_NAMES: + result.append(name) + for name in _CATEGORICAL_COLUMN_NAMES: + result.append(name) + return result + + +def make_legacy_input_feature_spec(include_label=True): + """Input schema definition. + + Args: + include_label: Indicates whether the label feature should be included. + Returns: + A `Schema` object.
+ """ + result = {} + if include_label: + result['clicked'] = tf.io.FixedLenFeature(shape=[], dtype=tf.int64) + for name in _INTEGER_COLUMN_NAMES: + result[name] = tf.io.FixedLenFeature( + shape=[], dtype=tf.int64, default_value=-1) + for name in _CATEGORICAL_COLUMN_NAMES: + result[name] = tf.io.FixedLenFeature( + shape=[], dtype=tf.string, default_value='') + return result + + +def make_input_feature_spec(include_label=True): + """Input schema definition. + + Args: + include_label: Indicates whether the label feature should be included. + + Returns: + A `Schema` object. + """ + result = {} + if include_label: + result['clicked'] = tf.io.FixedLenFeature(shape=[], dtype=tf.int64) + for name in _INTEGER_COLUMN_NAMES: + result[name] = tf.io.VarLenFeature(dtype=tf.int64) + + for name in _CATEGORICAL_COLUMN_NAMES: + result[name] = tf.io.VarLenFeature(dtype=tf.string) + + return result + + +def make_preprocessing_fn(frequency_threshold): + """Creates a preprocessing function for criteo. + + Args: + frequency_threshold: The frequency_threshold used when generating + vocabularies for the categorical features. + + Returns: + A preprocessing function. + """ + def preprocessing_fn(inputs): + """User defined preprocessing function for criteo columns. + + Args: + inputs: dictionary of input `tensorflow_transform.Column`. + Returns: + A dictionary of `tensorflow_transform.Column` representing the transformed + columns. + """ + result = {'clicked': inputs['clicked']} + for name in _INTEGER_COLUMN_NAMES: + feature = inputs[name] + # TODO(https://github.com/apache/beam/issues/24902): + # Replace this boilerplate with a helper function. + # This is a SparseTensor because it is optional. Here we fill in a + # default value when it is missing. + feature = tft.sparse_tensor_to_dense_with_shape( + feature, [None, 1], default_value=-1) + # Reshaping from a batch of vectors of size 1 to a batch of scalars and + # adding a bucketized version. + feature = tf.squeeze(feature, axis=1) + result[name] = feature + result[name + '_bucketized'] = tft.bucketize(feature, _NUM_BUCKETS) + for name in _CATEGORICAL_COLUMN_NAMES: + feature = inputs[name] + # Similar to for integer columns, but use '' as default. + feature = tft.sparse_tensor_to_dense_with_shape( + feature, [None, 1], default_value='') + feature = tf.squeeze(feature, axis=1) + result[get_transformed_categorical_column_name( + name)] = tft.compute_and_apply_vocabulary( + feature, frequency_threshold=frequency_threshold) + + return result + + return preprocessing_fn diff --git a/sdks/python/apache_beam/testing/benchmarks/cloudml/pipelines/__init__.py b/sdks/python/apache_beam/testing/benchmarks/cloudml/pipelines/__init__.py new file mode 100644 index 000000000000..cce3acad34a4 --- /dev/null +++ b/sdks/python/apache_beam/testing/benchmarks/cloudml/pipelines/__init__.py @@ -0,0 +1,16 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/sdks/python/apache_beam/testing/benchmarks/cloudml/pipelines/workflow.py b/sdks/python/apache_beam/testing/benchmarks/cloudml/pipelines/workflow.py new file mode 100644 index 000000000000..e60e3a47c0d1 --- /dev/null +++ b/sdks/python/apache_beam/testing/benchmarks/cloudml/pipelines/workflow.py @@ -0,0 +1,215 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import argparse +import logging +import os + +import apache_beam as beam +import tensorflow_transform as tft +import tensorflow_transform.beam as tft_beam +from apache_beam.testing.benchmarks.cloudml.criteo_tft import criteo +from tensorflow_transform import coders +from tensorflow_transform.tf_metadata import dataset_metadata +from tensorflow_transform.tf_metadata import schema_utils +from tfx_bsl.public import tfxio + +# Name of the column for the synthetic version of the benchmark. +_SYNTHETIC_COLUMN = 'x' + + +class _RecordBatchToPyDict(beam.PTransform): + """Converts PCollections of pa.RecordBatch to python dicts.""" + def __init__(self, input_feature_spec): + self._input_feature_spec = input_feature_spec + + def expand(self, pcoll): + def format_values(instance): + return { + k: v.squeeze(0).tolist() + if v is not None else self._input_feature_spec[k].default_value + for k, + v in instance.items() + } + + return ( + pcoll + | 'RecordBatchToDicts' >> + beam.FlatMap(lambda x: x.to_pandas().to_dict(orient='records')) + | 'FormatPyDictValues' >> beam.Map(format_values)) + + +def _synthetic_preprocessing_fn(inputs): + return { + _SYNTHETIC_COLUMN: tft.compute_and_apply_vocabulary( + inputs[_SYNTHETIC_COLUMN], + + # Execute more codepaths but do no frequency filtration. + frequency_threshold=1, + + # Execute more codepaths but do no top filtration. + top_k=2**31 - 1, + + # Execute more codepaths + num_oov_buckets=10) + } + + +class _PredictionHistogramFn(beam.DoFn): + def __init__(self): + # Beam Metrics API for Distributions only works with integers but + # predictions are floating point numbers. We thus store a "quantized" + # distribution of the prediction with sufficient granularity and for ease + # of human interpretation (eg as a percentage for logistic regression). 
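+ # For example, a prediction score of 0.8734 is recorded in the
+ # distribution as int(0.8734 * 100) == 87.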
+ self._prediction_distribution = beam.metrics.Metrics.distribution( + self.__class__, 'int(scores[0]*100)') + + def process(self, element): + self._prediction_distribution.update(int(element['scores'][0] * 100)) + + +def setup_pipeline(p, args): + if args.classifier == 'criteo': + input_feature_spec = criteo.make_input_feature_spec() + input_schema = schema_utils.schema_from_feature_spec(input_feature_spec) + input_tfxio = tfxio.BeamRecordCsvTFXIO( + physical_format='text', + column_names=criteo.make_ordered_column_names(), + schema=input_schema, + delimiter=criteo.DEFAULT_DELIMITER, + telemetry_descriptors=['CriteoCloudMLBenchmark']) + preprocessing_fn = criteo.make_preprocessing_fn(args.frequency_threshold) + else: + assert False, 'Unknown args classifier <{}>'.format(args.classifier) + + input_data = p | 'ReadFromText' >> beam.io.textio.ReadFromText( + args.input, coder=beam.coders.BytesCoder()) + + if args.benchmark_type == 'tft': + logging.info('TFT benchmark') + + # Setting TFXIO output format only for Criteo benchmarks to make sure that + # both codepaths are covered. + output_record_batches = args.classifier == 'criteo' + + # pylint: disable=expression-not-assigned + input_metadata = dataset_metadata.DatasetMetadata(schema=input_schema) + ( + input_metadata + | 'WriteInputMetadata' >> tft_beam.WriteMetadata( + os.path.join(args.output, 'raw_metadata'), pipeline=p)) + + with tft_beam.Context(temp_dir=os.path.join(args.output, 'tmp'), + use_deep_copy_optimization=True): + decoded_input_data = ( + input_data | 'DecodeForAnalyze' >> input_tfxio.BeamSource()) + transform_fn = ((decoded_input_data, input_tfxio.TensorAdapterConfig()) + | 'Analyze' >> tft_beam.AnalyzeDataset(preprocessing_fn)) + + if args.shuffle: + # Shuffle the data before any decoding (more compact representation). + input_data |= 'Shuffle' >> beam.transforms.Reshuffle() # pylint: disable=no-value-for-parameter + + decoded_input_data = ( + input_data | 'DecodeForTransform' >> input_tfxio.BeamSource()) + (dataset, + metadata) = ((decoded_input_data, input_tfxio.TensorAdapterConfig()), + transform_fn) | 'Transform' >> tft_beam.TransformDataset( + output_record_batches=output_record_batches) + + if output_record_batches: + + def record_batch_to_examples(batch, unary_passthrough_features): + """Encodes transformed data as tf.Examples.""" + # Ignore unary pass-through features. + del unary_passthrough_features + # From beam: "imports, functions and other variables defined in the + # global context of your __main__ file of your Dataflow pipeline are, by + # default, not available in the worker execution environment, and such + # references will cause a NameError, unless the --save_main_session + # pipeline option is set to True. Please see + # https://cloud.google.com/dataflow/faq#how-do-i-handle-nameerrors ." + from tfx_bsl.coders.example_coder import RecordBatchToExamples + return RecordBatchToExamples(batch) + + encode_ptransform = beam.FlatMapTuple(record_batch_to_examples) + else: + example_coder = coders.ExampleProtoCoder(metadata.schema) + encode_ptransform = beam.Map(example_coder.encode) + + # TODO: Use WriteDataset instead when it becomes available. + ( + dataset + | 'Encode' >> encode_ptransform + | 'Write' >> beam.io.WriteToTFRecord( + os.path.join(args.output, 'features_train'), + file_name_suffix='.tfrecord.gz')) + # transform_fn | beam.Map(print) + transform_fn | 'WriteTransformFn' >> tft_beam.WriteTransformFn(args.output) + + # TODO: Remember to eventually also save the statistics. 
+ else: + logging.fatal('Unknown benchmark type: %s', args.benchmark_type) + + +def parse_known_args(argv): + """Parses args for this workflow.""" + parser = argparse.ArgumentParser() + parser.add_argument( + '--input', + dest='input', + required=True, + help='Input path for input files.') + parser.add_argument( + '--output', + dest='output', + required=True, + help='Output path for output files.') + parser.add_argument( + '--classifier', + dest='classifier', + required=True, + help='Name of classifier to use.') + parser.add_argument( + '--frequency_threshold', + dest='frequency_threshold', + default=5, # TODO: Align default with TFT (ie 0). + help='Threshold for minimum number of unique values for a category.') + parser.add_argument( + '--shuffle', + action='store_false', + dest='shuffle', + default=True, + help='Skips shuffling the data.') + parser.add_argument( + '--benchmark_type', + dest='benchmark_type', + required=True, + help='Type of benchmark to run.') + + return parser.parse_known_args(argv) + + +def run(argv=None): + """Main entry point; defines and runs the pipeline.""" + known_args, pipeline_args = parse_known_args(argv) + with beam.Pipeline(argv=pipeline_args) as p: + setup_pipeline(p, known_args) + + +if __name__ == '__main__': + run() diff --git a/sdks/python/apache_beam/testing/benchmarks/cloudml/requirements.txt b/sdks/python/apache_beam/testing/benchmarks/cloudml/requirements.txt new file mode 100644 index 000000000000..8ddfddece547 --- /dev/null +++ b/sdks/python/apache_beam/testing/benchmarks/cloudml/requirements.txt @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +tfx_bsl +tensorflow-transform diff --git a/sdks/python/pytest.ini b/sdks/python/pytest.ini index e2ffe74ac275..915b49c8f16a 100644 --- a/sdks/python/pytest.ini +++ b/sdks/python/pytest.ini @@ -47,9 +47,11 @@ markers = no_xdist: run without pytest-xdist plugin # We run these tests with multiple major pyarrow versions (BEAM-11211) uses_pyarrow: tests that utilize pyarrow in some way + # ML tests uses_pytorch: tests that utilize pytorch in some way uses_sklearn: tests that utilize scikit-learn in some way uses_tensorflow: tests that utilize tensorflow in some way + uses_tft: tests that utilize TensorFlow Transform in some way # Default timeout intended for unit tests.
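Returning to the workflow module added above, here is a minimal sketch of driving it directly and reading the TFT artifacts back afterwards. The bucket path is a placeholder, and loading the transform with tft.TFTransformOutput assumes the standard directory layout written by WriteTransformFn:

import tensorflow_transform as tft

from apache_beam.testing.benchmarks.cloudml.pipelines import workflow

OUTPUT_DIR = 'gs://<your-bucket>/cloudml-tft-benchmark'  # placeholder

# Benchmark flags are consumed by parse_known_args(); everything else
# (runner, project, temp_location, ...) is forwarded to beam.Pipeline.
workflow.run([
    '--input=gs://apache-beam-ml/datasets/cloudml/criteo/train10.tsv',
    '--output=' + OUTPUT_DIR,
    '--classifier=criteo',
    '--benchmark_type=tft',
    '--runner=DirectRunner',
])

# WriteTransformFn stores the transform graph and metadata under OUTPUT_DIR,
# so the fitted transform can be reloaded later, e.g. for training or serving.
tft_output = tft.TFTransformOutput(OUTPUT_DIR)
print(tft_output.transformed_feature_spec())

The integration tests above exercise the same entry point: TestPipeline.get_full_options_as_args renders extra_opts as the equivalent --name=value flags and appends them to the options supplied via --test-pipeline-options.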
# If certain tests need a different value, please see the docs on how to diff --git a/sdks/python/scripts/generate_pydoc.sh b/sdks/python/scripts/generate_pydoc.sh index 376dc123cef5..9a0dc99613dd 100755 --- a/sdks/python/scripts/generate_pydoc.sh +++ b/sdks/python/scripts/generate_pydoc.sh @@ -65,6 +65,7 @@ excluded_patterns=( 'apache_beam/runners/test/' 'apache_beam/runners/worker/' 'apache_beam/testing/benchmarks/chicago_taxi/' + 'apache_beam/testing/benchmarks/cloudml/' 'apache_beam/testing/benchmarks/inference/' 'apache_beam/testing/benchmarks/data/' 'apache_beam/testing/benchmarks/load_tests/' diff --git a/sdks/python/scripts/run_integration_test.sh b/sdks/python/scripts/run_integration_test.sh index d38f1bf3baa2..508d9f50421e 100755 --- a/sdks/python/scripts/run_integration_test.sh +++ b/sdks/python/scripts/run_integration_test.sh @@ -247,8 +247,6 @@ if [[ -z $PIPELINE_OPTS ]]; then # Add --runner_v2 if provided if [[ "$RUNNER_V2" = true ]]; then opts+=("--experiments=use_runner_v2") - # TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved. - opts+=("--experiments=shuffle_mode=appliance") if [[ "$STREAMING" = true ]]; then # Dataflow Runner V2 only supports streaming engine. opts+=("--enable_streaming_engine") diff --git a/sdks/python/test-suites/dataflow/build.gradle b/sdks/python/test-suites/dataflow/build.gradle index d16111679da7..548a50246a28 100644 --- a/sdks/python/test-suites/dataflow/build.gradle +++ b/sdks/python/test-suites/dataflow/build.gradle @@ -84,3 +84,9 @@ task examplesPostCommit { dependsOn.add(":sdks:python:test-suites:dataflow:py${getVersionSuffix(it)}:examples") } } + +task tftTests { + getVersionsAsList('dataflow_cloudml_benchmark_tests_py_versions').each { + dependsOn.add(":sdks:python:test-suites:dataflow:py${getVersionSuffix(it)}:tftTests") + } +} diff --git a/sdks/python/test-suites/dataflow/common.gradle b/sdks/python/test-suites/dataflow/common.gradle index 5e2fa3d7f7f9..a879421e9394 100644 --- a/sdks/python/test-suites/dataflow/common.gradle +++ b/sdks/python/test-suites/dataflow/common.gradle @@ -362,6 +362,40 @@ def tensorRTTests = tasks.create("tensorRTtests") { } } +task installTFTRequirements { + dependsOn 'installGcpTest' + doLast { + exec { + workingDir "$rootProject.projectDir/sdks/python/apache_beam/testing/benchmarks/cloudml/" + executable 'sh' + args '-c', ". ${envdir}/bin/activate && pip install -r requirements.txt" + } + } +} + +// Tensorflow transform integration and benchmarking tests on Apache Beam. +task tftTests { + dependsOn 'installGcpTest' + dependsOn ':sdks:python:sdist' + dependsOn "installTFTRequirements" + + doLast { + def testOpts = basicPytestOpts + ["--numprocesses=8", "--dist=loadfile"] + def argMap = [ + "test_opts": testOpts, + "sdk_location": files(configurations.distTarBall.files).singleFile, + "suite": "TFTransformTests-df${pythonVersionSuffix}", + "collect": "uses_tft", + "requirements_file": "apache_beam/testing/benchmarks/cloudml/requirements.txt" + ] + def cmdArgs = mapToArgString(argMap) + exec { + executable 'sh' + args '-c', ". ${envdir}/bin/activate && ${runScriptsDir}/run_integration_test.sh $cmdArgs" + } + } +} + // add all RunInference E2E tests that run on DataflowRunner // As of now, this test suite is enable in py38 suite as the base NVIDIA image used for Tensor RT // contains Python 3.8. 
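The new tftTests suite collects tests through the uses_tft marker registered in pytest.ini, so an additional benchmark can opt in simply by reusing the marker; a hypothetical example (class and test names are illustrative):

import unittest

import pytest


@pytest.mark.uses_tft
class MyExtraTFTBenchmarkTest(unittest.TestCase):
  def test_my_new_tft_benchmark(self):
    # Build extra_opts and call workflow.run(...) as in
    # cloudml_benchmark_test.py above.
    ...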
diff --git a/sdks/python/test-suites/gradle.properties b/sdks/python/test-suites/gradle.properties index 050ed442f618..055bb484bfa6 100644 --- a/sdks/python/test-suites/gradle.properties +++ b/sdks/python/test-suites/gradle.properties @@ -37,7 +37,8 @@ dataflow_validates_container_tests=3.7,3.8,3.9,3.10 dataflow_validates_runner_batch_tests_V2=3.7,3.10 dataflow_validates_runner_streaming_tests_V2=3.7,3.10 dataflow_examples_postcommit_py_versions=3.10 - +# TFX_BSL is not yet supported on Python 3.10. +dataflow_cloudml_benchmark_tests_py_versions=3.9 # direct runner test-suites direct_mongodbio_it_task_py_versions=3.10 direct_examples_postcommit_py_versions=3.7,3.8,3.9,3.10 diff --git a/settings.gradle.kts b/settings.gradle.kts index 2841f5bf860c..5279951d13fa 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -66,6 +66,7 @@ include(":playground") include(":playground:backend") include(":playground:frontend") include(":playground:frontend:playground_components") +include(":playground:frontend:playground_components:tools:extract_symbols_java") include(":playground:backend:containers") include(":playground:backend:containers:java") include(":playground:backend:containers:go") diff --git a/website/www/site/content/en/blog/beam-2.43.0.md b/website/www/site/content/en/blog/beam-2.43.0.md index 83c3bda293e5..227538390f2f 100644 --- a/website/www/site/content/en/blog/beam-2.43.0.md +++ b/website/www/site/content/en/blog/beam-2.43.0.md @@ -37,7 +37,7 @@ For more information on changes in 2.43.0, check out the [detailed release notes * Decreased TextSource CPU utilization by 2.3x (Java) ([#23193](https://github.com/apache/beam/issues/23193)). * Fixed bug when using SpannerIO with RuntimeValueProvider options (Java) ([#22146](https://github.com/apache/beam/issues/22146)). -* Fixed issue for unicode rendering on WriteToBigQuery ([#10785](https://github.com/apache/beam/issues/10785)) +* Fixed issue for unicode rendering on WriteToBigQuery ([#22312](https://github.com/apache/beam/issues/22312)) * Remove obsolete variants of BigQuery Read and Write, always using Beam-native variant ([#23564](https://github.com/apache/beam/issues/23564) and [#23559](https://github.com/apache/beam/issues/23559)). * Bumped google-cloud-spanner dependency version to 3.x for Python SDK ([#21198](https://github.com/apache/beam/issues/21198)). diff --git a/website/www/site/content/en/get-started/quickstart/java.md b/website/www/site/content/en/get-started/quickstart/java.md index 9fcf1fa6e7ae..3eeeff0bf89d 100644 --- a/website/www/site/content/en/get-started/quickstart/java.md +++ b/website/www/site/content/en/get-started/quickstart/java.md @@ -42,8 +42,8 @@ Use [`sdkman`](https://sdkman.io/) to install the Java Development Kit (JDK). # Install sdkman curl -s "https://get.sdkman.io" | bash -# Install Java 11 -sdk install java 11.0.16-tem +# Install Java 17 +sdk install java 17.0.5-tem {{< /highlight >}} You can use either [Gradle](https://gradle.org/) or