diff --git a/.github/workflows/verify_library_generation.yaml b/.github/workflows/verify_library_generation.yaml index cdfde24b98..0b4ae1b8ed 100644 --- a/.github/workflows/verify_library_generation.yaml +++ b/.github/workflows/verify_library_generation.yaml @@ -25,7 +25,7 @@ jobs: cache: maven - uses: actions/setup-python@v4 with: - python-version: '3.11' + python-version: 3.11 - name: install pyenv shell: bash run: | @@ -36,10 +36,23 @@ jobs: export PATH="$PYENV_ROOT/bin:$PATH" echo "PYENV_ROOT=${PYENV_ROOT}" >> $GITHUB_ENV echo "PATH=${PATH}" >> $GITHUB_ENV - # init pyenv - eval "$(pyenv init --path)" - eval "$(pyenv init -)" + set +ex + - name: install python dependencies + shell: bash + run: | + set -ex + pushd library_generation + pip install -r requirements.in + popd + + - name: install utils (macos) + if: matrix.os == 'macos-12' + shell: bash + run: | + brew update --preinstall + # we need the `realpath` command to be available + brew install coreutils - name: install docker (ubuntu) if: matrix.os == 'ubuntu-22.04' shell: bash @@ -69,10 +82,30 @@ jobs: runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v3 - - name: Run unit tests + - name: install utils (macos) + if: matrix.os == 'macos-12' + shell: bash + run: | + brew update --preinstall + brew install coreutils + - uses: actions/setup-python@v4 + with: + python-version: 3.11 + - name: install python dependencies + shell: bash + run: | + set -ex + pushd library_generation + pip install -r requirements.in + popd + - name: Run shell unit tests run: | set -x library_generation/test/generate_library_unit_tests.sh + - name: Run python unit tests + run: | + set -x + python -m unittest library_generation/test/unit_tests.py lint: runs-on: ubuntu-22.04 steps: diff --git a/library_generation/.gitignore b/library_generation/.gitignore new file mode 100644 index 0000000000..c18dd8d83c --- /dev/null +++ b/library_generation/.gitignore @@ -0,0 +1 @@ +__pycache__/ diff --git a/library_generation/generate_composed_library.py b/library_generation/generate_composed_library.py new file mode 100755 index 0000000000..d5beec733b --- /dev/null +++ b/library_generation/generate_composed_library.py @@ -0,0 +1,141 @@ +""" +This script allows generation of libraries that are composed of more than one +service version. It is achieved by calling `generate_library.sh` without +postprocessing for all service versions and then calling +postprocess_library.sh at the end, once all libraries are ready. + +Prerequisites +- Needs a folder named `output` in current working directory. This folder +is automatically detected by `generate_library.sh` and this script ensures it +contains the necessary folders and files, specifically: + - A "google" folder found in the googleapis/googleapis repository + - A "grafeas" folder found in the googleapis/googleapis repository +Note: googleapis repo is found in https://github.com/googleapis/googleapis. +""" + +import click +import utilities as util +import os +import sys +import subprocess +import json +from model.GenerationConfig import GenerationConfig +from model.LibraryConfig import LibraryConfig +from model.ClientInputs import parse as parse_build_file + +script_dir = os.path.dirname(os.path.realpath(__file__)) + +""" +Main function in charge of generating libraries composed of more than one +service or service version. 
+Arguments + - config: a GenerationConfig object representing a parsed configuration + yaml + - library: a LibraryConfig object contained inside config, passed here for + convenience and to prevent all libraries to be processed + - enable_postprocessing: true if postprocessing should be done on the generated + libraries + - repository_path: path to the repository where the generated files will be + sent. If not specified, it will default to the one defined in the configuration yaml + and will be downloaded. The versions file will be inferred from this folder +""" +def generate_composed_library( + config: GenerationConfig, + library: LibraryConfig, + repository_path: str, + enable_postprocessing: bool = True, +) -> None: + output_folder = util.sh_util('get_output_folder') + + print(f'output_folder: {output_folder}') + print('library: ', library) + os.makedirs(output_folder, exist_ok=True) + + googleapis_commitish = config.googleapis_commitish + if library.googleapis_commitish is not None: + googleapis_commitish = library.googleapis_commitish + print('using library-specific googleapis commitish: ' + googleapis_commitish) + else: + print('using common googleapis_commitish') + + print('removing old googleapis folders and files') + util.delete_if_exists(f'{output_folder}/google') + util.delete_if_exists(f'{output_folder}/grafeas') + + print('downloading googleapis') + util.sh_util(f'download_googleapis_files_and_folders "{output_folder}" "{googleapis_commitish}"') + + is_monorepo = len(config.libraries) > 1 + + base_arguments = [] + base_arguments += util.create_argument('gapic_generator_version', config) + base_arguments += util.create_argument('grpc_version', config) + base_arguments += util.create_argument('protobuf_version', config) + + library_name = f'java-{library.api_shortname}' + library_path = None + + versions_file = '' + if is_monorepo: + print('this is a monorepo library') + destination_path = config.destination_path + '/' + library_name + library_folder = destination_path.split('/')[-1] + if repository_path is None: + print(f'sparse_cloning monorepo with {library_name}') + repository_path = f'{output_folder}/{config.destination_path}' + clone_out = util.sh_util(f'sparse_clone "https://github.com/googleapis/{MONOREPO_NAME}.git" "{library_folder} google-cloud-pom-parent google-cloud-jar-parent versions.txt .github"', cwd=output_folder) + print(clone_out) + library_path = f'{repository_path}/{library_name}' + versions_file = f'{repository_path}/versions.txt' + else: + print('this is a HW library') + destination_path = library_name + if repository_path is None: + repository_path = f'{output_folder}/{destination_path}' + util.delete_if_exists(f'{output_folder}/{destination_path}') + clone_out = util.sh_util(f'git clone "https://github.com/googleapis/{destination_path}.git"', cwd=output_folder) + print(clone_out) + library_path = f'{repository_path}' + versions_file = f'{repository_path}/versions.txt' + + owlbot_cli_source_folder = util.sh_util('mktemp -d') + for gapic in library.gapic_configs: + + effective_arguments = list(base_arguments) + effective_arguments += util.create_argument('proto_path', gapic) + + build_file_folder = f'{output_folder}/{gapic.proto_path}' + print(f'build_file_folder: {build_file_folder}') + client_inputs = parse_build_file(build_file_folder, gapic.proto_path) + effective_arguments += [ + '--proto_only', client_inputs.proto_only, + '--gapic_additional_protos', client_inputs.additional_protos, + '--transport', client_inputs.transport, + 
'--rest_numeric_enums', client_inputs.rest_numeric_enum, + '--gapic_yaml', client_inputs.gapic_yaml, + '--service_config', client_inputs.service_config, + '--service_yaml', client_inputs.service_yaml, + '--include_samples', client_inputs.include_samples, + ] + service_version = gapic.proto_path.split('/')[-1] + temp_destination_path = f'java-{library.api_shortname}-{service_version}' + effective_arguments += [ '--destination_path', temp_destination_path ] + print('arguments: ') + print(effective_arguments) + print(f'Generating library from {gapic.proto_path} to {destination_path}...') + util.run_process_and_print_output(['bash', '-x', f'{script_dir}/generate_library.sh', + *effective_arguments], 'Library generation') + + + if enable_postprocessing: + util.sh_util(f'build_owlbot_cli_source_folder "{library_path}"' + + f' "{owlbot_cli_source_folder}" "{output_folder}/{temp_destination_path}"' + + f' "{gapic.proto_path}"', + cwd=output_folder) + + if enable_postprocessing: + # call postprocess library + util.run_process_and_print_output([f'{script_dir}/postprocess_library.sh', + f'{library_path}', '', versions_file, owlbot_cli_source_folder, + config.owlbot_cli_image, config.synthtool_commitish, str(is_monorepo).lower()], 'Library postprocessing') + diff --git a/library_generation/generate_library.sh b/library_generation/generate_library.sh index ed8a9008c5..9691f063e6 100755 --- a/library_generation/generate_library.sh +++ b/library_generation/generate_library.sh @@ -60,18 +60,10 @@ case $key in include_samples="$2" shift ;; - --enable_postprocessing) - enable_postprocessing="$2" - shift - ;; --os_architecture) os_architecture="$2" shift ;; - --versions_file) - versions_file="$2" - shift - ;; *) echo "Invalid option: [$1]" exit 1 @@ -85,6 +77,11 @@ script_dir=$(dirname "$(readlink -f "$0")") source "${script_dir}"/utilities.sh output_folder="$(get_output_folder)" +if [ -z "${gapic_generator_version}" ]; then + echo 'missing required argument --gapic_generator_version' + exit 1 +fi + if [ -z "${protobuf_version}" ]; then protobuf_version=$(get_protobuf_version "${gapic_generator_version}") fi @@ -125,10 +122,6 @@ if [ -z "${include_samples}" ]; then include_samples="true" fi -if [ -z "$enable_postprocessing" ]; then - enable_postprocessing="true" -fi - if [ -z "${os_architecture}" ]; then os_architecture=$(detect_os_architecture) fi @@ -305,34 +298,7 @@ popd # output_folder pushd "${temp_destination_path}" rm -rf java_gapic_srcjar java_gapic_srcjar_raw.srcjar.zip java_grpc.jar java_proto.jar temp-codegen.srcjar popd # destination path -##################### Section 5 ##################### -# post-processing -##################################################### -if [ "${enable_postprocessing}" != "true" ]; -then - echo "post processing is disabled" - cp -r ${temp_destination_path}/* "${output_folder}/${destination_path}" - rm -rdf "${temp_destination_path}" - exit 0 -fi -if [ -z "${versions_file}" ];then - echo "no versions.txt argument provided. 
Please provide one in order to enable post-processing" - exit 1 -fi -workspace="${output_folder}/workspace" -if [ -d "${workspace}" ]; then - rm -rdf "${workspace}" -fi - -mkdir -p "${workspace}" - -# if destination_path is not empty, it will be used as a starting workspace for -# postprocessing -if [[ $(find "${output_folder}/${destination_path}" -mindepth 1 -maxdepth 1 -type d,f | wc -l) -gt 0 ]];then - workspace="${output_folder}/${destination_path}" -fi - -bash -x "${script_dir}/postprocess_library.sh" "${workspace}" \ - "${temp_destination_path}" \ - "${versions_file}" +cp -r ${temp_destination_path}/* "${output_folder}/${destination_path}" +rm -rdf "${temp_destination_path}" +exit 0 diff --git a/library_generation/main.py b/library_generation/main.py new file mode 100644 index 0000000000..282e0283fd --- /dev/null +++ b/library_generation/main.py @@ -0,0 +1,77 @@ +""" +Parses a config yaml and generates libraries via generate_composed_library.py +""" + +import click +from generate_composed_library import generate_composed_library +from typing import Dict +from model.GenerationConfig import GenerationConfig +from collections.abc import Sequence +from absl import app + +@click.group(invoke_without_command=False) +@click.pass_context +@click.version_option(message="%(version)s") +def main(ctx): + pass + +@main.command() +@click.option( + "--generation-config-yaml", + required=True, + type=str, + help=""" + Path to generation_config.yaml that contains the metadata about library generation + """ +) +@click.option( + "--enable-postprocessing", + required=False, + default=True, + type=bool, + help=""" + Path to repository where generated files will be merged into, via owlbot copy-code. + Specifying this option enables postprocessing + """ +) +@click.option( + "--target-library-api-shortname", + required=False, + type=str, + help=""" + If specified, only the `library` with api_shortname = target-library-api-shortname will + be generated. If not specified, all libraries in the configuration yaml will be generated + """ +) +@click.option( + "--repository-path", + required=False, + type=str, + help=""" + If specified, the generated files will be sent to this location. 
If not specified, the + repository will be pulled into output_folder and move the generated files there + """ +) +def generate_from_yaml( + generation_config_yaml: str, + enable_postprocessing: bool, + target_library_api_shortname: str, + repository_path: str +) -> None: + config = GenerationConfig.from_yaml(generation_config_yaml) + target_libraries = config.libraries + if target_library_api_shortname is not None: + target_libraries = [library for library in config.libraries + if library.api_shortname == target_library_api_shortname] + for library in target_libraries: + print(f'generating library {library.api_shortname}') + generate_composed_library( + config, library, repository_path, enable_postprocessing + ) + + + + + +if __name__ == "__main__": + main() diff --git a/library_generation/new_client/client_inputs.py b/library_generation/model/ClientInputs.py similarity index 91% rename from library_generation/new_client/client_inputs.py rename to library_generation/model/ClientInputs.py index 3106fe5210..38acdb316f 100644 --- a/library_generation/new_client/client_inputs.py +++ b/library_generation/model/ClientInputs.py @@ -71,6 +71,7 @@ def __init__( def parse( build_path: Path, versioned_path: str, + build_file_name: str = 'BUILD.bazel' ) -> ClientInput: """ Utility function to parse inputs of generate_library.sh from BUILD.bazel. @@ -79,18 +80,22 @@ def parse( google/cloud/asset/v1. :return: an ClientInput object. """ - with open(f"{build_path}/BUILD.bazel") as build: + with open(f"{build_path}/{build_file_name}") as build: content = build.read() proto_library_target = re.compile( proto_library_pattern, re.DOTALL | re.VERBOSE - ).findall(content)[0] - additional_protos = __parse_additional_protos(proto_library_target) + ).findall(content) + additional_protos = '' + if len(proto_library_target) > 0: + additional_protos = __parse_additional_protos(proto_library_target[0]) gapic_target = re.compile(gapic_pattern, re.DOTALL | re.VERBOSE)\ .findall(content) assembly_target = re.compile(assembly_pattern, re.DOTALL | re.VERBOSE)\ .findall(content) - include_samples = __parse_include_samples(assembly_target[0]) + include_samples = 'false' + if len(assembly_target) > 0: + include_samples = __parse_include_samples(assembly_target[0]) if len(gapic_target) == 0: return ClientInput( include_samples=include_samples @@ -142,7 +147,7 @@ def __parse_gapic_yaml(gapic_target: str, versioned_path: str) -> str: def __parse_service_config(gapic_target: str, versioned_path: str) -> str: service_config = re.findall(service_config_pattern, gapic_target) - return f"{versioned_path}/{service_config[0]}" if len(service_config) != 0 \ + return f"{versioned_path}/{service_config[0]}".replace(':','') if len(service_config) != 0 \ else "" diff --git a/library_generation/model/GapicConfig.py b/library_generation/model/GapicConfig.py new file mode 100644 index 0000000000..be99b0a35f --- /dev/null +++ b/library_generation/model/GapicConfig.py @@ -0,0 +1,9 @@ +""" +Class that represents a GAPICs single entry, inside a `LibraryConfig` in a generation_config.yaml +""" +class GapicConfig: + def __init__( + self, + proto_path: str, + ): + self.proto_path = proto_path diff --git a/library_generation/model/GenerationConfig.py b/library_generation/model/GenerationConfig.py new file mode 100644 index 0000000000..77273b10eb --- /dev/null +++ b/library_generation/model/GenerationConfig.py @@ -0,0 +1,91 @@ +""" +Class that represents the root of a generation_config.yaml +""" +import yaml +from typing import List, Optional, Dict 
+from .LibraryConfig import LibraryConfig +from .GapicConfig import GapicConfig + + +class GenerationConfig: + def __init__( + self, + gapic_generator_version: str, + grpc_version: Optional[str], + protobuf_version: Optional[str], + googleapis_commitish: str, + owlbot_cli_image: str, + synthtool_commitish: str, + destination_path: Optional[str], + libraries: List[LibraryConfig], + ): + self.gapic_generator_version = gapic_generator_version + self.grpc_version = grpc_version + self.protobuf_version = protobuf_version + self.googleapis_commitish = googleapis_commitish + self.owlbot_cli_image = owlbot_cli_image + self.synthtool_commitish = synthtool_commitish + self.destination_path = destination_path + self.libraries = libraries + + """ + Parses a yaml located in path_to_yaml. Returns the parsed configuration represented + by the "model" classes + """ + @staticmethod + def from_yaml(path_to_yaml: str): + config = None + with open(path_to_yaml, 'r') as file_stream: + config = yaml.load(file_stream, yaml.Loader) + + libraries = _required(config, 'libraries') + + parsed_libraries = list() + for library in libraries: + gapics = _required(library, 'GAPICs') + + parsed_gapics = list() + for gapic in gapics: + proto_path = _required(gapic, 'proto_path') + new_gapic = GapicConfig(proto_path) + parsed_gapics.append(new_gapic) + + new_library = LibraryConfig( + _required(library, 'api_shortname'), + _optional(library, 'name_pretty', None), + _required(library, 'library_type'), + _optional(library, 'artifact_id', None), + _optional(library, 'api_description', None), + _optional(library, 'product_documentation', None), + _optional(library, 'client_documentation', None), + _optional(library, 'rest_documentation', None), + _optional(library, 'rpc_documentation', None), + parsed_gapics, + _optional(library, 'googleapis_commitish', None), + _optional(library, 'group_id', 'com.google.cloud'), + _optional(library, 'requires_billing', None), + ) + parsed_libraries.append(new_library) + + parsed_config = GenerationConfig( + _required(config, 'gapic_generator_version'), + _optional(config, 'grpc_version', None), + _optional(config, 'protobuf_version', None), + _required(config, 'googleapis_commitish'), + _required(config, 'owlbot_cli_image'), + _required(config, 'synthtool_commitish'), + _optional(config, 'destination_path', None), + parsed_libraries + ) + + return parsed_config + +def _required(config: Dict, key: str): + if key not in config: + raise ValueError(f'required key {key} not found in yaml') + return config[key] + +def _optional(config: Dict, key: str, default: any): + if key not in config: + return default + return config[key] diff --git a/library_generation/model/Library.py b/library_generation/model/Library.py new file mode 100644 index 0000000000..e1449443ba --- /dev/null +++ b/library_generation/model/Library.py @@ -0,0 +1,46 @@ +""" +Class that represents a library in a generation_config.yaml file +""" +from typing import Dict, List, Optional +from enum import Enum +from .GapicConfig import GapicConfig + +""" +Two possible library types: + - GAPIC_AUTO: pure generated library + - GAPIC_COMBO: generated library with a handwritten layer +""" +class _LibraryType(Enum): + GAPIC_AUTO = 1 + GAPIC_COMBO = 2 + +class LibraryConfig: + def __init__( + self, + api_shortname: str, + name_pretty: Optional[str], + library_type: _LibraryType, + artifact_id: Optional[str], + api_description: Optional[str], + product_documentation: Optional[str], + client_documentation: Optional[str], + rest_documentation: 
Optional[str], + rpc_documentation: Optional[str], + gapicConfigs: List[GapicConfig], + googleapis_commitish: Optional[str], + group_id: Optional[str] = 'com.google.cloud', + requires_billing: Optional[bool] = True, + ): + self.api_shortname = api_shortname + self.name_pretty = name_pretty + self.library_type = library_type + self.artifact_id = artifact_id + self.requires_billing = requires_billing + self.api_description = api_description + self.product_documentation = product_documentation + self.client_documentation = client_documentation + self.rest_documentation = rest_documentation + self.rpc_documentation = rpc_documentation + self.group_id = group_id + self.gapicConfigs = gapicConfigs + self.googleapis_commitish = googleapis_commitish diff --git a/library_generation/model/LibraryConfig.py b/library_generation/model/LibraryConfig.py new file mode 100644 index 0000000000..a0d09351ed --- /dev/null +++ b/library_generation/model/LibraryConfig.py @@ -0,0 +1,46 @@ +""" +Class that represents a library in a generation_config.yaml file +""" +from typing import Dict, List, Optional +from enum import Enum +from .GapicConfig import GapicConfig + +""" +Two possible library types: + - GAPIC_AUTO: pure generated library + - GAPIC_COMBO: generated library with a handwritten layer +""" +class _LibraryType(Enum): + GAPIC_AUTO = 1 + GAPIC_COMBO = 2 + +class LibraryConfig: + def __init__( + self, + api_shortname: str, + name_pretty: Optional[str], + library_type: _LibraryType, + artifact_id: Optional[str], + api_description: Optional[str], + product_documentation: Optional[str], + client_documentation: Optional[str], + rest_documentation: Optional[str], + rpc_documentation: Optional[str], + gapic_configs: List[GapicConfig], + googleapis_commitish: Optional[str], + group_id: Optional[str] = 'com.google.cloud', + requires_billing: Optional[bool] = True, + ): + self.api_shortname = api_shortname + self.name_pretty = name_pretty + self.library_type = library_type + self.artifact_id = artifact_id + self.requires_billing = requires_billing + self.api_description = api_description + self.product_documentation = product_documentation + self.client_documentation = client_documentation + self.rest_documentation = rest_documentation + self.rpc_documentation = rpc_documentation + self.group_id = group_id + self.gapic_configs = gapic_configs + self.googleapis_commitish = googleapis_commitish diff --git a/library_generation/new_client/new-client.py b/library_generation/new_client/new-client.py index 5b69f335c8..26d0afb7f3 100644 --- a/library_generation/new_client/new-client.py +++ b/library_generation/new_client/new-client.py @@ -21,8 +21,11 @@ import click import templates from git import Repo -from client_inputs import parse import shutil +current_dir = os.path.dirname(os.path.realpath(__file__)) +parent_dir = os.path.dirname(current_dir) +sys.path.append(parent_dir) +from model.ClientInputs import parse @click.group(invoke_without_command=False) diff --git a/library_generation/owlbot/bin/entrypoint.sh b/library_generation/owlbot/bin/entrypoint.sh index 65e3a5fa2a..26ed707591 100755 --- a/library_generation/owlbot/bin/entrypoint.sh +++ b/library_generation/owlbot/bin/entrypoint.sh @@ -65,6 +65,8 @@ function processModule() { echo "...done" } +# This script can be used to process HW libraries and monorepo +# (google-cloud-java) libraries, which require a slightly different treatment # monorepo folders have an .OwlBot.yaml file in the module folder (e.g. 
# java-asset/.OwlBot.yaml), whereas HW libraries have the yaml in # `.github/.OwlBot.yaml` diff --git a/library_generation/postprocess_library.sh b/library_generation/postprocess_library.sh index bf07127427..f7035ec6c8 100755 --- a/library_generation/postprocess_library.sh +++ b/library_generation/postprocess_library.sh @@ -13,25 +13,43 @@ # 2 - preprocessed_sources_path: used to transfer the raw grpc, proto and gapic # libraries into the postprocessing_target via copy-code # 3 - versions_file: path to file containing versions to be applied to the poms +# 4 - owlbot_cli_source_folder: alternative folder with a structure exactly like +# googleapis-gen. It will be used instead of preprocessed_sources_path if +# 5 - owlbot_cli_image_sha: SHA of the image containing the OwlBot CLI +# 6 - synthtool_commitish: Commit SHA of the synthtool repo +# provided +# 7 - is_monorepo: whether this library is a monorepo, which implies slightly +# different logic set -xeo pipefail scripts_root=$(dirname "$(readlink -f "$0")") postprocessing_target=$1 preprocessed_sources_path=$2 versions_file=$3 +owlbot_cli_source_folder=$4 +owlbot_cli_image_sha=$5 +synthtool_commitish=$6 +is_monorepo=$7 source "${scripts_root}"/utilities.sh +declare -a required_inputs=("postprocessing_target" "versions_file" "owlbot_cli_image_sha" "synthtool_commitish" "is_monorepo") +for required_input in "${required_inputs[@]}"; do + if [[ -z "${!required_input}" ]]; then + echo "missing required ${required_input} argument, please specify one" + exit 1 + fi +done + for owlbot_file in ".repo-metadata.json" "owlbot.py" ".OwlBot.yaml" do if [[ $(find "${postprocessing_target}" -name "${owlbot_file}" | wc -l) -eq 0 ]]; then echo "necessary file for postprocessing '${owlbot_file}' was not found in postprocessing_target" - echo "please provide a postprocessing_target folder that is java owlbot compatible" + echo "please provide a postprocessing_target folder that is compatible with the OwlBot Java postprocessor" exit 1 fi done -proto_path=$(get_proto_path_from_preprocessed_sources "${preprocessed_sources_path}") # ensure pyenv scripts are available eval "$(pyenv init --path)" @@ -48,45 +66,31 @@ if [ $(pyenv virtualenvs | grep "${python_version}" | grep "postprocessing" | wc fi pyenv activate "postprocessing" -# call owl-bot-copy -owlbot_staging_folder="${postprocessing_target}/owl-bot-staging" -mkdir -p "${owlbot_staging_folder}" -echo 'Running owl-bot-copy' -pre_processed_libs_folder=$(mktemp -d) -# By default (thanks to generation templates), .OwlBot.yaml `deep-copy` section -# references a wildcard pattern matching a folder -# ending with `-java` at the leaf of proto_path. 
We then use a generated-java -# folder that will be picked up by copy-code -mkdir -p "${pre_processed_libs_folder}/${proto_path}/generated-java" -copy_directory_if_exists "${preprocessed_sources_path}" "proto" \ - "${pre_processed_libs_folder}/${proto_path}/generated-java/proto-google-cloud-library" -copy_directory_if_exists "${preprocessed_sources_path}" "grpc" \ - "${pre_processed_libs_folder}/${proto_path}/generated-java/grpc-google-cloud-library" -copy_directory_if_exists "${preprocessed_sources_path}" "gapic" \ - "${pre_processed_libs_folder}/${proto_path}/generated-java/gapic-google-cloud-library" -copy_directory_if_exists "${preprocessed_sources_path}" "samples" \ - "${pre_processed_libs_folder}/${proto_path}/generated-java/samples" -pushd "${pre_processed_libs_folder}" -# create an empty commit so owl-bot-copy can process this as a repo -# (it cannot process non-git-repositories) -git init -git commit --allow-empty -m 'empty commit' -popd # pre_processed_libs_folder +if [[ -z "${owlbot_cli_source_folder}" ]]; then + owlbot_cli_source_folder=$(mktemp -d) + build_owlbot_cli_source_folder "${postprocessing_target}" "${owlbot_cli_source_folder}" "${preprocessed_sources_path}" +fi -owlbot_cli_image_sha=$(cat "${scripts_root}/configuration/owlbot-cli-sha" | grep "sha256") +# we determine the location of the .OwlBot.yaml file by checking if the target +# folder is a monorepo folder or not +if [[ "${postprocessing_target}" == *google-cloud-java* ]]; then + owlbot_yaml_relative_path=".OwlBot.yaml" +else + owlbot_yaml_relative_path=".github/.OwlBot.yaml" +fi docker run --rm \ --user $(id -u):$(id -g) \ -v "${postprocessing_target}:/repo" \ - -v "${pre_processed_libs_folder}:/pre-processed-libraries" \ + -v "${owlbot_cli_source_folder}:/pre-processed-libraries" \ -w /repo \ --env HOME=/tmp \ gcr.io/cloud-devrel-public-resources/owlbot-cli@"${owlbot_cli_image_sha}" \ copy-code \ --source-repo-commit-hash=none \ --source-repo=/pre-processed-libraries \ - --config-file=.OwlBot.yaml + --config-file="${owlbot_yaml_relative_path}" # we clone the synthtool library and manually build it mkdir -p /tmp/synthtool @@ -95,7 +99,6 @@ if [ ! -d "synthtool" ]; then git clone https://github.com/googleapis/synthtool.git fi pushd "synthtool" -synthtool_commitish=$(cat "${scripts_root}/configuration/synthtool-commitish") git reset --hard "${synthtool_commitish}" python3 -m pip install -e . python3 -m pip install -r requirements.in diff --git a/library_generation/requirements.in b/library_generation/requirements.in new file mode 100644 index 0000000000..2bd5a0b0a8 --- /dev/null +++ b/library_generation/requirements.in @@ -0,0 +1,17 @@ +absl-py==2.0.0 +attr==0.3.2 +attrs==23.2.0 +black==23.12.1 +click==8.1.7 +gitdb==4.0.11 +GitPython==3.1.40 +Jinja2==3.1.2 +lxml==5.0.0 +MarkupSafe==2.1.3 +mypy-extensions==1.0.0 +packaging==23.2 +pathspec==0.12.1 +platformdirs==4.1.0 +PyYAML==6.0.1 +smmap==5.0.1 +typing==3.7.4.3 diff --git a/library_generation/test/__init__.py b/library_generation/test/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/library_generation/test/compare_poms.py b/library_generation/test/compare_poms.py index c2abd8da13..94c94ae128 100644 --- a/library_generation/test/compare_poms.py +++ b/library_generation/test/compare_poms.py @@ -4,16 +4,15 @@ The only comparison points are: element path (e.g. 
project/dependencies) and element text There is a special case for `dependency`, where the maven coordinates are prepared as well """ - -import sys import xml.etree.ElementTree as ET from collections import Counter +import sys +import os +current = os.path.dirname(os.path.realpath(__file__)) +parent = os.path.dirname(current) +sys.path.append(parent) +from utilities import eprint -""" -prints to stderr -""" -def eprint(*args, **kwargs): - print(*args, file=sys.stderr, **kwargs) """ Convenience method to access a node's child elements via path and get its text diff --git a/library_generation/test/generate_library_integration_test.sh b/library_generation/test/generate_library_integration_test.sh index f6084cf241..9b46304da3 100755 --- a/library_generation/test/generate_library_integration_test.sh +++ b/library_generation/test/generate_library_integration_test.sh @@ -5,14 +5,11 @@ set -xeo pipefail # This script is used to test the result of `generate_library.sh` against generated # source code in the specified repository. # Specifically, this script will do -# 1. checkout the master branch of googleapis/google and WORKSPACE -# 2. parse version of gapic-generator-java, protobuf and grpc from WORKSPACE -# 3. generate a library with proto_path and destination_path in a proto_path -# list by invoking `generate_library.sh`. GAPIC options to generate a library -# will be parsed from proto_path/BUILD.bazel. -# 4. depending on whether postprocessing is enabled, -# 4.1 checkout the master branch of googleapis-gen repository and compare the result, or -# 4.2 checkout the master branch of google-cloud-java or HW library repository and compare the result +# 1. take a configuration yaml describing the structure of the libraries to +# generate +# 2. For each api_shortname, call generate_composed_library.py to generate the groups of libraries +# 3. After the generation is done, compare the resulting library with the +# corresponding cloned repository # defaults googleapis_gen_url="git@github.com:googleapis/googleapis-gen.git" @@ -25,6 +22,7 @@ source "${script_dir}/test_utilities.sh" source "${script_dir}/../utilities.sh" output_folder="$(pwd)/output" + while [[ $# -gt 0 ]]; do key="$1" case $key in @@ -40,10 +38,6 @@ case $key in googleapis_gen_url="$2" shift ;; - -v|--versions_file) - versions_file="$2" - shift - ;; *) echo "Invalid option: [$1]" exit 1 @@ -53,166 +47,105 @@ shift # past argument or value done mkdir -p "${output_folder}" -pushd "${output_folder}" -# checkout the master branch of googleapis/google (proto files) and WORKSPACE -echo "Checking out googlapis repository..." -# sparse_clone will remove folder contents first, so we have to checkout googleapis -# only once. -sparse_clone https://github.com/googleapis/googleapis.git "google grafeas WORKSPACE" -pushd googleapis -cp -r google "${output_folder}" -cp -r grafeas "${output_folder}" -# parse version of gapic-generator-java, protobuf and grpc from WORKSPACE -gapic_generator_version=$(get_version_from_WORKSPACE "_gapic_generator_java_version" WORKSPACE "=") -echo "The version of gapic-generator-java is ${gapic_generator_version}." 
-protobuf_version=$(get_version_from_WORKSPACE "protobuf-" WORKSPACE "-") -echo "The version of protobuf is ${protobuf_version}" -popd # googleapis -popd # output_folder + if [ -f "${output_folder}/generation_times" ];then rm "${output_folder}/generation_times" fi -if [ -z "${versions_file}" ]; then - # google-cloud-java will be downloaded before each call of - # `generate_library.sh` - versions_file="${output_folder}/google-cloud-java/versions.txt" -fi -grep -v '^ *#' < "${proto_path_list}" | while IFS= read -r line; do - proto_path=$(echo "$line" | cut -d " " -f 1) - repository_path=$(echo "$line" | cut -d " " -f 2) - skip_postprocessing=$(echo "$line" | cut -d " " -f 3) - # parse destination_path - pushd "${output_folder}" - echo "Checking out googleapis-gen repository..." - sparse_clone "${googleapis_gen_url}" "${proto_path}" - destination_path=$(compute_destination_path "${proto_path}" "${output_folder}") - # parse GAPIC options from proto_path/BUILD.bazel - proto_build_file_path="${proto_path}/BUILD.bazel" - proto_only=$(get_proto_only_from_BUILD "${proto_build_file_path}") - gapic_additional_protos=$(get_gapic_additional_protos_from_BUILD "${proto_build_file_path}") - transport=$(get_transport_from_BUILD "${proto_build_file_path}") - rest_numeric_enums=$(get_rest_numeric_enums_from_BUILD "${proto_build_file_path}") - gapic_yaml=$(get_gapic_yaml_from_BUILD "${proto_build_file_path}") - service_config=$(get_service_config_from_BUILD "${proto_build_file_path}") - service_yaml=$(get_service_yaml_from_BUILD "${proto_build_file_path}") - include_samples=$(get_include_samples_from_BUILD "${proto_build_file_path}") - popd # output_folder - echo "GAPIC options are - transport=${transport}, - rest_numeric_enums=${rest_numeric_enums}, - gapic_yaml=${gapic_yaml}, - service_config=${service_config}, - service_yaml=${service_yaml}, - include_samples=${include_samples}." - pushd "${output_folder}" - if [ "${skip_postprocessing}" == "true" ]; then - echo 'this library is not intended for postprocessing test' - popd # output folder - continue - else - echo 'this is a monorepo library' - sparse_clone "https://github.com/googleapis/google-cloud-java.git" "${repository_path} google-cloud-pom-parent google-cloud-jar-parent versions.txt .github" +declare -a configuration_yamls=( + "${script_dir}/resources/integration/java-bigtable/generation_config.yaml" + "${script_dir}/resources/integration/google-cloud-java/generation_config.yaml" +) - # compute path from output_folder to source of truth library location - # (e.g. google-cloud-java/java-compute) - repository_path="google-cloud-java/${repository_path}" - target_folder="${output_folder}/${repository_path}" - popd # output_folder - fi - # generate GAPIC client library - echo "Generating library from ${proto_path}, to ${destination_path}..." - generation_start=$(date "+%s") - if [ "${enable_postprocessing}" == "true" ]; then - if [[ "${repository_path}" == "null" ]]; then - # we need a repository to compare the generated results with. 
Skip this - # library - continue - fi - "${library_generation_dir}"/generate_library.sh \ - -p "${proto_path}" \ - -d "${repository_path}" \ - --gapic_generator_version "${gapic_generator_version}" \ - --protobuf_version "${protobuf_version}" \ - --proto_only "${proto_only}" \ - --gapic_additional_protos "${gapic_additional_protos}" \ - --transport "${transport}" \ - --rest_numeric_enums "${rest_numeric_enums}" \ - --gapic_yaml "${gapic_yaml}" \ - --service_config "${service_config}" \ - --service_yaml "${service_yaml}" \ - --include_samples "${include_samples}" \ - --enable_postprocessing "true" \ - --versions_file "${output_folder}/google-cloud-java/versions.txt" + +for configuration_yaml in "${configuration_yamls[@]}"; do + library_api_shortnames=$(py_util "get_configuration_yaml_library_api_shortnames" "${configuration_yaml}") + destination_path=$(py_util "get_configuration_yaml_destination_path" "${configuration_yaml}") + pushd "${output_folder}" + if [[ "${destination_path}" == *google-cloud-java* ]]; then + git clone "https://github.com/googleapis/google-cloud-java" + repository_path="${output_folder}/google-cloud-java" else - "${library_generation_dir}"/generate_library.sh \ - -p "${proto_path}" \ - -d "${destination_path}" \ - --gapic_generator_version "${gapic_generator_version}" \ - --protobuf_version "${protobuf_version}" \ - --proto_only "${proto_only}" \ - --gapic_additional_protos "${gapic_additional_protos}" \ - --transport "${transport}" \ - --rest_numeric_enums "${rest_numeric_enums}" \ - --gapic_yaml "${gapic_yaml}" \ - --service_config "${service_config}" \ - --service_yaml "${service_yaml}" \ - --include_samples "${include_samples}" \ - --enable_postprocessing "false" + git clone "https://github.com/googleapis/${destination_path}" + repository_path="${output_folder}/${destination_path}" fi - generation_end=$(date "+%s") - # some generations are less than 1 second (0 produces exit code 1 in `expr`) - generation_duration_seconds=$(expr "${generation_end}" - "${generation_start}" || true) - echo "Generation time for ${repository_path} was ${generation_duration_seconds} seconds." - pushd "${output_folder}" - echo "${proto_path} ${generation_duration_seconds}" >> generation_times - - echo "Generate library finished." - echo "Compare generation result..." - if [ $enable_postprocessing == "true" ]; then - echo "Checking out repository..." - pushd "${target_folder}" - source_diff_result=0 - git diff \ - --ignore-space-at-eol \ - -r \ - --exit-code \ - -- \ - ':!*pom.xml' \ - ':!*README.md' \ - ':!*package-info.java' \ - || source_diff_result=$? - - pom_diff_result=$(compare_poms "${target_folder}") - popd # target_folder - if [[ ${source_diff_result} == 0 ]] && [[ ${pom_diff_result} == 0 ]] ; then - echo "SUCCESS: Comparison finished, no difference is found." - # Delete google-cloud-java to allow a sparse clone of the next library - rm -rdf google-cloud-java - elif [ ${source_diff_result} != 0 ]; then - echo "FAILURE: Differences found in proto path: ${proto_path}." 
- exit "${source_diff_result}" - elif [ ${pom_diff_result} != 0 ]; then - echo "FAILURE: Differences found in generated poms" - exit "${pom_diff_result}" - fi - elif [ "${enable_postprocessing}" == "false" ]; then - # include gapic_metadata.json and package-info.java after - # resolving https://github.com/googleapis/sdk-platform-java/issues/1986 - source_diff_result=0 - diff --strip-trailing-cr -r "googleapis-gen/${proto_path}/${destination_path}" "${output_folder}/${destination_path}" \ - -x "*gradle*" \ - -x "gapic_metadata.json" \ - -x "package-info.java" || source_diff_result=$? - if [ ${source_diff_result} == 0 ] ; then - echo "SUCCESS: Comparison finished, no difference is found." - else - echo "FAILURE: Differences found in proto path: ${proto_path}." - exit "${source_diff_result}" + popd + + for api_shortname in ${library_api_shortnames}; do + pushd "${output_folder}" + + echo "Generating library ${api_shortname}..." + generation_start=$(date "+%s") + python3 "${library_generation_dir}"/main.py generate-from-yaml \ + --generation-config-yaml "${configuration_yaml}" \ + --enable-postprocessing "${enable_postprocessing}" \ + --target-library-api-shortname "${api_shortname}" \ + --repository-path "${repository_path}" + generation_end=$(date "+%s") + + # some generations are less than 1 second (0 produces exit code 1 in `expr`) + generation_duration_seconds=$(expr "${generation_end}" - "${generation_start}" || true) + echo "Generation time for ${api_shortname} was ${generation_duration_seconds} seconds." + pushd "${output_folder}" + echo "${proto_path} ${generation_duration_seconds}" >> generation_times + + echo "Generate library finished." + echo "Compare generation result..." + if [ ${enable_postprocessing} == "true" ]; then + echo "Checking out repository..." + if [[ "${destination_path}" == *google-cloud-java* ]]; then + target_folder="${output_folder}/google-cloud-java/java-${api_shortname}" + else + target_folder="${output_folder}/java-${api_shortname}" + fi + + pushd "${target_folder}" + source_diff_result=0 + git diff \ + --ignore-space-at-eol \ + -r \ + --exit-code \ + -- \ + . \ + ':!*pom.xml' \ + ':!*README.md' \ + ':!*gapic_metadata.json' \ + ':!*reflect-config.json' \ + ':!*package-info.java' \ + || source_diff_result=$? + + pom_diff_result=$(compare_poms "${target_folder}") + popd # target_folder + if [[ ${source_diff_result} == 0 ]] && [[ ${pom_diff_result} == 0 ]] ; then + echo "SUCCESS: Comparison finished, no difference is found." + elif [ ${source_diff_result} != 0 ]; then + echo "FAILURE: Differences found in proto path: java-${api_shortname}." + exit "${source_diff_result}" + elif [ ${pom_diff_result} != 0 ]; then + echo "FAILURE: Differences found in generated java-${api_shortname}'s poms" + exit "${pom_diff_result}" + fi + elif [ "${enable_postprocessing}" == "false" ]; then + for proto_path in "${proto_paths[@]}"; do + destination_path=$(compute_destination_path "${proto_path}" "${output_folder}") + # include gapic_metadata.json and package-info.java after + # resolving https://github.com/googleapis/sdk-platform-java/issues/1986 + source_diff_result=0 + diff --strip-trailing-cr -r "googleapis-gen/${proto_path}/${destination_path}" "${output_folder}/${destination_path}" \ + -x "*gradle*" \ + -x "gapic_metadata.json" \ + -x "package-info.java" || source_diff_result=$? + if [ ${source_diff_result} == 0 ] ; then + echo "SUCCESS: Comparison finished, no difference is found." + else + echo "FAILURE: Differences found in proto path: ${proto_path}." 
+ exit "${source_diff_result}" + fi + done fi - fi - popd # output_folder + popd # output_folder + done done echo "ALL TESTS SUCCEEDED" echo "generation times in seconds (does not consider repo checkout):" diff --git a/library_generation/test/generate_library_unit_tests.sh b/library_generation/test/generate_library_unit_tests.sh index 6fde314788..e9f4954298 100755 --- a/library_generation/test/generate_library_unit_tests.sh +++ b/library_generation/test/generate_library_unit_tests.sh @@ -208,103 +208,6 @@ generate_library_failed_with_invalid_grpc_version() { cleanup "${destination}" } -get_gapic_additional_protos_from_BUILD_common_resources_test() { - local proto_path="${script_dir}/resources/search_additional_protos/BUILD_common_resources.bazel" - local addition_protos - addition_protos=$(get_gapic_additional_protos_from_BUILD "${proto_path}") - assertEquals "google/cloud/common_resources.proto" "${addition_protos}" -} - -get_gapic_additional_protos_from_BUILD_iam_policy_test() { - local proto_path="${script_dir}/resources/search_additional_protos/BUILD_iam_policy.bazel" - local addition_protos - addition_protos=$(get_gapic_additional_protos_from_BUILD "${proto_path}") - assertEquals "google/cloud/common_resources.proto google/iam/v1/iam_policy.proto" "${addition_protos}" -} - -get_gapic_additional_protos_from_BUILD_locations_test() { - local proto_path="${script_dir}/resources/search_additional_protos/BUILD_locations.bazel" - local addition_protos - addition_protos=$(get_gapic_additional_protos_from_BUILD "${proto_path}") - assertEquals "google/cloud/common_resources.proto google/cloud/location/locations.proto" "${addition_protos}" -} - -get_gapic_additional_protos_from_BUILD_iam_locations_test() { - local proto_path="${script_dir}/resources/search_additional_protos/BUILD_iam_locations.bazel" - local addition_protos - addition_protos=$(get_gapic_additional_protos_from_BUILD "${proto_path}") - assertEquals "google/cloud/common_resources.proto google/iam/v1/iam_policy.proto google/cloud/location/locations.proto" "${addition_protos}" -} - -get_transport_from_BUILD_grpc_rest_test() { - local build_file="${script_dir}/resources/misc/BUILD_grpc_rest.bazel" - local transport - transport=$(get_transport_from_BUILD "${build_file}") - assertEquals "grpc+rest" "${transport}" -} - -get_transport_from_BUILD_grpc_test() { - local build_file="${script_dir}/resources/misc/BUILD_grpc.bazel" - local transport - transport=$(get_transport_from_BUILD "${build_file}") - assertEquals "grpc" "${transport}" -} - -get_transport_from_BUILD_rest_test() { - local build_file="${script_dir}/resources/misc/BUILD_rest.bazel" - local transport - transport=$(get_transport_from_BUILD "${build_file}") - assertEquals "rest" "${transport}" -} - -get_rest_numeric_enums_from_BUILD_true_test() { - local build_file="${script_dir}/resources/misc/BUILD_rest_numeric_enums_true.bazel" - local rest_numeric_enums - rest_numeric_enums=$(get_rest_numeric_enums_from_BUILD "${build_file}") - assertEquals "true" "${rest_numeric_enums}" -} - -get_rest_numeric_enums_from_BUILD_false_test() { - local build_file="${script_dir}/resources/misc/BUILD_rest_numeric_enums_false.bazel" - local rest_numeric_enums - rest_numeric_enums=$(get_rest_numeric_enums_from_BUILD "${build_file}") - assertEquals "false" "${rest_numeric_enums}" -} - -get_rest_numeric_enums_from_BUILD_empty_test() { - local build_file="${script_dir}/resources/misc/BUILD_rest_numeric_enums_empty.bazel" - local rest_numeric_enums - 
rest_numeric_enums=$(get_rest_numeric_enums_from_BUILD "${build_file}") - assertEquals "false" "${rest_numeric_enums}" -} - -get_include_samples_from_BUILD_true_test() { - local build_file="${script_dir}/resources/misc/BUILD_include_samples_true.bazel" - local include_samples - include_samples=$(get_include_samples_from_BUILD "${build_file}") - assertEquals "true" "${include_samples}" -} - -get_include_samples_from_BUILD_false_test() { - local build_file="${script_dir}/resources/misc/BUILD_include_samples_false.bazel" - local include_samples - include_samples=$(get_include_samples_from_BUILD "${build_file}") - assertEquals "false" "${include_samples}" -} - -get_include_samples_from_BUILD_empty_test() { - local build_file="${script_dir}/resources/misc/BUILD_include_samples_empty.bazel" - local include_samples - include_samples=$(get_include_samples_from_BUILD "${build_file}") - assertEquals "false" "${include_samples}" -} - -get_version_from_valid_WORKSPACE_test() { - workspace_file="${script_dir}/resources/misc/TESTWORKSPACE" - obtained_ggj_version=$(get_version_from_WORKSPACE "_gapic_generator_java_version" "${workspace_file}") - assertEquals '2.25.1-SNAPSHOT' "${obtained_ggj_version}" -} - copy_directory_if_exists_valid_folder_succeeds() { local source_folder="${script_dir}/resources" local destination="${script_dir}/test_destination_folder" @@ -372,20 +275,6 @@ test_list=( generate_library_failed_with_invalid_generator_version generate_library_failed_with_invalid_protobuf_version generate_library_failed_with_invalid_grpc_version - get_gapic_additional_protos_from_BUILD_common_resources_test - get_gapic_additional_protos_from_BUILD_iam_policy_test - get_gapic_additional_protos_from_BUILD_locations_test - get_gapic_additional_protos_from_BUILD_iam_locations_test - get_transport_from_BUILD_grpc_rest_test - get_transport_from_BUILD_grpc_test - get_transport_from_BUILD_rest_test - get_rest_numeric_enums_from_BUILD_true_test - get_rest_numeric_enums_from_BUILD_false_test - get_rest_numeric_enums_from_BUILD_empty_test - get_include_samples_from_BUILD_true_test - get_include_samples_from_BUILD_false_test - get_include_samples_from_BUILD_empty_test - get_version_from_valid_WORKSPACE_test copy_directory_if_exists_valid_folder_succeeds copy_directory_if_exists_invalid_folder_does_not_copy get_proto_path_from_preprocessed_sources_valid_library_succeeds diff --git a/library_generation/test/resources/integration/google-cloud-java/generation_config.yaml b/library_generation/test/resources/integration/google-cloud-java/generation_config.yaml index f8c2808739..7b73f329d0 100644 --- a/library_generation/test/resources/integration/google-cloud-java/generation_config.yaml +++ b/library_generation/test/resources/integration/google-cloud-java/generation_config.yaml @@ -1,25 +1,25 @@ #Required. -gapic_generator_version: 2.30.0 +gapic_generator_version: 2.32.0 #Optional. -grpc_version: 1.59.1 -#Optional. -protobuf_version: 3.25.1 -#Required. -googleapis-commitish: 4512234113a18c1fda1fb0d0ceac8f4b4efe9801 -#Required. -owlbot-cli-image: sha256:623647ee79ac605858d09e60c1382a716c125fb776f69301b72de1cd35d49409 +# grpc_version: 1.60.0 +#Optional. The protobuf version in googleapis (not sdk-platform-java) is the actual source of truth for generated protos in google-cloud-java +protobuf_version: 23.2 #Required. -synthtool-commitish: 59fe44fde9866a26e7ee4e4450fd79f67f8cf599 +googleapis_commitish: 4512234113a18c1fda1fb0d0ceac8f4b4efe9801 #Required. -python-version: 3.11.2 -#Optional. 
The root folder name of generated client libraries. If empty, modules will be created under current folder, useful for single module -destination-path: google-cloud-java +owlbot_cli_image: sha256:623647ee79ac605858d09e60c1382a716c125fb776f69301b72de1cd35d49409 #Required. +synthtool_commitish: fac8444edd5f5526e804c306b766a271772a3e2f +#Required. The root folder name of generated client libraries. +destination_path: google-cloud-java +#Required. If the number of libraries is greater than 1, the scripts will treat the target repository as a monorepo, with a slightly different workflow mainly in the postprocessing stage libraries: #Required. Can be used for populating the folder name java-{api_shortName}. This is also the destination-name in new-client.py. - api_shortname: asset + #Optional. Overrides the root-level commit hash + googleapis_commitish: 4512234113a18c1fda1fb0d0ceac8f4b4efe9801 #Optional. The default value is the title of service yaml - name-pretty: Cloud Asset + name_pretty: Cloud Asset #Required. library_type: GAPIC_AUTO #Optional. The default value is com.google.cloud @@ -27,7 +27,7 @@ libraries: #Optional. The default value is google.cloud.{api_shortname} artifact_id: google.cloud.asset #Optional. The default value is true. - requires-billing: true + requires_billing: true #Optional. The default value is documentation.summary from service yaml api_description: #Optional. @@ -48,5 +48,43 @@ libraries: - proto_path: google/cloud/asset/v1p7beta1 - api_shortname: speech library_type: GAPIC_AUTO - services: - - proto_path: google/cloud/asset/v1 + GAPICs: + - proto_path: google/cloud/speech/v1 + - proto_path: google/cloud/speech/v1p1beta1 + - proto_path: google/cloud/speech/v2 + - api_shortname: apigee-connect + library_type: GAPIC_AUTO + GAPICs: + - proto_path: google/cloud/apigeeconnect/v1 + - api_shortname: dialogflow + library_type: GAPIC_AUTO + GAPICs: + - proto_path: google/cloud/dialogflow/v2beta1 + - proto_path: google/cloud/dialogflow/v2 + - api_shortname: compute + library_type: GAPIC_AUTO + GAPICs: + - proto_path: google/cloud/compute/v1 + - api_shortname: kms + library_type: GAPIC_AUTO + GAPICs: + - proto_path: google/cloud/kms/v1 + - api_shortname: redis + library_type: GAPIC_AUTO + GAPICs: + - proto_path: google/cloud/redis/v1 + - proto_path: google/cloud/redis/v1beta1 + - api_shortname: containeranalysis + library_type: GAPIC_AUTO + GAPICs: + - proto_path: google/devtools/containeranalysis/v1 + - api_shortname: iam + library_type: GAPIC_AUTO + GAPICs: + - proto_path: google/iam/v1 + - proto_path: google/iam/v2 + - api_shortname: iamcredentials + library_type: GAPIC_AUTO + GAPICs: + - proto_path: google/iam/credentials/v1 + diff --git a/library_generation/test/resources/integration/java-bigtable/generation_config.yaml b/library_generation/test/resources/integration/java-bigtable/generation_config.yaml new file mode 100644 index 0000000000..4a82a3e2c4 --- /dev/null +++ b/library_generation/test/resources/integration/java-bigtable/generation_config.yaml @@ -0,0 +1,14 @@ +gapic_generator_version: 2.32.0 +grpc_version: 1.61.0 +protobuf_version: 23.2 +googleapis_commitish: 4512234113a18c1fda1fb0d0ceac8f4b4efe9801 +owlbot_cli_image: sha256:623647ee79ac605858d09e60c1382a716c125fb776f69301b72de1cd35d49409 +synthtool_commitish: 6612ab8f3afcd5e292aecd647f0fa68812c9f5b5 +destination_path: java-bigtable +libraries: + - api_shortname: bigtable + name_pretty: Cloud Bigtable + library_type: GAPIC_COMBO + GAPICs: + - proto_path: google/bigtable/admin/v2 + - proto_path: 
google/bigtable/v2 diff --git a/library_generation/test/resources/misc/BUILD_gapic_yaml.bazel b/library_generation/test/resources/misc/BUILD_gapic_yaml.bazel new file mode 100644 index 0000000000..b55f4550d8 --- /dev/null +++ b/library_generation/test/resources/misc/BUILD_gapic_yaml.bazel @@ -0,0 +1,3 @@ +java_gapic_library( + gapic_yaml = "test_gapic_yaml.yaml", +) diff --git a/library_generation/test/resources/misc/BUILD_no_gapic_yaml.bazel b/library_generation/test/resources/misc/BUILD_no_gapic_yaml.bazel new file mode 100644 index 0000000000..1e9462aa30 --- /dev/null +++ b/library_generation/test/resources/misc/BUILD_no_gapic_yaml.bazel @@ -0,0 +1,3 @@ +java_gapic_library( + gapic_yaml = None +) diff --git a/library_generation/test/resources/misc/BUILD_no_service_config.bazel b/library_generation/test/resources/misc/BUILD_no_service_config.bazel new file mode 100644 index 0000000000..dbde6de05c --- /dev/null +++ b/library_generation/test/resources/misc/BUILD_no_service_config.bazel @@ -0,0 +1,3 @@ +java_gapic_library( + grpc_service_config = None +) diff --git a/library_generation/test/resources/misc/BUILD_no_service_yaml.bazel b/library_generation/test/resources/misc/BUILD_no_service_yaml.bazel new file mode 100644 index 0000000000..05bae16d5d --- /dev/null +++ b/library_generation/test/resources/misc/BUILD_no_service_yaml.bazel @@ -0,0 +1,3 @@ +java_gapic_library( + service_yaml = None +) diff --git a/library_generation/test/resources/misc/BUILD_service_config.bazel b/library_generation/test/resources/misc/BUILD_service_config.bazel new file mode 100644 index 0000000000..097d1bb6bd --- /dev/null +++ b/library_generation/test/resources/misc/BUILD_service_config.bazel @@ -0,0 +1,3 @@ +java_gapic_library( + grpc_service_config = "test_service_config.json" +) diff --git a/library_generation/test/resources/misc/BUILD_service_yaml.bazel b/library_generation/test/resources/misc/BUILD_service_yaml.bazel new file mode 100644 index 0000000000..f7e4c91f4e --- /dev/null +++ b/library_generation/test/resources/misc/BUILD_service_yaml.bazel @@ -0,0 +1,3 @@ +java_gapic_library( + service_yaml = "test_service_yaml.yaml" +) diff --git a/library_generation/test/resources/proto_path_list.txt b/library_generation/test/resources/proto_path_list.txt deleted file mode 100755 index 5f82059e52..0000000000 --- a/library_generation/test/resources/proto_path_list.txt +++ /dev/null @@ -1,25 +0,0 @@ -# This file is used in integration test against `generate_library.sh`. 
-# Format: -# proto_path repository_path skip_postprocessing_test -# google/bigtable/admin/v2 java-bigtable true -# google/bigtable/v2 java-bigtable true -google/cloud/apigeeconnect/v1 java-apigee-connect false -google/cloud/asset/v1p5beta1 java-asset false -# google/cloud/asset/v1p2beta1 java-asset false -google/cloud/asset/v1p1beta1 java-asset false -google/cloud/asset/v1p7beta1 java-asset false -google/cloud/asset/v1 java-asset false -# google/cloud/dialogflow/v2beta1 java-dialogflow false -# google/cloud/dialogflow/v2 java-dialogflow false -# google/cloud/compute/v1 java-compute false -google/cloud/kms/v1 java-kms false -google/cloud/redis/v1 java-redis false -google/cloud/redis/v1beta1 java-redis false -# google/example/library/v1 google-cloud-example-library-v1-java null false -google/devtools/containeranalysis/v1 java-containeranalysis false -google/iam/v1 java-iam true -google/iam/v2 java-iam false -google/iam/credentials/v1 java-iamcredentials false -google/logging/v2 java-logging true -google/pubsub/v1 java-pubsub true -google/storage/v2 java-storage true diff --git a/library_generation/test/test_utilities.sh b/library_generation/test/test_utilities.sh index 3da3bd0392..007dc8e6d9 100755 --- a/library_generation/test/test_utilities.sh +++ b/library_generation/test/test_utilities.sh @@ -26,90 +26,6 @@ __test_failed() { failed_tests="${failed_tests} ${failed_test}" } -# Used to obtain configuration values from a bazel BUILD file -# -# inspects a $build_file for a certain $rule (e.g. java_gapic_library). If the -# first 15 lines after the declaration of the rule contain $pattern, then -# it will return $if_match if $pattern is found, otherwise $default -__get_config_from_BUILD() { - build_file=$1 - rule=$2 - pattern=$3 - default=$4 - if_match=$5 - - result="${default}" - if grep -A 20 "${rule}" "${build_file}" | grep -q "${pattern}"; then - result="${if_match}" - fi - echo "${result}" -} - -__get_gapic_option_from_BUILD() { - local build_file=$1 - local pattern=$2 - local gapic_option - local file_path - gapic_option=$(grep "${pattern}" "${build_file}" |\ - head -1 |\ - sed 's/.*\"\([^]]*\)\".*/\1/g' |\ - sed 's/^[[:space:]]*//;s/[[:space:]]*$//' - ) - if [ -z "${gapic_option}" ] || [[ "${gapic_option}" == *"None"* ]]; then - echo "" - return - fi - - if [[ "${gapic_option}" == ":"* ]] || [[ "${gapic_option}" == "*"* ]]; then - # if gapic_option starts with : or *, remove the first character. - gapic_option="${gapic_option:1}" - elif [[ "${gapic_option}" == "//"* ]]; then - # gapic option is a bazel target, use the file path and name directly. - # remove the leading "//". - gapic_option="${gapic_option:2}" - # replace ":" with "/" - gapic_option="${gapic_option//://}" - echo "${gapic_option}" - return - fi - - file_path="${build_file%/*}" - # Make sure gapic option (*.yaml or *.json) exists in proto_path; otherwise - # reset gapic option to empty string. - if [ -f "${file_path}/${gapic_option}" ]; then - gapic_option="${file_path}/${gapic_option}" - else - echo "WARNING: file ${file_path}/${gapic_option} does not exist, reset gapic option to empty string." 
>&2 - gapic_option="" - fi - echo "${gapic_option}" -} - -__get_iam_policy_from_BUILD() { - local build_file=$1 - local contains_iam_policy - contains_iam_policy=$(__get_config_from_BUILD \ - "${build_file}" \ - "proto_library_with_info(" \ - "//google/iam/v1:iam_policy_proto" \ - "false" \ - "true" - ) - echo "${contains_iam_policy}" -} - -__get_locations_from_BUILD() { - local build_file=$1 - local contains_locations - contains_locations=$(__get_config_from_BUILD \ - "${build_file}" \ - "proto_library_with_info(" \ - "//google/cloud/location:location_proto" \ - "false" \ - "true" - ) - echo "${contains_locations}" -} ############# Functions used in test execution ############# @@ -167,136 +83,11 @@ execute_tests() { } ############# Utility functions used in `generate_library_integration_tests.sh` ############# -get_proto_only_from_BUILD() { - local build_file=$1 - local proto_only - proto_only=$(__get_config_from_BUILD \ - "${build_file}" \ - "java_gapic_library(" \ - "java_gapic_library" \ - "true" \ - "false" - ) - echo "${proto_only}" -} - -# Apart from proto files in proto_path, additional protos are needed in order -# to generate GAPIC client libraries. -# In most cases, these protos should be within google/ directory, which is -# pulled from googleapis as a prerequisite. -# Get additional protos in BUILD.bazel. -get_gapic_additional_protos_from_BUILD() { - local build_file=$1 - local gapic_additional_protos="google/cloud/common_resources.proto" - if [[ $(__get_iam_policy_from_BUILD "${build_file}") == "true" ]]; then - gapic_additional_protos="${gapic_additional_protos} google/iam/v1/iam_policy.proto" - fi - if [[ $(__get_locations_from_BUILD "${build_file}") == "true" ]]; then - gapic_additional_protos="${gapic_additional_protos} google/cloud/location/locations.proto" - fi - echo "${gapic_additional_protos}" -} - -get_transport_from_BUILD() { - local build_file=$1 - local transport - transport=$(__get_config_from_BUILD \ - "${build_file}" \ - "java_gapic_library(" \ - "grpc+rest" \ - "grpc" \ - "grpc+rest" - ) - # search again because the transport maybe `rest`. 
- transport=$(__get_config_from_BUILD \ - "${build_file}" \ - "java_gapic_library(" \ - "transport = \"rest\"" \ - "${transport}" \ - "rest" - ) - echo "${transport}" -} - -get_rest_numeric_enums_from_BUILD() { - local build_file=$1 - local rest_numeric_enums - rest_numeric_enums=$(__get_config_from_BUILD \ - "${build_file}" \ - "java_gapic_library(" \ - "rest_numeric_enums = True" \ - "false" \ - "true" - ) - echo "${rest_numeric_enums}" -} - -get_gapic_yaml_from_BUILD() { - local build_file=$1 - local gapic_yaml - gapic_yaml=$(__get_gapic_option_from_BUILD "${build_file}" "gapic_yaml = ") - echo "${gapic_yaml}" -} - -get_service_config_from_BUILD() { - local build_file=$1 - local service_config - service_config=$(__get_gapic_option_from_BUILD "${build_file}" "grpc_service_config = ") - echo "${service_config}" -} - -get_service_yaml_from_BUILD() { - local build_file=$1 - local service_yaml - service_yaml=$(__get_gapic_option_from_BUILD "${build_file}" "service_yaml") - echo "${service_yaml}" -} - -get_include_samples_from_BUILD() { - local build_file=$1 - local include_samples - include_samples=$(__get_config_from_BUILD \ - "${build_file}" \ - "java_gapic_assembly_gradle_pkg(" \ - "include_samples = True" \ - "false" \ - "true" - ) - echo "${include_samples}" -} # Obtains a version from a bazel WORKSPACE file # # versions look like "_ggj_version="1.2.3" # It will return 1.2.3 for such example -get_version_from_WORKSPACE() { - version_key_word=$1 - workspace=$2 - version=$(\ - grep "${version_key_word}" "${workspace}" |\ - head -n 1 |\ - sed 's/\(.*\) = "\(.*\)"\(.*\)/\2/' |\ - sed 's/[a-zA-Z-]*//' - ) - echo "${version}" -} - -# Convenience function to clone only the necessary folders from a git repository -sparse_clone() { - repo_url=$1 - paths=$2 - commitish=$3 - clone_dir=$(basename "${repo_url%.*}") - rm -rf "${clone_dir}" - git clone -n --depth=1 --no-single-branch --filter=tree:0 "${repo_url}" - pushd "${clone_dir}" - if [ -n "${commitish}" ]; then - git checkout "${commitish}" - fi - git sparse-checkout set --no-cone ${paths} - git checkout - popd -} # performs a deep structural comparison between the current pom in a git # folder and the one at HEAD. @@ -313,9 +104,9 @@ compare_poms() { set -e result=0 if [ "${os_architecture}" == "linux-x86_64" ]; then - find . -name 'pom.xml' -print0 | xargs -i -0 python "${test_utilities_script_dir}/compare_poms.py" {} {}.new false || result=$? + find . -name 'pom.xml' -print0 | xargs -i -0 python3 "${test_utilities_script_dir}/compare_poms.py" {} {}.new false || result=$? else - find . -name 'pom.xml' -print0 | xargs -I{} -0 python "${test_utilities_script_dir}/compare_poms.py" {} {}.new false || result=$? + find . -name 'pom.xml' -print0 | xargs -I{} -0 python3 "${test_utilities_script_dir}/compare_poms.py" {} {}.new false || result=$? 
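+    # note: GNU xargs on Linux accepts the -i shorthand, while BSD xargs on
+    # macOS requires the -I{} form, hence the two branches above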
fi popd &> /dev/null # target_dir echo ${result} diff --git a/library_generation/test/unit_tests.py b/library_generation/test/unit_tests.py new file mode 100644 index 0000000000..13d2eaacf9 --- /dev/null +++ b/library_generation/test/unit_tests.py @@ -0,0 +1,190 @@ +""" +Unit tests for utilities.py +""" + +import unittest +import os +import io +import sys +import contextlib +import subprocess +current = os.path.dirname(os.path.realpath(__file__)) +parent = os.path.dirname(current) +sys.path.append(parent) +import utilities as util +from model.GapicConfig import GapicConfig +from model.GenerationConfig import GenerationConfig +from model.ClientInputs import parse as parse_build_file + +script_dir = os.path.dirname(os.path.realpath(__file__)) +resources_dir = os.path.join(script_dir, 'resources') + +class UtilitiesTest(unittest.TestCase): + + CONFIGURATION_YAML_PATH = os.path.join(current, 'resources', 'integration', + 'google-cloud-java', 'generation_config.yaml') + + def test_create_argument_valid_container_succeeds(self): + container_value = 'google/test/v1' + container = GapicConfig(container_value) + argument_key = 'proto_path' + result = util.create_argument(argument_key, container) + self.assertEqual([ f'--{argument_key}', container_value], result) + + def test_create_argument_empty_container_returns_empty_list(self): + container = dict() + argument_key = 'proto_path' + result = util.create_argument(argument_key, container) + self.assertEqual([], result) + + def test_create_argument_none_container_fails(self): + container = None + argument_key = 'proto_path' + result = util.create_argument(argument_key, container) + self.assertEqual([], result) + + def test_get_configuration_yaml_library_api_shortnames_valid_input_returns_valid_list(self): + result = util.get_configuration_yaml_library_api_shortnames(self.CONFIGURATION_YAML_PATH) + self.assertEqual('asset speech apigee-connect dialogflow compute kms ' + + 'redis containeranalysis iam iamcredentials', result) + + def test_get_configuration_yaml_destination_path_returns_valid_destination_path(self): + result = util.get_configuration_yaml_destination_path(self.CONFIGURATION_YAML_PATH) + self.assertEqual('google-cloud-java', result) + + def test_sh_util_existent_function_succeeds(self): + result = util.sh_util('extract_folder_name path/to/folder_name') + self.assertEqual('folder_name', result) + + def test_sh_util_nonexistent_function_fails(self): + with self.assertRaises(RuntimeError): + result = util.sh_util('nonexistent_function') + + def test_eprint_valid_input_succeeds(self): + test_input='This is some test input' + # create a stdio capture object + stderr_capture = io.StringIO() + # run eprint() with the capture object + with contextlib.redirect_stderr(stderr_capture): + util.eprint(test_input) + result = stderr_capture.getvalue() + # print() appends a `\n` each time it's called + self.assertEqual(test_input + '\n', result) + + def test_delete_if_exists_preexisting_temp_files_succeeds(self): + # create temporary directory + # also remove last character (\n) + temp_dir = subprocess.check_output(['mktemp', '-d']).decode()[:-1] + + # add a file and a folder to the temp dir + file = os.path.join(temp_dir, 'temp_file') + with open(file, 'a'): + os.utime(file, None) + folder = os.path.join(temp_dir, 'temp_child_dir') + os.mkdir(folder) + self.assertEqual(2, len(os.listdir(temp_dir))) + + # remove file and folder + util.delete_if_exists(file) + util.delete_if_exists(folder) + self.assertEqual(0, len(os.listdir(temp_dir))) + + def 
test_client_inputs_parse_grpc_only_succeeds(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, '', 'BUILD_grpc.bazel')
+        self.assertEqual('grpc', parsed.transport)
+
+    def test_client_inputs_parse_grpc_rest_succeeds(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, '', 'BUILD_grpc_rest.bazel')
+        self.assertEqual('grpc+rest', parsed.transport)
+
+    def test_client_inputs_parse_rest_succeeds(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, '', 'BUILD_rest.bazel')
+        self.assertEqual('rest', parsed.transport)
+
+    def test_client_inputs_parse_empty_include_samples_succeeds(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, '', 'BUILD_include_samples_empty.bazel')
+        self.assertEqual('false', parsed.include_samples)
+
+    def test_client_inputs_parse_include_samples_false_succeeds(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, '', 'BUILD_include_samples_false.bazel')
+        self.assertEqual('false', parsed.include_samples)
+
+    def test_client_inputs_parse_include_samples_true_succeeds(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, '', 'BUILD_include_samples_true.bazel')
+        self.assertEqual('true', parsed.include_samples)
+
+    def test_client_inputs_parse_empty_rest_numeric_enums_succeeds(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, '', 'BUILD_rest_numeric_enums_empty.bazel')
+        self.assertEqual('false', parsed.rest_numeric_enum)
+
+    def test_client_inputs_parse_rest_numeric_enums_false_succeeds(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, '', 'BUILD_rest_numeric_enums_false.bazel')
+        self.assertEqual('false', parsed.rest_numeric_enum)
+
+    def test_client_inputs_parse_rest_numeric_enums_true_succeeds(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, '', 'BUILD_rest_numeric_enums_true.bazel')
+        self.assertEqual('true', parsed.rest_numeric_enum)
+
+    def test_client_inputs_parse_no_gapic_library_returns_proto_only_true(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        # include_samples_empty only has a gradle assembly rule
+        parsed = parse_build_file(build_file, '', 'BUILD_include_samples_empty.bazel')
+        self.assertEqual('true', parsed.proto_only)
+
+    def test_client_inputs_parse_with_gapic_library_returns_proto_only_false(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        # rest.bazel has a java_gapic_library rule
+        parsed = parse_build_file(build_file, '', 'BUILD_rest.bazel')
+        self.assertEqual('false', parsed.proto_only)
+
+    def test_client_inputs_parse_gapic_yaml_succeeds(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, 'test/versioned/path', 'BUILD_gapic_yaml.bazel')
+        self.assertEqual('test/versioned/path/test_gapic_yaml.yaml', parsed.gapic_yaml)
+
+    def test_client_inputs_parse_no_gapic_yaml_returns_empty_string(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, 'test/versioned/path', 'BUILD_no_gapic_yaml.bazel')
+        self.assertEqual('', parsed.gapic_yaml)
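+    # NOTE: the remaining option tests (grpc_service_config, service_yaml)
+    # follow the same pattern as gapic_yaml: values in the BUILD file are
+    # resolved relative to the versioned proto path passed to parse()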
+
+    def test_client_inputs_parse_service_config_succeeds(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, 'test/versioned/path', 'BUILD_service_config.bazel')
+        self.assertEqual('test/versioned/path/test_service_config.json', parsed.service_config)
+
+    def test_client_inputs_parse_no_service_config_returns_empty_string(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, 'test/versioned/path', 'BUILD_no_service_config.bazel')
+        self.assertEqual('', parsed.service_config)
+
+    def test_client_inputs_parse_service_yaml_succeeds(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, 'test/versioned/path', 'BUILD_service_yaml.bazel')
+        self.assertEqual('test/versioned/path/test_service_yaml.yaml', parsed.service_yaml)
+
+    def test_client_inputs_parse_no_service_yaml_returns_empty_string(self):
+        build_file = os.path.join(resources_dir, 'misc')
+        parsed = parse_build_file(build_file, 'test/versioned/path', 'BUILD_no_service_yaml.bazel')
+        self.assertEqual('', parsed.service_yaml)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/library_generation/utilities.py b/library_generation/utilities.py
new file mode 100755
index 0000000000..0772e8b260
--- /dev/null
+++ b/library_generation/utilities.py
@@ -0,0 +1,125 @@
+
+import sys
+import subprocess
+import os
+import shutil
+from collections.abc import Sequence
+from model.GenerationConfig import GenerationConfig
+from typing import List
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+
+
+"""
+Generates a list of two elements [argument, value], or returns
+an empty list if arg_val is None
+"""
+def create_argument(arg_key: str, arg_container: object) -> List[str]:
+    arg_val = getattr(arg_container, arg_key, None)
+    if arg_val is not None:
+        return [f'--{arg_key}', f'{arg_val}']
+    return []
+
+"""
+For a given configuration yaml path, it returns a space-separated string of
+the api_shortnames contained in such configuration_yaml
+"""
+def get_configuration_yaml_library_api_shortnames(generation_config_yaml: str) -> str:
+    config = GenerationConfig.from_yaml(generation_config_yaml)
+    result = ''
+    for library in config.libraries:
+        result += f'{library.api_shortname} '
+    return result[:-1]
+
+"""
+For a given configuration yaml path, it returns the destination_path
+entry at the root of the yaml
+"""
+def get_configuration_yaml_destination_path(generation_config_yaml: str) -> str:
+    config = GenerationConfig.from_yaml(generation_config_yaml)
+    return config.destination_path or ''
+
+"""
+Runs a process with the given "arguments" list and prints its output.
If the process +fails, then the whole program exits +""" +def run_process_and_print_output(arguments: List[str], job_name: str = 'Job'): + # check_output() raises an exception if it exited with a nonzero code + try: + output = subprocess.check_output(arguments, stderr=subprocess.STDOUT) + print(output.decode(), end='', flush=True) + print(f'{job_name} finished successfully') + except subprocess.CalledProcessError as ex: + print(ex.output.decode(), end='', flush=True) + print(f'{job_name} failed') + sys.exit(1) + + +""" +Calls a function defined in library_generation/utilities.sh +""" +def sh_util(statement: str, **kwargs) -> str: + if 'stdout' not in kwargs: + kwargs['stdout'] = subprocess.PIPE + if 'stderr' not in kwargs: + kwargs['stderr'] = subprocess.PIPE + output = '' + with subprocess.Popen( + ['bash', '-exc', f'source {script_dir}/utilities.sh && {statement}'], + **kwargs, + ) as proc: + print('command stderr:') + for line in proc.stderr: + print(line.decode(), end='', flush=True) + print('command stdout:') + for line in proc.stdout: + print(line.decode(), end='', flush=True) + output += line.decode() + proc.wait() + if proc.returncode != 0: + raise RuntimeError(f'function {statement} failed with exit code {proc.returncode}') + # captured stdout may contain a newline at the end, we remove it + if len(output) > 0 and output[-1] == '\n': + output = output[:-1] + return output + +""" +prints to stderr +""" +def eprint(*args, **kwargs): + print(*args, file=sys.stderr, **kwargs) + + +"""Deletes a file or folder if it exists. + + Args: + path: The path to the file or folder. +""" +def delete_if_exists(path: str): + if os.path.isfile(path): # Check if it's a file + os.remove(path) + print(f"File deleted: {path}") + elif os.path.isdir(path): # Check if it's a directory + shutil.rmtree(path) + print(f"Folder deleted: {path}") + else: + print(f"Path does not exist: {path}") + +def main(argv: Sequence[str]) -> None: + if len(argv) < 1: + raise ValueError('Usage: python generate_composed_library_args.py function_name arg1...argN') + + function_name = argv[1] + arguments = argv[2:] + try: + function = getattr(sys.modules[__name__], function_name) + print(function(*arguments)) + except AttributeError: + print(f'function name "{function_name}" not found in utilities.py') + sys.exit(1) + + + + +if __name__ == "__main__": + main(sys.argv) diff --git a/library_generation/utilities.sh b/library_generation/utilities.sh index 87feb3838c..965ed1fa0a 100755 --- a/library_generation/utilities.sh +++ b/library_generation/utilities.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash set -xeo pipefail +utilities_script_dir=$(dirname "$(realpath "${BASH_SOURCE[0]}")") # Utility functions used in `generate_library.sh` and showcase generation. extract_folder_name() { @@ -209,7 +210,11 @@ download_fail() { # gets the output folder where all sources and dependencies will be located. 
 get_output_folder() {
-  echo "$(pwd)/output"
+  if [[ $(basename "$(pwd)") != "output" ]]; then
+    echo "$(pwd)/output"
+  else
+    echo "$(pwd)"
+  fi
 }
 detect_os_architecture() {
@@ -268,3 +273,75 @@ get_proto_path_from_preprocessed_sources() {
   popd > /dev/null # sources
   echo "${result}"
 }
+
+# for a pre-processed library stored in $preprocessed_sources_path, a folder
+# tree is built on $target_folder so it looks like a googleapis-gen folder and
+# is therefore consumable by an .OwlBot.yaml file
+build_owlbot_cli_source_folder() {
+  local postprocessing_target=$1
+  local target_folder=$2
+  local preprocessed_sources_path=$3
+  local proto_path=$4
+  if [[ -z "${proto_path}" ]]; then
+    proto_path=$(get_proto_path_from_preprocessed_sources "${preprocessed_sources_path}")
+  fi
+  owlbot_staging_folder="${postprocessing_target}/owl-bot-staging"
+  mkdir -p "${owlbot_staging_folder}"
+
+  # By default (thanks to generation templates), the .OwlBot.yaml `deep-copy`
+  # section references a wildcard pattern matching a folder ending with `-java`
+  # at the leaf of proto_path. We then use a generated-java folder that will be
+  # picked up by copy-code
+  mkdir -p "${target_folder}/${proto_path}/generated-java"
+  copy_directory_if_exists "${preprocessed_sources_path}" "proto" \
+    "${target_folder}/${proto_path}/generated-java/proto-google-cloud-library"
+  copy_directory_if_exists "${preprocessed_sources_path}" "grpc" \
+    "${target_folder}/${proto_path}/generated-java/grpc-google-cloud-library"
+  copy_directory_if_exists "${preprocessed_sources_path}" "gapic" \
+    "${target_folder}/${proto_path}/generated-java/gapic-google-cloud-library"
+  copy_directory_if_exists "${preprocessed_sources_path}" "samples" \
+    "${target_folder}/${proto_path}/generated-java/samples"
+  pushd "${target_folder}"
+  # create an empty commit so owl-bot-copy can process this as a repo
+  # (it cannot process non-git repositories)
+  git init
+  git commit --allow-empty -m 'empty commit'
+  popd # target_folder
+}
+
+# Convenience function to clone only the necessary folders from a git repository
+sparse_clone() {
+  repo_url=$1
+  paths=$2
+  commitish=$3
+  clone_dir=$(basename "${repo_url%.*}")
+  rm -rf "${clone_dir}"
+  git clone -n --depth=1 --no-single-branch --filter=tree:0 "${repo_url}"
+  pushd "${clone_dir}"
+  if [ -n "${commitish}" ]; then
+    git checkout "${commitish}"
+  fi
+  git sparse-checkout set --no-cone ${paths}
+  git checkout
+  popd
+}
+
+# calls a function in utilities.py. The first argument is the function name; the
+# rest of the arguments are the positional arguments to that function
+py_util() {
+  python3 "${utilities_script_dir}/utilities.py" "$@"
+}
+
+download_googleapis_files_and_folders() {
+  local output_folder=$1
+  local googleapis_commitish=$2
+  # checkout the google and grafeas proto folders of googleapis at the given commitish
+  echo "Checking out googleapis repository..."
+  # sparse_clone will remove folder contents first, so we have to checkout googleapis
+  # only once.
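+  # both the google and grafeas folders are copied into the output folder,
+  # where generate_library.sh expects to find them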
+  sparse_clone https://github.com/googleapis/googleapis.git "google grafeas" "${googleapis_commitish}"
+  pushd googleapis
+  cp -r google "${output_folder}"
+  cp -r grafeas "${output_folder}"
+  popd # googleapis
+}
+
diff --git a/showcase/scripts/generate_showcase.sh b/showcase/scripts/generate_showcase.sh
index ef9e2bf850..1c1b1f58de 100755
--- a/showcase/scripts/generate_showcase.sh
+++ b/showcase/scripts/generate_showcase.sh
@@ -8,6 +8,7 @@ set -ex
 readonly SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
 lib_gen_scripts_dir="${SCRIPT_DIR}/../../library_generation/"
 source "${lib_gen_scripts_dir}/test/test_utilities.sh"
+source "${lib_gen_scripts_dir}/utilities.sh"
 readonly perform_cleanup=$1
 cd "${SCRIPT_DIR}"
@@ -66,7 +67,6 @@ bash "${SCRIPT_DIR}/../../library_generation/generate_library.sh" \
   --service_config "${service_config}" \
   --service_yaml "${service_yaml}" \
   --include_samples "${include_samples}" \
-  --enable_postprocessing "false" \
   --transport "${transport}"
 exit_code=$?