From df34893b63ea944731429cff294c4788c87e62a4 Mon Sep 17 00:00:00 2001 From: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Date: Mon, 2 Sep 2024 17:48:17 +0200 Subject: [PATCH 01/51] feat(airbyte-cdk): replace pydantic `BaseModel` with `dataclasses` + `serpyco-rs` in protocol (#44444) Signed-off-by: Artem Inzhyyants --- .../python/airbyte_cdk/config_observation.py | 12 +- airbyte-cdk/python/airbyte_cdk/connector.py | 4 +- .../airbyte_cdk/connector_builder/main.py | 15 +- .../connector_builder/message_grouper.py | 34 +- .../airbyte_cdk/destinations/destination.py | 12 +- airbyte-cdk/python/airbyte_cdk/entrypoint.py | 22 +- airbyte-cdk/python/airbyte_cdk/logger.py | 17 +- .../python/airbyte_cdk/models/__init__.py | 10 + .../airbyte_cdk/models/airbyte_protocol.py | 79 +- .../models/airbyte_protocol_serializers.py | 38 + .../airbyte_cdk/models/well_known_types.py | 2 +- .../concurrent_read_processor.py | 5 +- .../sources/connector_state_manager.py | 22 +- .../incremental/per_partition_cursor.py | 2 +- .../wait_time_from_header_backoff_strategy.py | 2 +- .../sources/declarative/spec/spec.py | 6 +- .../sources/embedded/base_integration.py | 4 +- .../file_based/file_types/avro_parser.py | 4 +- .../file_based/file_types/excel_parser.py | 6 +- .../python/airbyte_cdk/sources/source.py | 13 +- ...substream_resumable_full_refresh_cursor.py | 2 +- .../sources/streams/http/http_client.py | 18 +- .../sources/utils/catalog_helpers.py | 22 - .../sources/utils/schema_models.py | 84 -- .../airbyte_cdk/test/catalog_builder.py | 4 +- .../airbyte_cdk/test/entrypoint_wrapper.py | 30 +- .../python/airbyte_cdk/test/state_builder.py | 10 +- .../python/airbyte_cdk/test/utils/reading.py | 2 +- .../python/airbyte_cdk/utils/message_utils.py | 8 +- .../airbyte_cdk/utils/traced_exception.py | 21 +- airbyte-cdk/python/cdk-migrations.md | 31 + airbyte-cdk/python/poetry.lock | 776 +++++++++++------- airbyte-cdk/python/pyproject.toml | 4 +- airbyte-cdk/python/unit_tests/conftest.py | 
4 +- .../test_connector_builder_handler.py | 40 +- .../connector_builder/test_message_grouper.py | 114 ++- .../unit_tests/connector_builder/utils.py | 4 +- .../destinations/test_destination.py | 11 +- .../document_processor_test.py | 10 +- .../vector_db_based/embedder_test.py | 2 +- .../vector_db_based/writer_test.py | 7 +- .../test_concurrent_source_adapter.py | 22 +- .../sources/declarative/auth/test_jwt.py | 37 +- .../declarative/checks/test_check_stream.py | 4 +- .../datetime/test_datetime_parser.py | 7 +- .../datetime/test_min_max_datetime.py | 7 +- .../declarative/decoders/test_json_decoder.py | 14 +- .../extractors/test_dpath_extractor.py | 7 +- .../incremental/test_datetime_based_cursor.py | 7 +- .../incremental/test_per_partition_cursor.py | 2 +- .../test_per_partition_cursor_integration.py | 58 +- .../test_resumable_full_refresh_cursor.py | 2 +- .../test_legacy_to_per_partition_migration.py | 250 +++--- .../test_model_to_component_factory.py | 78 +- ...test_cartesian_product_partition_router.py | 56 +- .../test_list_partition_router.py | 28 +- .../test_parent_state_stream.py | 72 +- .../test_substream_partition_router.py | 104 +-- .../test_wait_time_from_header.py | 10 +- .../test_composite_error_handler.py | 6 +- .../test_default_error_handler.py | 242 +++--- .../test_default_http_response_filter.py | 20 +- .../test_http_response_filter.py | 32 +- .../test_cursor_pagination_strategy.py | 2 +- .../paginators/test_default_paginator.py | 2 +- .../paginators/test_page_increment.py | 8 +- .../requesters/test_http_requester.py | 5 +- .../retrievers/test_simple_retriever.py | 55 +- .../sources/declarative/spec/test_spec.py | 16 +- .../declarative/test_declarative_stream.py | 2 +- .../test_manifest_declarative_source.py | 39 +- .../sources/declarative/test_types.py | 51 +- .../embedded/test_embedded_integration.py | 6 +- .../file_based/config/test_csv_format.py | 2 +- .../file_based/file_types/test_avro_parser.py | 2 +- .../file_types/test_excel_parser.py | 14 
+- .../file_types/test_parquet_parser.py | 3 +- .../file_based/in_memory_files_source.py | 8 +- .../concurrent_incremental_scenarios.py | 12 +- .../file_based/scenarios/csv_scenarios.py | 98 ++- .../file_based/scenarios/excel_scenarios.py | 26 +- .../scenarios/file_based_source_builder.py | 4 +- .../file_based/scenarios/scenario_builder.py | 21 +- .../scenarios/unstructured_scenarios.py | 14 - .../scenarios/user_input_schema_scenarios.py | 2 +- .../test_file_based_concurrent_cursor.py | 2 +- .../sources/file_based/test_scenarios.py | 18 +- .../sources/message/test_repository.py | 9 - .../mock_server_tests/mock_source_fixture.py | 148 ++-- .../airbyte_message_assertions.py | 15 +- .../test_mock_server_abstract_source.py | 195 +++-- .../test_resumable_full_refresh.py | 95 ++- .../checkpoint/test_checkpoint_reader.py | 7 +- ...substream_resumable_full_refresh_cursor.py | 83 +- .../scenarios/stream_facade_builder.py | 22 +- ..._based_concurrent_stream_source_builder.py | 22 +- .../test_concurrent_read_processor.py | 4 +- .../sources/streams/concurrent/test_cursor.py | 21 +- .../concurrent/test_partition_enqueuer.py | 5 +- .../test_default_backoff_strategy.py | 5 +- .../test_http_status_error_handler.py | 20 +- .../test_json_error_message_parser.py | 34 +- .../error_handlers/test_response_models.py | 8 +- .../http/test_availability_strategy.py | 8 +- .../sources/streams/http/test_http.py | 291 +++---- .../sources/streams/http/test_http_client.py | 144 ++-- .../sources/streams/test_stream_read.py | 12 +- .../sources/streams/test_streams_core.py | 90 +- .../streams/utils/test_stream_helper.py | 5 +- .../sources/test_abstract_source.py | 45 +- .../sources/test_connector_state_manager.py | 102 +-- .../python/unit_tests/sources/test_source.py | 54 +- .../unit_tests/sources/test_source_read.py | 30 +- .../sources/utils/test_catalog_helpers.py | 30 - .../sources/utils/test_schema_helpers.py | 6 +- .../sources/utils/test_schema_models.py | 65 -- 
.../test/mock_http/test_response_builder.py | 18 +- .../test/test_entrypoint_wrapper.py | 54 +- .../python/unit_tests/test_connector.py | 5 +- .../python/unit_tests/test_entrypoint.py | 202 +++-- .../unit_tests/test_exception_handler.py | 24 +- .../utils/test_datetime_format_inferrer.py | 2 +- .../unit_tests/utils/test_message_utils.py | 14 +- .../unit_tests/utils/test_schema_inferrer.py | 98 +-- .../unit_tests/utils/test_traced_exception.py | 18 +- 125 files changed, 2730 insertions(+), 2270 deletions(-) create mode 100644 airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol_serializers.py delete mode 100644 airbyte-cdk/python/airbyte_cdk/sources/utils/catalog_helpers.py delete mode 100644 airbyte-cdk/python/airbyte_cdk/sources/utils/schema_models.py delete mode 100644 airbyte-cdk/python/unit_tests/sources/utils/test_catalog_helpers.py delete mode 100644 airbyte-cdk/python/unit_tests/sources/utils/test_schema_models.py diff --git a/airbyte-cdk/python/airbyte_cdk/config_observation.py b/airbyte-cdk/python/airbyte_cdk/config_observation.py index 55e03f335c86..94a3d64a511b 100644 --- a/airbyte-cdk/python/airbyte_cdk/config_observation.py +++ b/airbyte-cdk/python/airbyte_cdk/config_observation.py @@ -10,7 +10,15 @@ from copy import copy from typing import Any, List, MutableMapping -from airbyte_cdk.models import AirbyteControlConnectorConfigMessage, AirbyteControlMessage, AirbyteMessage, OrchestratorType, Type +from airbyte_cdk.models import ( + AirbyteControlConnectorConfigMessage, + AirbyteControlMessage, + AirbyteMessage, + AirbyteMessageSerializer, + OrchestratorType, + Type, +) +from orjson import orjson class ObservedDict(dict): # type: ignore # disallow_any_generics is set to True, and dict is equivalent to dict[Any] @@ -76,7 +84,7 @@ def emit_configuration_as_airbyte_control_message(config: MutableMapping[str, An See the airbyte_cdk.sources.message package """ airbyte_message = create_connector_config_control_message(config) - 
print(airbyte_message.model_dump_json(exclude_unset=True)) + print(orjson.dumps(AirbyteMessageSerializer.dump(airbyte_message)).decode()) def create_connector_config_control_message(config: MutableMapping[str, Any]) -> AirbyteMessage: diff --git a/airbyte-cdk/python/airbyte_cdk/connector.py b/airbyte-cdk/python/airbyte_cdk/connector.py index e40ace288e2b..658a0b167077 100644 --- a/airbyte-cdk/python/airbyte_cdk/connector.py +++ b/airbyte-cdk/python/airbyte_cdk/connector.py @@ -11,7 +11,7 @@ from typing import Any, Generic, Mapping, Optional, Protocol, TypeVar import yaml -from airbyte_cdk.models import AirbyteConnectionStatus, ConnectorSpecification +from airbyte_cdk.models import AirbyteConnectionStatus, ConnectorSpecification, ConnectorSpecificationSerializer def load_optional_package_file(package: str, filename: str) -> Optional[bytes]: @@ -84,7 +84,7 @@ def spec(self, logger: logging.Logger) -> ConnectorSpecification: else: raise FileNotFoundError("Unable to find spec.yaml or spec.json in the package.") - return ConnectorSpecification.parse_obj(spec_obj) + return ConnectorSpecificationSerializer.load(spec_obj) @abstractmethod def check(self, logger: logging.Logger, config: TConfig) -> AirbyteConnectionStatus: diff --git a/airbyte-cdk/python/airbyte_cdk/connector_builder/main.py b/airbyte-cdk/python/airbyte_cdk/connector_builder/main.py index 9f12b8aaeb11..1691b41b090d 100644 --- a/airbyte-cdk/python/airbyte_cdk/connector_builder/main.py +++ b/airbyte-cdk/python/airbyte_cdk/connector_builder/main.py @@ -9,10 +9,17 @@ from airbyte_cdk.connector import BaseConnector from airbyte_cdk.connector_builder.connector_builder_handler import TestReadLimits, create_source, get_limits, read_stream, resolve_manifest from airbyte_cdk.entrypoint import AirbyteEntrypoint -from airbyte_cdk.models import AirbyteMessage, AirbyteStateMessage, ConfiguredAirbyteCatalog +from airbyte_cdk.models import ( + AirbyteMessage, + AirbyteMessageSerializer, + AirbyteStateMessage, + 
ConfiguredAirbyteCatalog, + ConfiguredAirbyteCatalogSerializer, +) from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource from airbyte_cdk.sources.source import Source from airbyte_cdk.utils.traced_exception import AirbyteTracedException +from orjson import orjson def get_config_and_catalog_from_args(args: List[str]) -> Tuple[str, Mapping[str, Any], Optional[ConfiguredAirbyteCatalog], Any]: @@ -32,7 +39,7 @@ def get_config_and_catalog_from_args(args: List[str]) -> Tuple[str, Mapping[str, command = config["__command"] if command == "test_read": - catalog = ConfiguredAirbyteCatalog.parse_obj(BaseConnector.read_config(catalog_path)) + catalog = ConfiguredAirbyteCatalogSerializer.load(BaseConnector.read_config(catalog_path)) state = Source.read_state(state_path) else: catalog = None @@ -67,7 +74,7 @@ def handle_request(args: List[str]) -> AirbyteMessage: command, config, catalog, state = get_config_and_catalog_from_args(args) limits = get_limits(config) source = create_source(config, limits) - return handle_connector_builder_request(source, command, config, catalog, state, limits).json(exclude_unset=True) + return AirbyteMessageSerializer.dump(handle_connector_builder_request(source, command, config, catalog, state, limits)) # type: ignore[no-any-return] # Serializer.dump() always returns AirbyteMessage if __name__ == "__main__": @@ -76,4 +83,4 @@ def handle_request(args: List[str]) -> AirbyteMessage: except Exception as exc: error = AirbyteTracedException.from_exception(exc, message=f"Error handling request: {str(exc)}") m = error.as_airbyte_message() - print(error.as_airbyte_message().model_dump_json(exclude_unset=True)) + print(orjson.dumps(AirbyteMessageSerializer.dump(m)).decode()) diff --git a/airbyte-cdk/python/airbyte_cdk/connector_builder/message_grouper.py b/airbyte-cdk/python/airbyte_cdk/connector_builder/message_grouper.py index 80cb8c36178e..4b00fc874cf5 100644 --- 
a/airbyte-cdk/python/airbyte_cdk/connector_builder/message_grouper.py +++ b/airbyte-cdk/python/airbyte_cdk/connector_builder/message_grouper.py @@ -18,13 +18,7 @@ StreamReadSlices, ) from airbyte_cdk.entrypoint import AirbyteEntrypoint -from airbyte_cdk.sources.declarative.declarative_source import DeclarativeSource -from airbyte_cdk.sources.utils.slice_logger import SliceLogger -from airbyte_cdk.sources.utils.types import JsonType -from airbyte_cdk.utils import AirbyteTracedException -from airbyte_cdk.utils.datetime_format_inferrer import DatetimeFormatInferrer -from airbyte_cdk.utils.schema_inferrer import SchemaInferrer, SchemaValidationException -from airbyte_protocol.models.airbyte_protocol import ( +from airbyte_cdk.models import ( AirbyteControlMessage, AirbyteLogMessage, AirbyteMessage, @@ -34,7 +28,13 @@ OrchestratorType, TraceType, ) -from airbyte_protocol.models.airbyte_protocol import Type as MessageType +from airbyte_cdk.models import Type as MessageType +from airbyte_cdk.sources.declarative.declarative_source import DeclarativeSource +from airbyte_cdk.sources.utils.slice_logger import SliceLogger +from airbyte_cdk.sources.utils.types import JsonType +from airbyte_cdk.utils import AirbyteTracedException +from airbyte_cdk.utils.datetime_format_inferrer import DatetimeFormatInferrer +from airbyte_cdk.utils.schema_inferrer import SchemaInferrer, SchemaValidationException class MessageGrouper: @@ -182,19 +182,19 @@ def _get_message_groups( if ( at_least_one_page_in_group and message.type == MessageType.LOG - and message.log.message.startswith(SliceLogger.SLICE_LOG_PREFIX) + and message.log.message.startswith(SliceLogger.SLICE_LOG_PREFIX) # type: ignore[union-attr] # AirbyteMessage with MessageType.LOG has log.message ): yield StreamReadSlices( pages=current_slice_pages, slice_descriptor=current_slice_descriptor, state=[latest_state_message] if latest_state_message else [], ) - current_slice_descriptor = self._parse_slice_description(message.log.message) + 
current_slice_descriptor = self._parse_slice_description(message.log.message) # type: ignore[union-attr] # AirbyteMessage with MessageType.LOG has log.message current_slice_pages = [] at_least_one_page_in_group = False - elif message.type == MessageType.LOG and message.log.message.startswith(SliceLogger.SLICE_LOG_PREFIX): + elif message.type == MessageType.LOG and message.log.message.startswith(SliceLogger.SLICE_LOG_PREFIX): # type: ignore[union-attr] # AirbyteMessage with MessageType.LOG has log.message # parsing the first slice - current_slice_descriptor = self._parse_slice_description(message.log.message) + current_slice_descriptor = self._parse_slice_description(message.log.message) # type: ignore[union-attr] # AirbyteMessage with MessageType.LOG has log.message elif message.type == MessageType.LOG: if json_message is not None and self._is_http_log(json_message): if self._is_auxiliary_http_request(json_message): @@ -221,17 +221,17 @@ def _get_message_groups( else: yield message.log elif message.type == MessageType.TRACE: - if message.trace.type == TraceType.ERROR: + if message.trace.type == TraceType.ERROR: # type: ignore[union-attr] # AirbyteMessage with MessageType.TRACE has trace.type yield message.trace elif message.type == MessageType.RECORD: - current_page_records.append(message.record.data) + current_page_records.append(message.record.data) # type: ignore[union-attr] # AirbyteMessage with MessageType.RECORD has record.data records_count += 1 schema_inferrer.accumulate(message.record) datetime_format_inferrer.accumulate(message.record) - elif message.type == MessageType.CONTROL and message.control.type == OrchestratorType.CONNECTOR_CONFIG: + elif message.type == MessageType.CONTROL and message.control.type == OrchestratorType.CONNECTOR_CONFIG: # type: ignore[union-attr] # AirbyteMessage with MessageType.CONTROL has control.type yield message.control elif message.type == MessageType.STATE: - latest_state_message = message.state + latest_state_message = 
message.state # type: ignore[assignment] else: if current_page_request or current_page_response or current_page_records: self._close_page(current_page_request, current_page_response, current_slice_pages, current_page_records) @@ -246,7 +246,7 @@ def _need_to_close_page(at_least_one_page_in_group: bool, message: AirbyteMessag return ( at_least_one_page_in_group and message.type == MessageType.LOG - and (MessageGrouper._is_page_http_request(json_message) or message.log.message.startswith("slice:")) + and (MessageGrouper._is_page_http_request(json_message) or message.log.message.startswith("slice:")) # type: ignore[union-attr] # AirbyteMessage with MessageType.LOG has log.message ) @staticmethod diff --git a/airbyte-cdk/python/airbyte_cdk/destinations/destination.py b/airbyte-cdk/python/airbyte_cdk/destinations/destination.py index f95e185aabfe..336a54a94e8f 100644 --- a/airbyte-cdk/python/airbyte_cdk/destinations/destination.py +++ b/airbyte-cdk/python/airbyte_cdk/destinations/destination.py @@ -11,10 +11,10 @@ from airbyte_cdk.connector import Connector from airbyte_cdk.exception_handler import init_uncaught_exception_handler -from airbyte_cdk.models import AirbyteMessage, ConfiguredAirbyteCatalog, Type +from airbyte_cdk.models import AirbyteMessage, AirbyteMessageSerializer, ConfiguredAirbyteCatalog, ConfiguredAirbyteCatalogSerializer, Type from airbyte_cdk.sources.utils.schema_helpers import check_config_against_spec_or_exit from airbyte_cdk.utils.traced_exception import AirbyteTracedException -from pydantic import ValidationError as V2ValidationError +from orjson import orjson logger = logging.getLogger("airbyte") @@ -36,14 +36,14 @@ def _parse_input_stream(self, input_stream: io.TextIOWrapper) -> Iterable[Airbyt """Reads from stdin, converting to Airbyte messages""" for line in input_stream: try: - yield AirbyteMessage.parse_raw(line) - except V2ValidationError: + yield AirbyteMessageSerializer.load(orjson.loads(line)) + except orjson.JSONDecodeError: 
logger.info(f"ignoring input which can't be deserialized as Airbyte Message: {line}") def _run_write( self, config: Mapping[str, Any], configured_catalog_path: str, input_stream: io.TextIOWrapper ) -> Iterable[AirbyteMessage]: - catalog = ConfiguredAirbyteCatalog.parse_file(configured_catalog_path) + catalog = ConfiguredAirbyteCatalogSerializer.load(orjson.loads(open(configured_catalog_path).read())) input_messages = self._parse_input_stream(input_stream) logger.info("Begin writing to the destination...") yield from self.write(config=config, configured_catalog=catalog, input_messages=input_messages) @@ -117,4 +117,4 @@ def run(self, args: List[str]) -> None: parsed_args = self.parse_args(args) output_messages = self.run_cmd(parsed_args) for message in output_messages: - print(message.model_dump_json(exclude_unset=True)) + print(orjson.dumps(AirbyteMessageSerializer.dump(message)).decode()) diff --git a/airbyte-cdk/python/airbyte_cdk/entrypoint.py b/airbyte-cdk/python/airbyte_cdk/entrypoint.py index cc9c3662ff67..57b604691bc1 100644 --- a/airbyte-cdk/python/airbyte_cdk/entrypoint.py +++ b/airbyte-cdk/python/airbyte_cdk/entrypoint.py @@ -19,8 +19,15 @@ from airbyte_cdk.connector import TConfig from airbyte_cdk.exception_handler import init_uncaught_exception_handler from airbyte_cdk.logger import init_logger -from airbyte_cdk.models import AirbyteMessage, FailureType, Status, Type -from airbyte_cdk.models.airbyte_protocol import AirbyteStateStats, ConnectorSpecification # type: ignore [attr-defined] +from airbyte_cdk.models import ( # type: ignore [attr-defined] + AirbyteMessage, + AirbyteMessageSerializer, + AirbyteStateStats, + ConnectorSpecification, + FailureType, + Status, + Type, +) from airbyte_cdk.sources import Source from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor from airbyte_cdk.sources.utils.schema_helpers import check_config_against_spec_or_exit, split_config @@ -28,6 +35,7 @@ from airbyte_cdk.utils.airbyte_secrets_utils 
import get_secrets, update_secrets from airbyte_cdk.utils.constants import ENV_REQUEST_CACHE_PATH from airbyte_cdk.utils.traced_exception import AirbyteTracedException +from orjson import orjson from requests import PreparedRequest, Response, Session logger = init_logger("airbyte") @@ -170,13 +178,13 @@ def read(self, source_spec: ConnectorSpecification, config: TConfig, catalog: An def handle_record_counts(message: AirbyteMessage, stream_message_count: DefaultDict[HashableStreamDescriptor, float]) -> AirbyteMessage: match message.type: case Type.RECORD: - stream_message_count[HashableStreamDescriptor(name=message.record.stream, namespace=message.record.namespace)] += 1.0 + stream_message_count[HashableStreamDescriptor(name=message.record.stream, namespace=message.record.namespace)] += 1.0 # type: ignore[union-attr] # record has `stream` and `namespace` case Type.STATE: stream_descriptor = message_utils.get_stream_descriptor(message) # Set record count from the counter onto the state message - message.state.sourceStats = message.state.sourceStats or AirbyteStateStats() - message.state.sourceStats.recordCount = stream_message_count.get(stream_descriptor, 0.0) + message.state.sourceStats = message.state.sourceStats or AirbyteStateStats() # type: ignore[union-attr] # state has `sourceStats` + message.state.sourceStats.recordCount = stream_message_count.get(stream_descriptor, 0.0) # type: ignore[union-attr] # state has `sourceStats` # Reset the counter stream_message_count[stream_descriptor] = 0.0 @@ -197,8 +205,8 @@ def set_up_secret_filter(config: TConfig, connection_specification: Mapping[str, update_secrets(config_secrets) @staticmethod - def airbyte_message_to_string(airbyte_message: AirbyteMessage) -> Any: - return airbyte_message.model_dump_json(exclude_unset=True) + def airbyte_message_to_string(airbyte_message: AirbyteMessage) -> str: + return orjson.dumps(AirbyteMessageSerializer.dump(airbyte_message)).decode() # type: ignore[no-any-return] # 
orjson.dumps(message).decode() always returns string @classmethod def extract_state(cls, args: List[str]) -> Optional[Any]: diff --git a/airbyte-cdk/python/airbyte_cdk/logger.py b/airbyte-cdk/python/airbyte_cdk/logger.py index 6f40e581df94..72673dcfa8f7 100644 --- a/airbyte-cdk/python/airbyte_cdk/logger.py +++ b/airbyte-cdk/python/airbyte_cdk/logger.py @@ -7,8 +7,9 @@ import logging.config from typing import Any, Mapping, Optional, Tuple -from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage +from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, AirbyteMessageSerializer, Level, Type from airbyte_cdk.utils.airbyte_secrets_utils import filter_secrets +from orjson import orjson LOGGING_CONFIG = { "version": 1, @@ -42,11 +43,11 @@ class AirbyteLogFormatter(logging.Formatter): # Transforming Python log levels to Airbyte protocol log levels level_mapping = { - logging.FATAL: "FATAL", - logging.ERROR: "ERROR", - logging.WARNING: "WARN", - logging.INFO: "INFO", - logging.DEBUG: "DEBUG", + logging.FATAL: Level.FATAL, + logging.ERROR: Level.ERROR, + logging.WARNING: Level.WARN, + logging.INFO: Level.INFO, + logging.DEBUG: Level.DEBUG, } def format(self, record: logging.LogRecord) -> str: @@ -59,8 +60,8 @@ def format(self, record: logging.LogRecord) -> str: else: message = super().format(record) message = filter_secrets(message) - log_message = AirbyteMessage(type="LOG", log=AirbyteLogMessage(level=airbyte_level, message=message)) - return log_message.model_dump_json(exclude_unset=True) # type: ignore + log_message = AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=airbyte_level, message=message)) + return orjson.dumps(AirbyteMessageSerializer.dump(log_message)).decode() # type: ignore[no-any-return] # orjson.dumps(message).decode() always returns string @staticmethod def extract_extra_args_from_record(record: logging.LogRecord) -> Mapping[str, Any]: diff --git a/airbyte-cdk/python/airbyte_cdk/models/__init__.py 
b/airbyte-cdk/python/airbyte_cdk/models/__init__.py index b062a4468c22..c56df9adc43a 100644 --- a/airbyte-cdk/python/airbyte_cdk/models/__init__.py +++ b/airbyte-cdk/python/airbyte_cdk/models/__init__.py @@ -7,6 +7,7 @@ # of airbyte-cdk rather than a standalone package. from .airbyte_protocol import ( AdvancedAuth, + AirbyteStateStats, AirbyteAnalyticsTraceMessage, AirbyteCatalog, AirbyteConnectionStatus, @@ -58,3 +59,12 @@ TimeWithoutTimezone, TimeWithTimezone, ) + +from .airbyte_protocol_serializers import ( +AirbyteStreamStateSerializer, +AirbyteStateMessageSerializer, +AirbyteMessageSerializer, +ConfiguredAirbyteCatalogSerializer, +ConfiguredAirbyteStreamSerializer, +ConnectorSpecificationSerializer, +) \ No newline at end of file diff --git a/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py b/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py index 74639c8bf3c1..477cfb8a66fd 100644 --- a/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py +++ b/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py @@ -2,4 +2,81 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from airbyte_protocol.models import * +from dataclasses import InitVar, dataclass +from typing import Annotated, Any, Dict, List, Mapping, Optional + +from airbyte_protocol_dataclasses.models import * +from serpyco_rs.metadata import Alias + + +@dataclass +class AirbyteStateBlob: + """ + A dataclass that dynamically sets attributes based on provided keyword arguments and positional arguments. + Used to "mimic" pydantic Basemodel with ConfigDict(extra='allow') option. + + The `AirbyteStateBlob` class allows for flexible instantiation by accepting any number of keyword arguments + and positional arguments. These are used to dynamically update the instance's attributes. This class is useful + in scenarios where the attributes of an object are not known until runtime and need to be set dynamically. 
+ + Attributes: + kwargs (InitVar[Mapping[str, Any]]): A dictionary of keyword arguments used to set attributes dynamically. + + Methods: + __init__(*args: Any, **kwargs: Any) -> None: + Initializes the `AirbyteStateBlob` by setting attributes from the provided arguments. + + __eq__(other: object) -> bool: + Checks equality between two `AirbyteStateBlob` instances based on their internal dictionaries. + Returns `False` if the other object is not an instance of `AirbyteStateBlob`. + """ + + kwargs: InitVar[Mapping[str, Any]] + + def __init__(self, *args: Any, **kwargs: Any) -> None: + # Set any attribute passed in through kwargs + for arg in args: + self.__dict__.update(arg) + for key, value in kwargs.items(): + setattr(self, key, value) + + def __eq__(self, other: object) -> bool: + return False if not isinstance(other, AirbyteStateBlob) else bool(self.__dict__ == other.__dict__) + + +# The following dataclasses have been redeclared to include the new version of AirbyteStateBlob +@dataclass +class AirbyteStreamState: + stream_descriptor: StreamDescriptor # type: ignore [name-defined] + stream_state: Optional[AirbyteStateBlob] = None + + +@dataclass +class AirbyteGlobalState: + stream_states: List[AirbyteStreamState] + shared_state: Optional[AirbyteStateBlob] = None + + +@dataclass +class AirbyteStateMessage: + type: Optional[AirbyteStateType] = None # type: ignore [name-defined] + stream: Optional[AirbyteStreamState] = None + global_: Annotated[ + AirbyteGlobalState | None, Alias("global") + ] = None # "global" is a reserved keyword in python ⇒ Alias is used for (de-)serialization + data: Optional[Dict[str, Any]] = None + sourceStats: Optional[AirbyteStateStats] = None # type: ignore [name-defined] + destinationStats: Optional[AirbyteStateStats] = None # type: ignore [name-defined] + + +@dataclass +class AirbyteMessage: + type: Type # type: ignore [name-defined] + log: Optional[AirbyteLogMessage] = None # type: ignore [name-defined] + spec: 
Optional[ConnectorSpecification] = None # type: ignore [name-defined] + connectionStatus: Optional[AirbyteConnectionStatus] = None # type: ignore [name-defined] + catalog: Optional[AirbyteCatalog] = None # type: ignore [name-defined] + record: Optional[AirbyteRecordMessage] = None # type: ignore [name-defined] + state: Optional[AirbyteStateMessage] = None + trace: Optional[AirbyteTraceMessage] = None # type: ignore [name-defined] + control: Optional[AirbyteControlMessage] = None # type: ignore [name-defined] diff --git a/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol_serializers.py b/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol_serializers.py new file mode 100644 index 000000000000..aeac43f794ce --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol_serializers.py @@ -0,0 +1,38 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +from typing import Any, Dict + +from serpyco_rs import CustomType, Serializer + +from .airbyte_protocol import ( # type: ignore[attr-defined] # all classes are imported to airbyte_protocol via * + AirbyteMessage, + AirbyteStateBlob, + AirbyteStateMessage, + AirbyteStreamState, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + ConnectorSpecification, +) + + +class AirbyteStateBlobType(CustomType[AirbyteStateBlob, Dict[str, Any]]): + def serialize(self, value: AirbyteStateBlob) -> Dict[str, Any]: + # cant use orjson.dumps() directly because private attributes are excluded, e.g. 
"__ab_full_refresh_sync_complete" + return {k: v for k, v in value.__dict__.items()} + + def deserialize(self, value: Dict[str, Any]) -> AirbyteStateBlob: + return AirbyteStateBlob(value) + + def get_json_schema(self) -> Dict[str, Any]: + return {"type": "object"} + + +def custom_type_resolver(t: type) -> CustomType[AirbyteStateBlob, Dict[str, Any]] | None: + return AirbyteStateBlobType() if t is AirbyteStateBlob else None + + +AirbyteStreamStateSerializer = Serializer(AirbyteStreamState, omit_none=True, custom_type_resolver=custom_type_resolver) +AirbyteStateMessageSerializer = Serializer(AirbyteStateMessage, omit_none=True, custom_type_resolver=custom_type_resolver) +AirbyteMessageSerializer = Serializer(AirbyteMessage, omit_none=True, custom_type_resolver=custom_type_resolver) +ConfiguredAirbyteCatalogSerializer = Serializer(ConfiguredAirbyteCatalog, omit_none=True) +ConfiguredAirbyteStreamSerializer = Serializer(ConfiguredAirbyteStream, omit_none=True) +ConnectorSpecificationSerializer = Serializer(ConnectorSpecification, omit_none=True) diff --git a/airbyte-cdk/python/airbyte_cdk/models/well_known_types.py b/airbyte-cdk/python/airbyte_cdk/models/well_known_types.py index 0cc409c7e070..a063ad7db03a 100644 --- a/airbyte-cdk/python/airbyte_cdk/models/well_known_types.py +++ b/airbyte-cdk/python/airbyte_cdk/models/well_known_types.py @@ -2,4 +2,4 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# -from airbyte_protocol.models.well_known_types import * +from airbyte_protocol_dataclasses.models.well_known_types import * diff --git a/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py b/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py index f345c6b4bd75..40cc771ab780 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py @@ -5,7 +5,7 @@ from typing import Dict, Iterable, List, Optional, Set from airbyte_cdk.exception_handler import generate_failed_streams_error_message -from airbyte_cdk.models import AirbyteMessage, AirbyteStreamStatus +from airbyte_cdk.models import AirbyteMessage, AirbyteStreamStatus, FailureType, StreamDescriptor from airbyte_cdk.models import Type as MessageType from airbyte_cdk.sources.concurrent_source.partition_generation_completed_sentinel import PartitionGenerationCompletedSentinel from airbyte_cdk.sources.concurrent_source.stream_thread_exception import StreamThreadException @@ -21,7 +21,6 @@ from airbyte_cdk.sources.utils.slice_logger import SliceLogger from airbyte_cdk.utils import AirbyteTracedException from airbyte_cdk.utils.stream_status_utils import as_airbyte_message as stream_status_as_airbyte_message -from airbyte_protocol.models import FailureType, StreamDescriptor class ConcurrentReadProcessor: @@ -76,7 +75,7 @@ def on_partition_generation_completed(self, sentinel: PartitionGenerationComplet if self._is_stream_done(stream_name) or len(self._streams_to_running_partitions[stream_name]) == 0: yield from self._on_stream_is_done(stream_name) if self._stream_instances_to_start_partition_generation: - yield self.start_next_partition_generator() + yield self.start_next_partition_generator() # type:ignore # None may be yielded def on_partition(self, partition: Partition) -> None: """ diff --git 
a/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py b/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py index b550d81b7a27..547f4bb23dca 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py @@ -3,20 +3,22 @@ # import copy +from dataclasses import dataclass from typing import Any, List, Mapping, MutableMapping, Optional, Tuple, Union from airbyte_cdk.models import AirbyteMessage, AirbyteStateBlob, AirbyteStateMessage, AirbyteStateType, AirbyteStreamState, StreamDescriptor from airbyte_cdk.models import Type as MessageType -from pydantic import ConfigDict as V2ConfigDict -class HashableStreamDescriptor(StreamDescriptor): +@dataclass(frozen=True) +class HashableStreamDescriptor: """ Helper class that overrides the existing StreamDescriptor class that is auto generated from the Airbyte Protocol and freezes its fields so that it be used as a hash key. This is only marked public because we use it outside for unit tests. 
""" - model_config = V2ConfigDict(extra="allow", frozen=True) + name: str + namespace: Optional[str] = None class ConnectorStateManager: @@ -47,9 +49,9 @@ def get_stream_state(self, stream_name: str, namespace: Optional[str]) -> Mutabl :param namespace: Namespace of the stream being fetched :return: The per-stream state for a stream """ - stream_state = self.per_stream_states.get(HashableStreamDescriptor(name=stream_name, namespace=namespace)) + stream_state: AirbyteStateBlob | None = self.per_stream_states.get(HashableStreamDescriptor(name=stream_name, namespace=namespace)) if stream_state: - return stream_state.dict() # type: ignore # mypy thinks dict() returns any, but it returns a dict + return copy.deepcopy({k: v for k, v in stream_state.__dict__.items()}) return {} def update_state_for_stream(self, stream_name: str, namespace: Optional[str], value: Mapping[str, Any]) -> None: @@ -60,7 +62,7 @@ def update_state_for_stream(self, stream_name: str, namespace: Optional[str], va :param value: A stream state mapping that is being updated for a stream """ stream_descriptor = HashableStreamDescriptor(name=stream_name, namespace=namespace) - self.per_stream_states[stream_descriptor] = AirbyteStateBlob.parse_obj(value) + self.per_stream_states[stream_descriptor] = AirbyteStateBlob(value) def create_state_message(self, stream_name: str, namespace: Optional[str]) -> AirbyteMessage: """ @@ -100,19 +102,19 @@ def _extract_from_state_message( if is_global: global_state = state[0].global_ # type: ignore # We verified state is a list in _is_global_state - shared_state = copy.deepcopy(global_state.shared_state, {}) + shared_state = copy.deepcopy(global_state.shared_state, {}) # type: ignore[union-attr] # global_state has shared_state streams = { HashableStreamDescriptor( name=per_stream_state.stream_descriptor.name, namespace=per_stream_state.stream_descriptor.namespace ): per_stream_state.stream_state - for per_stream_state in global_state.stream_states + for per_stream_state 
in global_state.stream_states # type: ignore[union-attr] # global_state has shared_state } return shared_state, streams else: streams = { HashableStreamDescriptor( - name=per_stream_state.stream.stream_descriptor.name, namespace=per_stream_state.stream.stream_descriptor.namespace - ): per_stream_state.stream.stream_state + name=per_stream_state.stream.stream_descriptor.name, namespace=per_stream_state.stream.stream_descriptor.namespace # type: ignore[union-attr] # stream has stream_descriptor + ): per_stream_state.stream.stream_state # type: ignore[union-attr] # stream has stream_state for per_stream_state in state if per_stream_state.type == AirbyteStateType.STREAM and hasattr(per_stream_state, "stream") # type: ignore # state is always a list of AirbyteStateMessage if is_per_stream is True } diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py index f0869b72fa29..28c2f0eb6b8b 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py @@ -4,12 +4,12 @@ from typing import Any, Callable, Iterable, Mapping, MutableMapping, Optional, Union +from airbyte_cdk.models import FailureType from airbyte_cdk.sources.declarative.incremental.declarative_cursor import DeclarativeCursor from airbyte_cdk.sources.declarative.partition_routers.partition_router import PartitionRouter from airbyte_cdk.sources.streams.checkpoint.per_partition_key_serializer import PerPartitionKeySerializer from airbyte_cdk.sources.types import Record, StreamSlice, StreamState from airbyte_cdk.utils import AirbyteTracedException -from airbyte_protocol.models import FailureType class CursorFactory: diff --git 
a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/error_handlers/backoff_strategies/wait_time_from_header_backoff_strategy.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/error_handlers/backoff_strategies/wait_time_from_header_backoff_strategy.py index d437a5c12ae5..79eb8a7fe23d 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/error_handlers/backoff_strategies/wait_time_from_header_backoff_strategy.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/error_handlers/backoff_strategies/wait_time_from_header_backoff_strategy.py @@ -7,12 +7,12 @@ from typing import Any, Mapping, Optional, Union import requests +from airbyte_cdk.models import FailureType from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString from airbyte_cdk.sources.declarative.requesters.error_handlers.backoff_strategies.header_helper import get_numeric_value_from_header from airbyte_cdk.sources.declarative.requesters.error_handlers.backoff_strategy import BackoffStrategy from airbyte_cdk.sources.types import Config from airbyte_cdk.utils import AirbyteTracedException -from airbyte_protocol.models import FailureType @dataclass diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/spec/spec.py index a0d499f5d13d..87c8911d6aa6 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/spec/spec.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/spec/spec.py @@ -5,7 +5,7 @@ from dataclasses import InitVar, dataclass from typing import Any, Mapping, Optional -from airbyte_cdk.models.airbyte_protocol import AdvancedAuth, ConnectorSpecification # type: ignore [attr-defined] +from airbyte_cdk.models import AdvancedAuth, ConnectorSpecification, ConnectorSpecificationSerializer # type: ignore [attr-defined] from airbyte_cdk.sources.declarative.models.declarative_component_schema import AuthFlow @@ -36,7 
+36,7 @@ def generate_spec(self) -> ConnectorSpecification: if self.advanced_auth: self.advanced_auth.auth_flow_type = self.advanced_auth.auth_flow_type.value # type: ignore # We know this is always assigned to an AuthFlow which has the auth_flow_type field # Map CDK AuthFlow model to protocol AdvancedAuth model - obj["advanced_auth"] = AdvancedAuth.parse_obj(self.advanced_auth.dict()) + obj["advanced_auth"] = self.advanced_auth.dict() # We remap these keys to camel case because that's the existing format expected by the rest of the platform - return ConnectorSpecification.parse_obj(obj) + return ConnectorSpecificationSerializer.load(obj) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/embedded/base_integration.py b/airbyte-cdk/python/airbyte_cdk/sources/embedded/base_integration.py index 158dea4d135a..79c9bd850a3a 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/embedded/base_integration.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/embedded/base_integration.py @@ -6,11 +6,11 @@ from typing import Generic, Iterable, Optional, TypeVar from airbyte_cdk.connector import TConfig +from airbyte_cdk.models import AirbyteRecordMessage, AirbyteStateMessage, SyncMode, Type from airbyte_cdk.sources.embedded.catalog import create_configured_catalog, get_stream, get_stream_names from airbyte_cdk.sources.embedded.runner import SourceRunner from airbyte_cdk.sources.embedded.tools import get_defined_id from airbyte_cdk.sources.utils.schema_helpers import check_config_against_spec_or_exit -from airbyte_protocol.models import AirbyteRecordMessage, AirbyteStateMessage, SyncMode, Type TOutput = TypeVar("TOutput") @@ -43,7 +43,7 @@ def _load_data(self, stream_name: str, state: Optional[AirbyteStateMessage] = No for message in self.source.read(self.config, configured_catalog, state): if message.type == Type.RECORD: - output = self._handle_record(message.record, get_defined_id(stream, message.record.data)) + output = self._handle_record(message.record, get_defined_id(stream, 
message.record.data)) # type: ignore[union-attr] # record has `data` if output: yield output elif message.type is Type.STATE and message.state: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/avro_parser.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/avro_parser.py index bbae84b287bf..b033afa57fb3 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/avro_parser.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/avro_parser.py @@ -166,7 +166,9 @@ def file_read_mode(self) -> FileReadMode: @staticmethod def _to_output_value(avro_format: AvroFormat, record_type: Mapping[str, Any], record_value: Any) -> Any: - if not isinstance(record_type, Mapping): + if isinstance(record_value, bytes): + return record_value.decode() + elif not isinstance(record_type, Mapping): if record_type == "double" and avro_format.double_as_string: return str(record_value) return record_value diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/excel_parser.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/excel_parser.py index 579a85390c66..93add4108dea 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/excel_parser.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/excel_parser.py @@ -17,6 +17,7 @@ from numpy import datetime64 from numpy import dtype as dtype_ from numpy import issubdtype +from orjson import orjson from pydantic.v1 import BaseModel @@ -97,7 +98,10 @@ def parse_records( with stream_reader.open_file(file, self.file_read_mode, self.ENCODING, logger) as fp: df = self.open_and_parse_file(fp) # Yield records as dictionaries - yield from df.to_dict(orient="records") + # DataFrame.to_dict() method returns datetime values in pandas.Timestamp values, which are not serializable by orjson + # DataFrame.to_json() returns string with datetime values serialized to iso8601 with microseconds to align with pydantic behavior + # see PR 
description: https://github.com/airbytehq/airbyte/pull/44444/ + yield from orjson.loads(df.to_json(orient="records", date_format="iso", date_unit="us")) except Exception as exc: # Raise a RecordParseError if any exception occurs during parsing diff --git a/airbyte-cdk/python/airbyte_cdk/sources/source.py b/airbyte-cdk/python/airbyte_cdk/sources/source.py index 77de81fbe7f1..975770c88949 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/source.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/source.py @@ -8,7 +8,14 @@ from typing import Any, Generic, Iterable, List, Mapping, Optional, TypeVar from airbyte_cdk.connector import BaseConnector, DefaultConnectorMixin, TConfig -from airbyte_cdk.models import AirbyteCatalog, AirbyteMessage, AirbyteStateMessage, ConfiguredAirbyteCatalog +from airbyte_cdk.models import ( + AirbyteCatalog, + AirbyteMessage, + AirbyteStateMessage, + AirbyteStateMessageSerializer, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteCatalogSerializer, +) TState = TypeVar("TState") TCatalog = TypeVar("TCatalog") @@ -61,7 +68,7 @@ def read_state(cls, state_path: str) -> List[AirbyteStateMessage]: state_obj = BaseConnector._read_json_file(state_path) if state_obj: for state in state_obj: # type: ignore # `isinstance(state_obj, List)` ensures that this is a list - parsed_message = AirbyteStateMessage.parse_obj(state) + parsed_message = AirbyteStateMessageSerializer.load(state) if not parsed_message.stream and not parsed_message.data and not parsed_message.global_: raise ValueError("AirbyteStateMessage should contain either a stream, global, or state field") parsed_state_messages.append(parsed_message) @@ -70,7 +77,7 @@ def read_state(cls, state_path: str) -> List[AirbyteStateMessage]: # can be overridden to change an input catalog @classmethod def read_catalog(cls, catalog_path: str) -> ConfiguredAirbyteCatalog: - return ConfiguredAirbyteCatalog.parse_obj(cls._read_json_file(catalog_path)) + return 
ConfiguredAirbyteCatalogSerializer.load(cls._read_json_file(catalog_path)) @property def name(self) -> str: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/checkpoint/substream_resumable_full_refresh_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/checkpoint/substream_resumable_full_refresh_cursor.py index 761a37e1f180..0afc2974fa9a 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/checkpoint/substream_resumable_full_refresh_cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/checkpoint/substream_resumable_full_refresh_cursor.py @@ -3,11 +3,11 @@ from dataclasses import dataclass from typing import Any, Mapping, MutableMapping, Optional +from airbyte_cdk.models import FailureType from airbyte_cdk.sources.streams.checkpoint import Cursor from airbyte_cdk.sources.streams.checkpoint.per_partition_key_serializer import PerPartitionKeySerializer from airbyte_cdk.sources.types import Record, StreamSlice, StreamState from airbyte_cdk.utils import AirbyteTracedException -from airbyte_protocol.models import FailureType FULL_REFRESH_COMPLETE_STATE: Mapping[str, Any] = {"__ab_full_refresh_sync_complete": True} diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http_client.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http_client.py index d52b92627577..b1f23aeb4e25 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http_client.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http_client.py @@ -10,7 +10,14 @@ import requests import requests_cache -from airbyte_cdk.models import AirbyteStreamStatus, AirbyteStreamStatusReason, AirbyteStreamStatusReasonType, Level, StreamDescriptor +from airbyte_cdk.models import ( + AirbyteMessageSerializer, + AirbyteStreamStatus, + AirbyteStreamStatusReason, + AirbyteStreamStatusReasonType, + Level, + StreamDescriptor, +) from airbyte_cdk.sources.http_config import MAX_CONNECTION_POOL_SIZE from airbyte_cdk.sources.message import MessageRepository from 
airbyte_cdk.sources.streams.call_rate import APIBudget, CachedLimiterSession, LimiterSession @@ -38,6 +45,7 @@ from airbyte_cdk.utils.constants import ENV_REQUEST_CACHE_PATH from airbyte_cdk.utils.stream_status_utils import as_airbyte_message as stream_status_as_airbyte_message from airbyte_cdk.utils.traced_exception import AirbyteTracedException +from orjson import orjson from requests.auth import AuthBase BODY_REQUEST_METHODS = ("GET", "POST", "PUT", "PATCH") @@ -281,9 +289,11 @@ def _send( if error_resolution.response_action == ResponseAction.RATE_LIMITED: # TODO: Update to handle with message repository when concurrent message repository is ready reasons = [AirbyteStreamStatusReason(type=AirbyteStreamStatusReasonType.RATE_LIMITED)] - message = stream_status_as_airbyte_message( - StreamDescriptor(name=self._name), AirbyteStreamStatus.RUNNING, reasons - ).model_dump_json(exclude_unset=True) + message = orjson.dumps( + AirbyteMessageSerializer.dump( + stream_status_as_airbyte_message(StreamDescriptor(name=self._name), AirbyteStreamStatus.RUNNING, reasons) + ) + ).decode() # Simply printing the stream status is a temporary solution and can cause future issues. Currently, the _send method is # wrapped with backoff decorators, and we can only emit messages by iterating record_iterator in the abstract source at the diff --git a/airbyte-cdk/python/airbyte_cdk/sources/utils/catalog_helpers.py b/airbyte-cdk/python/airbyte_cdk/sources/utils/catalog_helpers.py deleted file mode 100644 index 415374a44bc1..000000000000 --- a/airbyte-cdk/python/airbyte_cdk/sources/utils/catalog_helpers.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -from airbyte_cdk.models import AirbyteCatalog, SyncMode - - -class CatalogHelper: - @staticmethod - def coerce_catalog_as_full_refresh(catalog: AirbyteCatalog) -> AirbyteCatalog: - """ - Updates the sync mode on all streams in this catalog to be full refresh - """ - coerced_catalog = catalog.copy() - for stream in catalog.streams: - stream.source_defined_cursor = False - stream.supported_sync_modes = [SyncMode.full_refresh] - stream.default_cursor_field = None - - # remove nulls - return AirbyteCatalog.parse_raw(coerced_catalog.model_dump_json(exclude_unset=True, exclude_none=True)) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/utils/schema_models.py b/airbyte-cdk/python/airbyte_cdk/sources/utils/schema_models.py deleted file mode 100644 index de011bfb896b..000000000000 --- a/airbyte-cdk/python/airbyte_cdk/sources/utils/schema_models.py +++ /dev/null @@ -1,84 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from typing import Any, Dict, Optional, Type - -from airbyte_cdk.sources.utils.schema_helpers import expand_refs -from pydantic.v1 import BaseModel, Extra -from pydantic.v1.main import ModelMetaclass -from pydantic.v1.typing import resolve_annotations - - -class AllOptional(ModelMetaclass): - """ - Metaclass for marking all Pydantic model fields as Optional - Here is example of declaring model using this metaclass like: - ''' - class MyModel(BaseModel, metaclass=AllOptional): - a: str - b: str - ''' - it is an equivalent of: - ''' - class MyModel(BaseModel): - a: Optional[str] - b: Optional[str] - ''' - It would make code more clear and eliminate a lot of manual work. - """ - - def __new__(mcs, name, bases, namespaces, **kwargs): # type: ignore[no-untyped-def] # super().__new__ is also untyped - """ - Iterate through fields and wrap then with typing.Optional type. 
- """ - annotations = resolve_annotations(namespaces.get("__annotations__", {}), namespaces.get("__module__", None)) - for base in bases: - annotations = {**annotations, **getattr(base, "__annotations__", {})} - for field in annotations: - if not field.startswith("__"): - annotations[field] = Optional[annotations[field]] # type: ignore[assignment] - namespaces["__annotations__"] = annotations - return super().__new__(mcs, name, bases, namespaces, **kwargs) - - -class BaseSchemaModel(BaseModel): - """ - Base class for all schema models. It has some extra schema postprocessing. - Can be used in combination with AllOptional metaclass - """ - - class Config: - extra = Extra.allow - - @classmethod - def schema_extra(cls, schema: Dict[str, Any], model: Type[BaseModel]) -> None: - """Modify generated jsonschema, remove "title", "description" and "required" fields. - - Pydantic doesn't treat Union[None, Any] type correctly when generate jsonschema, - so we can't set field as nullable (i.e. field that can have either null and non-null values), - We generate this jsonschema value manually. 
- - :param schema: generated jsonschema - :param model: - """ - schema.pop("title", None) - schema.pop("description", None) - schema.pop("required", None) - for name, prop in schema.get("properties", {}).items(): - prop.pop("title", None) - prop.pop("description", None) - allow_none = model.__fields__[name].allow_none - if allow_none: - if "type" in prop: - prop["type"] = ["null", prop["type"]] - elif "$ref" in prop: - ref = prop.pop("$ref") - prop["oneOf"] = [{"type": "null"}, {"$ref": ref}] - - @classmethod - def schema(cls, *args: Any, **kwargs: Any) -> Dict[str, Any]: - """We're overriding the schema classmethod to enable some post-processing""" - schema = super().schema(*args, **kwargs) - expand_refs(schema) - return schema # type: ignore[no-any-return] diff --git a/airbyte-cdk/python/airbyte_cdk/test/catalog_builder.py b/airbyte-cdk/python/airbyte_cdk/test/catalog_builder.py index c3e3578f3494..235be7c579b6 100644 --- a/airbyte-cdk/python/airbyte_cdk/test/catalog_builder.py +++ b/airbyte-cdk/python/airbyte_cdk/test/catalog_builder.py @@ -2,7 +2,7 @@ from typing import Any, Dict, List, Union, overload -from airbyte_protocol.models import ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, SyncMode +from airbyte_cdk.models import ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, ConfiguredAirbyteStreamSerializer, SyncMode class ConfiguredAirbyteStreamBuilder: @@ -37,7 +37,7 @@ def with_json_schema(self, json_schema: Dict[str, Any]) -> "ConfiguredAirbyteStr return self def build(self) -> ConfiguredAirbyteStream: - return ConfiguredAirbyteStream.parse_obj(self._stream) + return ConfiguredAirbyteStreamSerializer.load(self._stream) class CatalogBuilder: diff --git a/airbyte-cdk/python/airbyte_cdk/test/entrypoint_wrapper.py b/airbyte-cdk/python/airbyte_cdk/test/entrypoint_wrapper.py index ef300bd864f0..9cc74ec2669b 100644 --- a/airbyte-cdk/python/airbyte_cdk/test/entrypoint_wrapper.py +++ b/airbyte-cdk/python/airbyte_cdk/test/entrypoint_wrapper.py @@ -26,18 +26,23 
@@ from airbyte_cdk.entrypoint import AirbyteEntrypoint from airbyte_cdk.exception_handler import assemble_uncaught_exception from airbyte_cdk.logger import AirbyteLogFormatter -from airbyte_cdk.sources import Source -from airbyte_protocol.models import ( +from airbyte_cdk.models import ( AirbyteLogMessage, AirbyteMessage, + AirbyteMessageSerializer, AirbyteStateMessage, + AirbyteStateMessageSerializer, AirbyteStreamStatus, ConfiguredAirbyteCatalog, + ConfiguredAirbyteCatalogSerializer, Level, TraceType, Type, ) +from airbyte_cdk.sources import Source +from orjson import orjson from pydantic import ValidationError as V2ValidationError +from serpyco_rs import SchemaValidationError class EntrypointOutput: @@ -53,8 +58,8 @@ def __init__(self, messages: List[str], uncaught_exception: Optional[BaseExcepti @staticmethod def _parse_message(message: str) -> AirbyteMessage: try: - return AirbyteMessage.parse_obj(json.loads(message)) - except (json.JSONDecodeError, V2ValidationError): + return AirbyteMessageSerializer.load(orjson.loads(message)) # type: ignore[no-any-return] # Serializer.load() always returns AirbyteMessage + except (orjson.JSONDecodeError, SchemaValidationError): # The platform assumes that logs that are not of AirbyteMessage format are log messages return AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message=message)) @@ -75,7 +80,7 @@ def most_recent_state(self) -> Any: state_messages = self._get_message_by_types([Type.STATE]) if not state_messages: raise ValueError("Can't provide most recent state as there are no state messages") - return state_messages[-1].state.stream + return state_messages[-1].state.stream # type: ignore[union-attr] # state has `stream` @property def logs(self) -> List[AirbyteMessage]: @@ -102,9 +107,9 @@ def catalog(self) -> AirbyteMessage: def get_stream_statuses(self, stream_name: str) -> List[AirbyteStreamStatus]: status_messages = map( - lambda message: message.trace.stream_status.status, + lambda 
message: message.trace.stream_status.status, # type: ignore filter( - lambda message: message.trace.stream_status.stream_descriptor.name == stream_name, + lambda message: message.trace.stream_status.stream_descriptor.name == stream_name, # type: ignore # callable; trace has `stream_status` self._get_trace_message_by_trace_type(TraceType.STREAM_STATUS), ), ) @@ -114,11 +119,11 @@ def _get_message_by_types(self, message_types: List[Type]) -> List[AirbyteMessag return [message for message in self._messages if message.type in message_types] def _get_trace_message_by_trace_type(self, trace_type: TraceType) -> List[AirbyteMessage]: - return [message for message in self._get_message_by_types([Type.TRACE]) if message.trace.type == trace_type] + return [message for message in self._get_message_by_types([Type.TRACE]) if message.trace.type == trace_type] # type: ignore[union-attr] # trace has `type` def is_in_logs(self, pattern: str) -> bool: """Check if any log message case-insensitive matches the pattern.""" - return any(re.search(pattern, entry.log.message, flags=re.IGNORECASE) for entry in self.logs) + return any(re.search(pattern, entry.log.message, flags=re.IGNORECASE) for entry in self.logs) # type: ignore[union-attr] # log has `message` def is_not_in_logs(self, pattern: str) -> bool: """Check if no log message matches the case-insensitive pattern.""" @@ -188,7 +193,9 @@ def read( with tempfile.TemporaryDirectory() as tmp_directory: tmp_directory_path = Path(tmp_directory) config_file = make_file(tmp_directory_path / "config.json", config) - catalog_file = make_file(tmp_directory_path / "catalog.json", catalog.model_dump_json()) + catalog_file = make_file( + tmp_directory_path / "catalog.json", orjson.dumps(ConfiguredAirbyteCatalogSerializer.dump(catalog)).decode() + ) args = [ "read", "--config", @@ -201,7 +208,8 @@ def read( [ "--state", make_file( - tmp_directory_path / "state.json", f"[{','.join([stream_state.model_dump_json() for stream_state in state])}]" + 
tmp_directory_path / "state.json", + f"[{','.join([orjson.dumps(AirbyteStateMessageSerializer.dump(stream_state)).decode() for stream_state in state])}]", ), ] ) diff --git a/airbyte-cdk/python/airbyte_cdk/test/state_builder.py b/airbyte-cdk/python/airbyte_cdk/test/state_builder.py index 0c43d4320428..50b5dbe5f793 100644 --- a/airbyte-cdk/python/airbyte_cdk/test/state_builder.py +++ b/airbyte-cdk/python/airbyte_cdk/test/state_builder.py @@ -2,7 +2,7 @@ from typing import Any, List -from airbyte_protocol.models import AirbyteStateMessage +from airbyte_cdk.models import AirbyteStateBlob, AirbyteStateMessage, AirbyteStateType, AirbyteStreamState, StreamDescriptor class StateBuilder: @@ -11,7 +11,13 @@ def __init__(self) -> None: def with_stream_state(self, stream_name: str, state: Any) -> "StateBuilder": self._state.append( - AirbyteStateMessage.parse_obj({"type": "STREAM", "stream": {"stream_state": state, "stream_descriptor": {"name": stream_name}}}) + AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_state=state if isinstance(state, AirbyteStateBlob) else AirbyteStateBlob(state), + stream_descriptor=StreamDescriptor(**{"name": stream_name}), + ), + ) ) return self diff --git a/airbyte-cdk/python/airbyte_cdk/test/utils/reading.py b/airbyte-cdk/python/airbyte_cdk/test/utils/reading.py index f8100187d4fb..2d89cb870984 100644 --- a/airbyte-cdk/python/airbyte_cdk/test/utils/reading.py +++ b/airbyte-cdk/python/airbyte_cdk/test/utils/reading.py @@ -3,9 +3,9 @@ from typing import Any, List, Mapping, Optional from airbyte_cdk import AbstractSource +from airbyte_cdk.models import AirbyteStateMessage, ConfiguredAirbyteCatalog, SyncMode from airbyte_cdk.test.catalog_builder import CatalogBuilder from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read -from airbyte_protocol.models import AirbyteStateMessage, ConfiguredAirbyteCatalog, SyncMode def catalog(stream_name: str, sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: 
diff --git a/airbyte-cdk/python/airbyte_cdk/utils/message_utils.py b/airbyte-cdk/python/airbyte_cdk/utils/message_utils.py index 37d9d1351afc..a862d4696495 100644 --- a/airbyte-cdk/python/airbyte_cdk/utils/message_utils.py +++ b/airbyte-cdk/python/airbyte_cdk/utils/message_utils.py @@ -1,18 +1,18 @@ # Copyright (c) 2024 Airbyte, Inc., all rights reserved. +from airbyte_cdk.models import AirbyteMessage, Type from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor -from airbyte_protocol.models import AirbyteMessage, Type def get_stream_descriptor(message: AirbyteMessage) -> HashableStreamDescriptor: match message.type: case Type.RECORD: - return HashableStreamDescriptor(name=message.record.stream, namespace=message.record.namespace) + return HashableStreamDescriptor(name=message.record.stream, namespace=message.record.namespace) # type: ignore[union-attr] # record has `stream` and `namespace` case Type.STATE: - if not message.state.stream or not message.state.stream.stream_descriptor: + if not message.state.stream or not message.state.stream.stream_descriptor: # type: ignore[union-attr] # state has `stream` raise ValueError("State message was not in per-stream state format, which is required for record counts.") return HashableStreamDescriptor( - name=message.state.stream.stream_descriptor.name, namespace=message.state.stream.stream_descriptor.namespace + name=message.state.stream.stream_descriptor.name, namespace=message.state.stream.stream_descriptor.namespace # type: ignore[union-attr] # state has `stream` ) case _: raise NotImplementedError(f"get_stream_descriptor is not implemented for message type '{message.type}'.") diff --git a/airbyte-cdk/python/airbyte_cdk/utils/traced_exception.py b/airbyte-cdk/python/airbyte_cdk/utils/traced_exception.py index 9bec5ac095c0..bd96ea398146 100644 --- a/airbyte-cdk/python/airbyte_cdk/utils/traced_exception.py +++ b/airbyte-cdk/python/airbyte_cdk/utils/traced_exception.py @@ -1,15 +1,15 @@ # # 
Copyright (c) 2023 Airbyte, Inc., all rights reserved. # - +import time import traceback -from datetime import datetime from typing import Optional from airbyte_cdk.models import ( AirbyteConnectionStatus, AirbyteErrorTraceMessage, AirbyteMessage, + AirbyteMessageSerializer, AirbyteTraceMessage, FailureType, Status, @@ -18,6 +18,7 @@ ) from airbyte_cdk.models import Type as MessageType from airbyte_cdk.utils.airbyte_secrets_utils import filter_secrets +from orjson import orjson class AirbyteTracedException(Exception): @@ -54,7 +55,7 @@ def as_airbyte_message(self, stream_descriptor: Optional[StreamDescriptor] = Non :param stream_descriptor is deprecated, please use the stream_description in `__init__ or `from_exception`. If many stream_descriptors are defined, the one from `as_airbyte_message` will be discarded. """ - now_millis = datetime.now().timestamp() * 1000.0 + now_millis = time.time_ns() // 1_000_000 trace_exc = self._exception or self stack_trace_str = "".join(traceback.TracebackException.from_exception(trace_exc).format()) @@ -85,7 +86,7 @@ def emit_message(self) -> None: Prints the exception as an AirbyteTraceMessage. Note that this will be called automatically on uncaught exceptions when using the airbyte_cdk entrypoint. """ - message = self.as_airbyte_message().model_dump_json(exclude_unset=True) + message = orjson.dumps(AirbyteMessageSerializer.dump(self.as_airbyte_message())).decode() filtered_message = filter_secrets(message) print(filtered_message) @@ -106,10 +107,10 @@ def as_sanitized_airbyte_message(self, stream_descriptor: Optional[StreamDescrip stream_descriptors are defined, the one from `as_sanitized_airbyte_message` will be discarded. 
""" error_message = self.as_airbyte_message(stream_descriptor=stream_descriptor) - if error_message.trace.error.message: - error_message.trace.error.message = filter_secrets(error_message.trace.error.message) - if error_message.trace.error.internal_message: - error_message.trace.error.internal_message = filter_secrets(error_message.trace.error.internal_message) - if error_message.trace.error.stack_trace: - error_message.trace.error.stack_trace = filter_secrets(error_message.trace.error.stack_trace) + if error_message.trace.error.message: # type: ignore[union-attr] # AirbyteMessage with MessageType.TRACE has AirbyteTraceMessage + error_message.trace.error.message = filter_secrets(error_message.trace.error.message) # type: ignore[union-attr] # AirbyteMessage with MessageType.TRACE has AirbyteTraceMessage + if error_message.trace.error.internal_message: # type: ignore[union-attr] # AirbyteMessage with MessageType.TRACE has AirbyteTraceMessage + error_message.trace.error.internal_message = filter_secrets(error_message.trace.error.internal_message) # type: ignore[union-attr] # AirbyteMessage with MessageType.TRACE has AirbyteTraceMessage + if error_message.trace.error.stack_trace: # type: ignore[union-attr] # AirbyteMessage with MessageType.TRACE has AirbyteTraceMessage + error_message.trace.error.stack_trace = filter_secrets(error_message.trace.error.stack_trace) # type: ignore[union-attr] # AirbyteMessage with MessageType.TRACE has AirbyteTraceMessage return error_message diff --git a/airbyte-cdk/python/cdk-migrations.md b/airbyte-cdk/python/cdk-migrations.md index 2f38fb8452fe..02ebf2e751c4 100644 --- a/airbyte-cdk/python/cdk-migrations.md +++ b/airbyte-cdk/python/cdk-migrations.md @@ -1,5 +1,36 @@ # CDK Migration Guide +## Upgrading to 5.0.0 + +Version 5.0.0 of the CDK updates the `airbyte_cdk.models` dependency to replace Pydantic v2 models with Python `dataclasses`. 
It also +updates the `airbyte-protocol-models` dependency to a version that uses dataclasses models. + +The changes to Airbyte CDK itself are backwards-compatible, but some changes are required if the connector: +- uses the `airbyte_protocol` models directly, or `airbyte_cdk.models`, which points to `airbyte_protocol` models +- uses third-party libraries, such as `pandas`, to read data from sources, which output non-native Python objects that cannot be serialized by the [orjson](https://github.com/ijl/orjson) library. + +### Updating direct usage of Pydantic based Airbyte Protocol Models + +If the connector uses Pydantic based Airbyte Protocol Models, the code will need to be updated to reflect the changes `pydantic`. +It is recommended to import protocol classes not directly by `import airbyte_protocol` statement, but from `airbyte_cdk.models` package. +It is also recommended to use `Serializers` from `airbyte_cdk.models` to manipulate the data or convert to/from JSON. + +### Updating third-party libraries + +For example, if `pandas` outputs data from the source, which has date-time `pandas.Timestamp` object in +it, [Orjson supported Types](https://github.com/ijl/orjson?tab=readme-ov-file#types), these fields should be transformed to native JSON +objects. 
+ +```python3 +# Before +yield from df.to_dict(orient="records") + +# After - Option 1 +yield orjson.loads(df.to_json(orient="records", date_format="iso", date_unit="us")) + +``` + + ## Upgrading to 4.5.0 In this release, we are no longer supporting the legacy state format in favor of the current per-stream state diff --git a/airbyte-cdk/python/poetry.lock b/airbyte-cdk/python/poetry.lock index f327a8d2bf00..4564f7891812 100644 --- a/airbyte-cdk/python/poetry.lock +++ b/airbyte-cdk/python/poetry.lock @@ -2,98 +2,113 @@ [[package]] name = "aiohappyeyeballs" -version = "2.3.5" +version = "2.4.0" description = "Happy Eyeballs for asyncio" optional = true python-versions = ">=3.8" files = [ - {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"}, - {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"}, + {file = "aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"}, + {file = "aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"}, ] [[package]] name = "aiohttp" -version = "3.10.3" +version = "3.10.5" description = "Async http client/server framework (asyncio)" optional = true python-versions = ">=3.8" files = [ - {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc36cbdedf6f259371dbbbcaae5bb0e95b879bc501668ab6306af867577eb5db"}, - {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85466b5a695c2a7db13eb2c200af552d13e6a9313d7fa92e4ffe04a2c0ea74c1"}, - {file = "aiohttp-3.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:71bb1d97bfe7e6726267cea169fdf5df7658831bb68ec02c9c6b9f3511e108bb"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:baec1eb274f78b2de54471fc4c69ecbea4275965eab4b556ef7a7698dee18bf2"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13031e7ec1188274bad243255c328cc3019e36a5a907978501256000d57a7201"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2bbc55a964b8eecb341e492ae91c3bd0848324d313e1e71a27e3d96e6ee7e8e8"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8cc0564b286b625e673a2615ede60a1704d0cbbf1b24604e28c31ed37dc62aa"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f817a54059a4cfbc385a7f51696359c642088710e731e8df80d0607193ed2b73"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8542c9e5bcb2bd3115acdf5adc41cda394e7360916197805e7e32b93d821ef93"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:671efce3a4a0281060edf9a07a2f7e6230dca3a1cbc61d110eee7753d28405f7"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0974f3b5b0132edcec92c3306f858ad4356a63d26b18021d859c9927616ebf27"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:44bb159b55926b57812dca1b21c34528e800963ffe130d08b049b2d6b994ada7"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6ae9ae382d1c9617a91647575255ad55a48bfdde34cc2185dd558ce476bf16e9"}, - {file = "aiohttp-3.10.3-cp310-cp310-win32.whl", hash = "sha256:aed12a54d4e1ee647376fa541e1b7621505001f9f939debf51397b9329fd88b9"}, - {file = "aiohttp-3.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:b51aef59370baf7444de1572f7830f59ddbabd04e5292fa4218d02f085f8d299"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e021c4c778644e8cdc09487d65564265e6b149896a17d7c0f52e9a088cc44e1b"}, - {file = 
"aiohttp-3.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24fade6dae446b183e2410a8628b80df9b7a42205c6bfc2eff783cbeedc224a2"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bc8e9f15939dacb0e1f2d15f9c41b786051c10472c7a926f5771e99b49a5957f"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5a9ec959b5381271c8ec9310aae1713b2aec29efa32e232e5ef7dcca0df0279"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a5d0ea8a6467b15d53b00c4e8ea8811e47c3cc1bdbc62b1aceb3076403d551f"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9ed607dbbdd0d4d39b597e5bf6b0d40d844dfb0ac6a123ed79042ef08c1f87e"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e66d5b506832e56add66af88c288c1d5ba0c38b535a1a59e436b300b57b23e"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fda91ad797e4914cca0afa8b6cccd5d2b3569ccc88731be202f6adce39503189"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:61ccb867b2f2f53df6598eb2a93329b5eee0b00646ee79ea67d68844747a418e"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d881353264e6156f215b3cb778c9ac3184f5465c2ece5e6fce82e68946868ef"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b031ce229114825f49cec4434fa844ccb5225e266c3e146cb4bdd025a6da52f1"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5337cc742a03f9e3213b097abff8781f79de7190bbfaa987bd2b7ceb5bb0bdec"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab3361159fd3dcd0e48bbe804006d5cfb074b382666e6c064112056eb234f1a9"}, - {file = "aiohttp-3.10.3-cp311-cp311-win32.whl", hash = 
"sha256:05d66203a530209cbe40f102ebaac0b2214aba2a33c075d0bf825987c36f1f0b"}, - {file = "aiohttp-3.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:70b4a4984a70a2322b70e088d654528129783ac1ebbf7dd76627b3bd22db2f17"}, - {file = "aiohttp-3.10.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:166de65e2e4e63357cfa8417cf952a519ac42f1654cb2d43ed76899e2319b1ee"}, - {file = "aiohttp-3.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7084876352ba3833d5d214e02b32d794e3fd9cf21fdba99cff5acabeb90d9806"}, - {file = "aiohttp-3.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d98c604c93403288591d7d6d7d6cc8a63459168f8846aeffd5b3a7f3b3e5e09"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d73b073a25a0bb8bf014345374fe2d0f63681ab5da4c22f9d2025ca3e3ea54fc"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8da6b48c20ce78f5721068f383e0e113dde034e868f1b2f5ee7cb1e95f91db57"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a9dcdccf50284b1b0dc72bc57e5bbd3cc9bf019060dfa0668f63241ccc16aa7"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56fb94bae2be58f68d000d046172d8b8e6b1b571eb02ceee5535e9633dcd559c"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf75716377aad2c718cdf66451c5cf02042085d84522aec1f9246d3e4b8641a6"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6c51ed03e19c885c8e91f574e4bbe7381793f56f93229731597e4a499ffef2a5"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b84857b66fa6510a163bb083c1199d1ee091a40163cfcbbd0642495fed096204"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c124b9206b1befe0491f48185fd30a0dd51b0f4e0e7e43ac1236066215aff272"}, - {file = 
"aiohttp-3.10.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3461d9294941937f07bbbaa6227ba799bc71cc3b22c40222568dc1cca5118f68"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:08bd0754d257b2db27d6bab208c74601df6f21bfe4cb2ec7b258ba691aac64b3"}, - {file = "aiohttp-3.10.3-cp312-cp312-win32.whl", hash = "sha256:7f9159ae530297f61a00116771e57516f89a3de6ba33f314402e41560872b50a"}, - {file = "aiohttp-3.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:e1128c5d3a466279cb23c4aa32a0f6cb0e7d2961e74e9e421f90e74f75ec1edf"}, - {file = "aiohttp-3.10.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d1100e68e70eb72eadba2b932b185ebf0f28fd2f0dbfe576cfa9d9894ef49752"}, - {file = "aiohttp-3.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a541414578ff47c0a9b0b8b77381ea86b0c8531ab37fc587572cb662ccd80b88"}, - {file = "aiohttp-3.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d5548444ef60bf4c7b19ace21f032fa42d822e516a6940d36579f7bfa8513f9c"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ba2e838b5e6a8755ac8297275c9460e729dc1522b6454aee1766c6de6d56e5e"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48665433bb59144aaf502c324694bec25867eb6630fcd831f7a893ca473fcde4"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bac352fceed158620ce2d701ad39d4c1c76d114255a7c530e057e2b9f55bdf9f"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b0f670502100cdc567188c49415bebba947eb3edaa2028e1a50dd81bd13363f"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43b09f38a67679e32d380fe512189ccb0b25e15afc79b23fbd5b5e48e4fc8fd9"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:cd788602e239ace64f257d1c9d39898ca65525583f0fbf0988bcba19418fe93f"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:214277dcb07ab3875f17ee1c777d446dcce75bea85846849cc9d139ab8f5081f"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:32007fdcaab789689c2ecaaf4b71f8e37bf012a15cd02c0a9db8c4d0e7989fa8"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:123e5819bfe1b87204575515cf448ab3bf1489cdeb3b61012bde716cda5853e7"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:812121a201f0c02491a5db335a737b4113151926a79ae9ed1a9f41ea225c0e3f"}, - {file = "aiohttp-3.10.3-cp38-cp38-win32.whl", hash = "sha256:b97dc9a17a59f350c0caa453a3cb35671a2ffa3a29a6ef3568b523b9113d84e5"}, - {file = "aiohttp-3.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:3731a73ddc26969d65f90471c635abd4e1546a25299b687e654ea6d2fc052394"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38d91b98b4320ffe66efa56cb0f614a05af53b675ce1b8607cdb2ac826a8d58e"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9743fa34a10a36ddd448bba8a3adc2a66a1c575c3c2940301bacd6cc896c6bf1"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7c126f532caf238031c19d169cfae3c6a59129452c990a6e84d6e7b198a001dc"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:926e68438f05703e500b06fe7148ef3013dd6f276de65c68558fa9974eeb59ad"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:434b3ab75833accd0b931d11874e206e816f6e6626fd69f643d6a8269cd9166a"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d35235a44ec38109b811c3600d15d8383297a8fab8e3dec6147477ec8636712a"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:59c489661edbd863edb30a8bd69ecb044bd381d1818022bc698ba1b6f80e5dd1"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50544fe498c81cb98912afabfc4e4d9d85e89f86238348e3712f7ca6a2f01dab"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:09bc79275737d4dc066e0ae2951866bb36d9c6b460cb7564f111cc0427f14844"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:af4dbec58e37f5afff4f91cdf235e8e4b0bd0127a2a4fd1040e2cad3369d2f06"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b22cae3c9dd55a6b4c48c63081d31c00fc11fa9db1a20c8a50ee38c1a29539d2"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ba562736d3fbfe9241dad46c1a8994478d4a0e50796d80e29d50cabe8fbfcc3f"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f25d6c4e82d7489be84f2b1c8212fafc021b3731abdb61a563c90e37cced3a21"}, - {file = "aiohttp-3.10.3-cp39-cp39-win32.whl", hash = "sha256:b69d832e5f5fa15b1b6b2c8eb6a9fd2c0ec1fd7729cb4322ed27771afc9fc2ac"}, - {file = "aiohttp-3.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:673bb6e3249dc8825df1105f6ef74e2eab779b7ff78e96c15cadb78b04a83752"}, - {file = "aiohttp-3.10.3.tar.gz", hash = "sha256:21650e7032cc2d31fc23d353d7123e771354f2a3d5b05a5647fc30fea214e696"}, + {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:18a01eba2574fb9edd5f6e5fb25f66e6ce061da5dab5db75e13fe1558142e0a3"}, + {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94fac7c6e77ccb1ca91e9eb4cb0ac0270b9fb9b289738654120ba8cebb1189c6"}, + {file = "aiohttp-3.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f1f1c75c395991ce9c94d3e4aa96e5c59c8356a15b1c9231e783865e2772699"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4f7acae3cf1a2a2361ec4c8e787eaaa86a94171d2417aae53c0cca6ca3118ff6"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94c4381ffba9cc508b37d2e536b418d5ea9cfdc2848b9a7fea6aebad4ec6aac1"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c31ad0c0c507894e3eaa843415841995bf8de4d6b2d24c6e33099f4bc9fc0d4f"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0912b8a8fadeb32ff67a3ed44249448c20148397c1ed905d5dac185b4ca547bb"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d93400c18596b7dc4794d48a63fb361b01a0d8eb39f28800dc900c8fbdaca91"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d00f3c5e0d764a5c9aa5a62d99728c56d455310bcc288a79cab10157b3af426f"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d742c36ed44f2798c8d3f4bc511f479b9ceef2b93f348671184139e7d708042c"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:814375093edae5f1cb31e3407997cf3eacefb9010f96df10d64829362ae2df69"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8224f98be68a84b19f48e0bdc14224b5a71339aff3a27df69989fa47d01296f3"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9a487ef090aea982d748b1b0d74fe7c3950b109df967630a20584f9a99c0683"}, + {file = "aiohttp-3.10.5-cp310-cp310-win32.whl", hash = "sha256:d9ef084e3dc690ad50137cc05831c52b6ca428096e6deb3c43e95827f531d5ef"}, + {file = "aiohttp-3.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:66bf9234e08fe561dccd62083bf67400bdbf1c67ba9efdc3dac03650e97c6088"}, + {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c6a4e5e40156d72a40241a25cc226051c0a8d816610097a8e8f517aeacd59a2"}, + {file = 
"aiohttp-3.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c634a3207a5445be65536d38c13791904fda0748b9eabf908d3fe86a52941cf"}, + {file = "aiohttp-3.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4aff049b5e629ef9b3e9e617fa6e2dfeda1bf87e01bcfecaf3949af9e210105e"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1942244f00baaacaa8155eca94dbd9e8cc7017deb69b75ef67c78e89fdad3c77"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04a1f2a65ad2f93aa20f9ff9f1b672bf912413e5547f60749fa2ef8a644e061"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f2bfc0032a00405d4af2ba27f3c429e851d04fad1e5ceee4080a1c570476697"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:424ae21498790e12eb759040bbb504e5e280cab64693d14775c54269fd1d2bb7"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:975218eee0e6d24eb336d0328c768ebc5d617609affaca5dbbd6dd1984f16ed0"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4120d7fefa1e2d8fb6f650b11489710091788de554e2b6f8347c7a20ceb003f5"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b90078989ef3fc45cf9221d3859acd1108af7560c52397ff4ace8ad7052a132e"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ba5a8b74c2a8af7d862399cdedce1533642fa727def0b8c3e3e02fcb52dca1b1"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:02594361128f780eecc2a29939d9dfc870e17b45178a867bf61a11b2a4367277"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8fb4fc029e135859f533025bc82047334e24b0d489e75513144f25408ecaf058"}, + {file = "aiohttp-3.10.5-cp311-cp311-win32.whl", hash = 
"sha256:e1ca1ef5ba129718a8fc827b0867f6aa4e893c56eb00003b7367f8a733a9b072"}, + {file = "aiohttp-3.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:349ef8a73a7c5665cca65c88ab24abe75447e28aa3bc4c93ea5093474dfdf0ff"}, + {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:305be5ff2081fa1d283a76113b8df7a14c10d75602a38d9f012935df20731487"}, + {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3a1c32a19ee6bbde02f1cb189e13a71b321256cc1d431196a9f824050b160d5a"}, + {file = "aiohttp-3.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:61645818edd40cc6f455b851277a21bf420ce347baa0b86eaa41d51ef58ba23d"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c225286f2b13bab5987425558baa5cbdb2bc925b2998038fa028245ef421e75"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ba01ebc6175e1e6b7275c907a3a36be48a2d487549b656aa90c8a910d9f3178"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8eaf44ccbc4e35762683078b72bf293f476561d8b68ec8a64f98cf32811c323e"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c43eb1ab7cbf411b8e387dc169acb31f0ca0d8c09ba63f9eac67829585b44f"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de7a5299827253023c55ea549444e058c0eb496931fa05d693b95140a947cb73"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4790f0e15f00058f7599dab2b206d3049d7ac464dc2e5eae0e93fa18aee9e7bf"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:44b324a6b8376a23e6ba25d368726ee3bc281e6ab306db80b5819999c737d820"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0d277cfb304118079e7044aad0b76685d30ecb86f83a0711fc5fb257ffe832ca"}, + {file = 
"aiohttp-3.10.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:54d9ddea424cd19d3ff6128601a4a4d23d54a421f9b4c0fff740505813739a91"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4f1c9866ccf48a6df2b06823e6ae80573529f2af3a0992ec4fe75b1a510df8a6"}, + {file = "aiohttp-3.10.5-cp312-cp312-win32.whl", hash = "sha256:dc4826823121783dccc0871e3f405417ac116055bf184ac04c36f98b75aacd12"}, + {file = "aiohttp-3.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:22c0a23a3b3138a6bf76fc553789cb1a703836da86b0f306b6f0dc1617398abc"}, + {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7f6b639c36734eaa80a6c152a238242bedcee9b953f23bb887e9102976343092"}, + {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f29930bc2921cef955ba39a3ff87d2c4398a0394ae217f41cb02d5c26c8b1b77"}, + {file = "aiohttp-3.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f489a2c9e6455d87eabf907ac0b7d230a9786be43fbe884ad184ddf9e9c1e385"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:123dd5b16b75b2962d0fff566effb7a065e33cd4538c1692fb31c3bda2bfb972"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b98e698dc34966e5976e10bbca6d26d6724e6bdea853c7c10162a3235aba6e16"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3b9162bab7e42f21243effc822652dc5bb5e8ff42a4eb62fe7782bcbcdfacf6"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1923a5c44061bffd5eebeef58cecf68096e35003907d8201a4d0d6f6e387ccaa"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55f011da0a843c3d3df2c2cf4e537b8070a419f891c930245f05d329c4b0689"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:afe16a84498441d05e9189a15900640a2d2b5e76cf4efe8cbb088ab4f112ee57"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8112fb501b1e0567a1251a2fd0747baae60a4ab325a871e975b7bb67e59221f"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e72589da4c90337837fdfe2026ae1952c0f4a6e793adbbfbdd40efed7c63599"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4d46c7b4173415d8e583045fbc4daa48b40e31b19ce595b8d92cf639396c15d5"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33e6bc4bab477c772a541f76cd91e11ccb6d2efa2b8d7d7883591dfb523e5987"}, + {file = "aiohttp-3.10.5-cp313-cp313-win32.whl", hash = "sha256:c58c6837a2c2a7cf3133983e64173aec11f9c2cd8e87ec2fdc16ce727bcf1a04"}, + {file = "aiohttp-3.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:38172a70005252b6893088c0f5e8a47d173df7cc2b2bd88650957eb84fcf5022"}, + {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f6f18898ace4bcd2d41a122916475344a87f1dfdec626ecde9ee802a711bc569"}, + {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5ede29d91a40ba22ac1b922ef510aab871652f6c88ef60b9dcdf773c6d32ad7a"}, + {file = "aiohttp-3.10.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:673f988370f5954df96cc31fd99c7312a3af0a97f09e407399f61583f30da9bc"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58718e181c56a3c02d25b09d4115eb02aafe1a732ce5714ab70326d9776457c3"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b38b1570242fbab8d86a84128fb5b5234a2f70c2e32f3070143a6d94bc854cf"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:074d1bff0163e107e97bd48cad9f928fa5a3eb4b9d33366137ffce08a63e37fe"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fd31f176429cecbc1ba499d4aba31aaccfea488f418d60376b911269d3b883c5"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7384d0b87d4635ec38db9263e6a3f1eb609e2e06087f0aa7f63b76833737b471"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8989f46f3d7ef79585e98fa991e6ded55d2f48ae56d2c9fa5e491a6e4effb589"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c83f7a107abb89a227d6c454c613e7606c12a42b9a4ca9c5d7dad25d47c776ae"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cde98f323d6bf161041e7627a5fd763f9fd829bcfcd089804a5fdce7bb6e1b7d"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:676f94c5480d8eefd97c0c7e3953315e4d8c2b71f3b49539beb2aa676c58272f"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2d21ac12dc943c68135ff858c3a989f2194a709e6e10b4c8977d7fcd67dfd511"}, + {file = "aiohttp-3.10.5-cp38-cp38-win32.whl", hash = "sha256:17e997105bd1a260850272bfb50e2a328e029c941c2708170d9d978d5a30ad9a"}, + {file = "aiohttp-3.10.5-cp38-cp38-win_amd64.whl", hash = "sha256:1c19de68896747a2aa6257ae4cf6ef59d73917a36a35ee9d0a6f48cff0f94db8"}, + {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7e2fe37ac654032db1f3499fe56e77190282534810e2a8e833141a021faaab0e"}, + {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5bf3ead3cb66ab990ee2561373b009db5bc0e857549b6c9ba84b20bc462e172"}, + {file = "aiohttp-3.10.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b2c16a919d936ca87a3c5f0e43af12a89a3ce7ccbce59a2d6784caba945b68b"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad146dae5977c4dd435eb31373b3fe9b0b1bf26858c6fc452bf6af394067e10b"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8c5c6fa16412b35999320f5c9690c0f554392dc222c04e559217e0f9ae244b92"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95c4dc6f61d610bc0ee1edc6f29d993f10febfe5b76bb470b486d90bbece6b22"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da452c2c322e9ce0cfef392e469a26d63d42860f829026a63374fde6b5c5876f"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:898715cf566ec2869d5cb4d5fb4be408964704c46c96b4be267442d265390f32"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:391cc3a9c1527e424c6865e087897e766a917f15dddb360174a70467572ac6ce"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:380f926b51b92d02a34119d072f178d80bbda334d1a7e10fa22d467a66e494db"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce91db90dbf37bb6fa0997f26574107e1b9d5ff939315247b7e615baa8ec313b"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9093a81e18c45227eebe4c16124ebf3e0d893830c6aca7cc310bfca8fe59d857"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ee40b40aa753d844162dcc80d0fe256b87cba48ca0054f64e68000453caead11"}, + {file = "aiohttp-3.10.5-cp39-cp39-win32.whl", hash = "sha256:03f2645adbe17f274444953bdea69f8327e9d278d961d85657cb0d06864814c1"}, + {file = "aiohttp-3.10.5-cp39-cp39-win_amd64.whl", hash = "sha256:d17920f18e6ee090bdd3d0bfffd769d9f2cb4c8ffde3eb203777a3895c128862"}, + {file = "aiohttp-3.10.5.tar.gz", hash = "sha256:f071854b47d39591ce9a17981c46790acb30518e2f83dfca8db2dfa091178691"}, ] [package.dependencies] @@ -123,19 +138,16 @@ files = [ frozenlist = ">=1.1.0" [[package]] -name = "airbyte-protocol-models-pdv2" -version = "0.12.2" -description = "Declares the Airbyte Protocol." 
+name = "airbyte-protocol-models-dataclasses" +version = "0.13.0" +description = "Declares the Airbyte Protocol using Python Dataclasses. Dataclasses in Python have less performance overhead compared to Pydantic models, making them a more efficient choice for scenarios where speed and memory usage are critical" optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models_pdv2-0.12.2-py3-none-any.whl", hash = "sha256:8b3f9d0388928547cdf2e9134c0d589e4bcaa6f63bf71a21299f6824bfb7ad0e"}, - {file = "airbyte_protocol_models_pdv2-0.12.2.tar.gz", hash = "sha256:130c9ab289f3f53749ce63ff1abbfb67a44b7e5bd2794865315a2976138b672b"}, + {file = "airbyte_protocol_models_dataclasses-0.13.0-py3-none-any.whl", hash = "sha256:0aedb99ffc4f9aab0ce91bba2c292fa17cd8fd4b42eeba196d6a16c20bbbd7a5"}, + {file = "airbyte_protocol_models_dataclasses-0.13.0.tar.gz", hash = "sha256:72e67850d661e2808406aec5839b3158ebb94d3553b798dbdae1b4a278548d2f"}, ] -[package.dependencies] -pydantic = ">=2.7.2,<3.0.0" - [[package]] name = "alabaster" version = "0.7.16" @@ -169,6 +181,28 @@ files = [ {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, ] +[[package]] +name = "anyio" +version = "4.4.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = true +python-versions = ">=3.8" +files = [ + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test 
= ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + [[package]] name = "async-timeout" version = "4.0.3" @@ -193,6 +227,17 @@ files = [ {file = "asyncio-3.4.3.tar.gz", hash = "sha256:83360ff8bc97980e4ff25c964c7bd3923d333d177aa4f7fb736b019f26c7cb41"}, ] +[[package]] +name = "attributes-doc" +version = "0.4.0" +description = "PEP 224 implementation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "attributes-doc-0.4.0.tar.gz", hash = "sha256:b1576c94a714e9fc2c65c47cf10d0c8e1a5f7c4f5ae7f69006be108d95cbfbfb"}, + {file = "attributes_doc-0.4.0-py2.py3-none-any.whl", hash = "sha256:4c3007d9e58f3a6cb4b9c614c4d4ce2d92161581f28e594ddd8241cc3a113bdd"}, +] + [[package]] name = "attrs" version = "24.2.0" @@ -347,24 +392,24 @@ files = [ [[package]] name = "cachetools" -version = "5.4.0" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, - {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] name = "cattrs" -version = "23.2.3" +version = "24.1.0" description = "Composable complex class support for attrs and dataclasses." 
optional = false python-versions = ">=3.8" files = [ - {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, - {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, + {file = "cattrs-24.1.0-py3-none-any.whl", hash = "sha256:043bb8af72596432a7df63abcff0055ac0f198a4d2e95af8db5a936a7074a761"}, + {file = "cattrs-24.1.0.tar.gz", hash = "sha256:8274f18b253bf7674a43da851e3096370d67088165d23138b04a1c04c8eaf48e"}, ] [package.dependencies] @@ -376,6 +421,7 @@ typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_ver bson = ["pymongo (>=4.4.0)"] cbor2 = ["cbor2 (>=5.4.6)"] msgpack = ["msgpack (>=1.0.5)"] +msgspec = ["msgspec (>=0.18.5)"] orjson = ["orjson (>=3.9.2)"] pyyaml = ["pyyaml (>=6.0)"] tomlkit = ["tomlkit (>=0.11.8)"] @@ -383,13 +429,13 @@ ujson = ["ujson (>=5.7.0)"] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -597,13 +643,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "codeflash" -version = "0.6.17" +version = "0.6.19" description = "Client for codeflash.ai - automatic code performance optimization, powered by AI" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "codeflash-0.6.17-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2d767d1bf035fbfd95d4f8d62a8800ceee833cfff919ea65e6ec406e618228b3"}, - {file = "codeflash-0.6.17.tar.gz", hash = "sha256:96afca1263230c0dd0c6fc3a4601e2680fb25ffa43657310bde4cbaeb83b9000"}, + {file = "codeflash-0.6.19-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f4e19b78ef4dfbbbaeab6d953422027a6e9120545de0657e70adfe23a3fdb51a"}, + {file = "codeflash-0.6.19.tar.gz", hash = "sha256:0cb61aec565b286b1d5fcaa64e55f3e91e03bd6154e03c718c086186c30040da"}, ] [package.dependencies] @@ -662,66 +708,87 @@ files = [ [[package]] name = "contourpy" -version = "1.2.1" +version = "1.3.0" description = "Python library for calculating contours of 2D quadrilateral grids" optional = true python-versions = ">=3.9" files = [ - {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"}, - {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"}, - {file = 
"contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"}, - {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"}, - {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"}, - {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"}, - {file = "contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"}, - {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"}, - {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"}, - {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"}, - {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"}, - {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"}, - {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"}, - {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"}, - {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"}, - {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"}, - {file = 
"contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"}, - {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"}, - {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"}, - {file = "contourpy-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6834cbd983b19f06908b45bfc2dad6ac9479ae04abe923a275b5f48f1a186b"}, - {file = "contourpy-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d59e739ab0e3520e62a26c60707cc3ab0365d2f8fecea74bfe4de72dc56388f"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3db01f59fdcbce5b22afad19e390260d6d0222f35a1023d9adc5690a889364"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a12a813949e5066148712a0626895c26b2578874e4cc63160bb007e6df3436fe"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe0ccca550bb8e5abc22f530ec0466136379c01321fd94f30a22231e8a48d985"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1d59258c3c67c865435d8fbeb35f8c59b8bef3d6f46c1f29f6123556af28445"}, - {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f32c38afb74bd98ce26de7cc74a67b40afb7b05aae7b42924ea990d51e4dac02"}, - {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d31a63bc6e6d87f77d71e1abbd7387ab817a66733734883d1fc0021ed9bfa083"}, - {file = "contourpy-1.2.1-cp39-cp39-win32.whl", hash = "sha256:ddcb8581510311e13421b1f544403c16e901c4e8f09083c881fab2be80ee31ba"}, - {file = "contourpy-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:10a37ae557aabf2509c79715cd20b62e4c7c28b8cd62dd7d99e5ed3ce28c3fd9"}, - {file = 
"contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"}, - {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"}, - {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"}, - {file = "contourpy-1.2.1.tar.gz", hash = "sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"}, + {file = "contourpy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:880ea32e5c774634f9fcd46504bf9f080a41ad855f4fef54f5380f5133d343c7"}, + {file = "contourpy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76c905ef940a4474a6289c71d53122a4f77766eef23c03cd57016ce19d0f7b42"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f8557cbb07415a4d6fa191f20fd9d2d9eb9c0b61d1b2f52a8926e43c6e9af7"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36f965570cff02b874773c49bfe85562b47030805d7d8360748f3eca570f4cab"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cacd81e2d4b6f89c9f8a5b69b86490152ff39afc58a95af002a398273e5ce589"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69375194457ad0fad3a839b9e29aa0b0ed53bb54db1bfb6c3ae43d111c31ce41"}, + {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a52040312b1a858b5e31ef28c2e865376a386c60c0e248370bbea2d3f3b760d"}, + {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3faeb2998e4fcb256542e8a926d08da08977f7f5e62cf733f3c211c2a5586223"}, + {file = "contourpy-1.3.0-cp310-cp310-win32.whl", hash = "sha256:36e0cff201bcb17a0a8ecc7f454fe078437fa6bda730e695a92f2d9932bd507f"}, + {file 
= "contourpy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:87ddffef1dbe5e669b5c2440b643d3fdd8622a348fe1983fad7a0f0ccb1cd67b"}, + {file = "contourpy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fa4c02abe6c446ba70d96ece336e621efa4aecae43eaa9b030ae5fb92b309ad"}, + {file = "contourpy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:834e0cfe17ba12f79963861e0f908556b2cedd52e1f75e6578801febcc6a9f49"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbc4c3217eee163fa3984fd1567632b48d6dfd29216da3ded3d7b844a8014a66"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4865cd1d419e0c7a7bf6de1777b185eebdc51470800a9f42b9e9decf17762081"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:303c252947ab4b14c08afeb52375b26781ccd6a5ccd81abcdfc1fafd14cf93c1"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637f674226be46f6ba372fd29d9523dd977a291f66ab2a74fbeb5530bb3f445d"}, + {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76a896b2f195b57db25d6b44e7e03f221d32fe318d03ede41f8b4d9ba1bff53c"}, + {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e1fd23e9d01591bab45546c089ae89d926917a66dceb3abcf01f6105d927e2cb"}, + {file = "contourpy-1.3.0-cp311-cp311-win32.whl", hash = "sha256:d402880b84df3bec6eab53cd0cf802cae6a2ef9537e70cf75e91618a3801c20c"}, + {file = "contourpy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:6cb6cc968059db9c62cb35fbf70248f40994dfcd7aa10444bbf8b3faeb7c2d67"}, + {file = "contourpy-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:570ef7cf892f0afbe5b2ee410c507ce12e15a5fa91017a0009f79f7d93a1268f"}, + {file = "contourpy-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da84c537cb8b97d153e9fb208c221c45605f73147bd4cadd23bdae915042aad6"}, + {file = 
"contourpy-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0be4d8425bfa755e0fd76ee1e019636ccc7c29f77a7c86b4328a9eb6a26d0639"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c0da700bf58f6e0b65312d0a5e695179a71d0163957fa381bb3c1f72972537c"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb8b141bb00fa977d9122636b16aa67d37fd40a3d8b52dd837e536d64b9a4d06"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3634b5385c6716c258d0419c46d05c8aa7dc8cb70326c9a4fb66b69ad2b52e09"}, + {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dce35502151b6bd35027ac39ba6e5a44be13a68f55735c3612c568cac3805fd"}, + {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea348f053c645100612b333adc5983d87be69acdc6d77d3169c090d3b01dc35"}, + {file = "contourpy-1.3.0-cp312-cp312-win32.whl", hash = "sha256:90f73a5116ad1ba7174341ef3ea5c3150ddf20b024b98fb0c3b29034752c8aeb"}, + {file = "contourpy-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:b11b39aea6be6764f84360fce6c82211a9db32a7c7de8fa6dd5397cf1d079c3b"}, + {file = "contourpy-1.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3e1c7fa44aaae40a2247e2e8e0627f4bea3dd257014764aa644f319a5f8600e3"}, + {file = "contourpy-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:364174c2a76057feef647c802652f00953b575723062560498dc7930fc9b1cb7"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32b238b3b3b649e09ce9aaf51f0c261d38644bdfa35cbaf7b263457850957a84"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d51fca85f9f7ad0b65b4b9fe800406d0d77017d7270d31ec3fb1cc07358fdea0"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:732896af21716b29ab3e988d4ce14bc5133733b85956316fb0c56355f398099b"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d73f659398a0904e125280836ae6f88ba9b178b2fed6884f3b1f95b989d2c8da"}, + {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6c7c2408b7048082932cf4e641fa3b8ca848259212f51c8c59c45aa7ac18f14"}, + {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f317576606de89da6b7e0861cf6061f6146ead3528acabff9236458a6ba467f8"}, + {file = "contourpy-1.3.0-cp313-cp313-win32.whl", hash = "sha256:31cd3a85dbdf1fc002280c65caa7e2b5f65e4a973fcdf70dd2fdcb9868069294"}, + {file = "contourpy-1.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4553c421929ec95fb07b3aaca0fae668b2eb5a5203d1217ca7c34c063c53d087"}, + {file = "contourpy-1.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:345af746d7766821d05d72cb8f3845dfd08dd137101a2cb9b24de277d716def8"}, + {file = "contourpy-1.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3bb3808858a9dc68f6f03d319acd5f1b8a337e6cdda197f02f4b8ff67ad2057b"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:420d39daa61aab1221567b42eecb01112908b2cab7f1b4106a52caaec8d36973"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d63ee447261e963af02642ffcb864e5a2ee4cbfd78080657a9880b8b1868e18"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167d6c890815e1dac9536dca00828b445d5d0df4d6a8c6adb4a7ec3166812fa8"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:710a26b3dc80c0e4febf04555de66f5fd17e9cf7170a7b08000601a10570bda6"}, + {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:75ee7cb1a14c617f34a51d11fa7524173e56551646828353c4af859c56b766e2"}, + {file = 
"contourpy-1.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:33c92cdae89ec5135d036e7218e69b0bb2851206077251f04a6c4e0e21f03927"}, + {file = "contourpy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a11077e395f67ffc2c44ec2418cfebed032cd6da3022a94fc227b6faf8e2acb8"}, + {file = "contourpy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8134301d7e204c88ed7ab50028ba06c683000040ede1d617298611f9dc6240c"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12968fdfd5bb45ffdf6192a590bd8ddd3ba9e58360b29683c6bb71a7b41edca"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2a0fc506eccaaa7595b7e1418951f213cf8255be2600f1ea1b61e46a60c55f"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfb5c62ce023dfc410d6059c936dcf96442ba40814aefbfa575425a3a7f19dc"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a32389b06b82c2fdd68276148d7b9275b5f5cf13e5417e4252f6d1a34f72a2"}, + {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94e848a6b83da10898cbf1311a815f770acc9b6a3f2d646f330d57eb4e87592e"}, + {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d78ab28a03c854a873787a0a42254a0ccb3cb133c672f645c9f9c8f3ae9d0800"}, + {file = "contourpy-1.3.0-cp39-cp39-win32.whl", hash = "sha256:81cb5ed4952aae6014bc9d0421dec7c5835c9c8c31cdf51910b708f548cf58e5"}, + {file = "contourpy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:14e262f67bd7e6eb6880bc564dcda30b15e351a594657e55b7eec94b6ef72843"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe41b41505a5a33aeaed2a613dccaeaa74e0e3ead6dd6fd3a118fb471644fd6c"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:eca7e17a65f72a5133bdbec9ecf22401c62bcf4821361ef7811faee695799779"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ec4dc6bf570f5b22ed0d7efba0dfa9c5b9e0431aeea7581aa217542d9e809a4"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:00ccd0dbaad6d804ab259820fa7cb0b8036bda0686ef844d24125d8287178ce0"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca947601224119117f7c19c9cdf6b3ab54c5726ef1d906aa4a69dfb6dd58102"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ec93afeb848a0845a18989da3beca3eec2c0f852322efe21af1931147d12cb"}, + {file = "contourpy-1.3.0.tar.gz", hash = "sha256:7ffa0db17717a8ffb127efd0c95a4362d996b892c2904db72428d5b52e1938a4"}, ] [package.dependencies] -numpy = ">=1.20" +numpy = ">=1.23" [package.extras] bokeh = ["bokeh", "selenium"] docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] -mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.8.0)", "types-Pillow"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.11.1)", "types-Pillow"] test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] -test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] +test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"] [[package]] name = "coverage" @@ -1350,6 +1417,63 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = true +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal 
low-level HTTP client." +optional = true +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." +optional = true +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "humanize" version = "4.10.0" @@ -1366,13 +1490,13 @@ tests = ["freezegun", "pytest", "pytest-cov"] [[package]] name = "idna" -version = "3.7" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = 
"sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] @@ -1824,16 +1948,17 @@ six = "*" [[package]] name = "langsmith" -version = "0.1.99" +version = "0.1.107" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = true python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.99-py3-none-any.whl", hash = "sha256:ef8d1d74a2674c514aa429b0171a9fbb661207dc3835142cca0e8f1bf97b26b0"}, - {file = "langsmith-0.1.99.tar.gz", hash = "sha256:b5c6a1f158abda61600a4a445081ee848b4a28b758d91f2793dc02aeffafcaf1"}, + {file = "langsmith-0.1.107-py3-none-any.whl", hash = "sha256:ddd0c846980474e271a553e9c220122e32d1f2ce877cc87d39ecd86726b9e78c"}, + {file = "langsmith-0.1.107.tar.gz", hash = "sha256:f44de0a5f199381d0b518ecbe295d541c44ff33d13f18098ecc54a4547eccb3f"}, ] [package.dependencies] +httpx = ">=0.23.0,<1" orjson = ">=3.9.14,<4.0.0" pydantic = [ {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, @@ -2057,13 +2182,13 @@ source = ["Cython (>=3.0.11)"] [[package]] name = "markdown" -version = "3.6" +version = "3.7" description = "Python implementation of John Gruber's Markdown." optional = true python-versions = ">=3.8" files = [ - {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, - {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, + {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, + {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, ] [package.extras] @@ -2167,13 +2292,13 @@ files = [ [[package]] name = "marshmallow" -version = "3.21.3" +version = "3.22.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = true python-versions = ">=3.8" files = [ - {file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"}, - {file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"}, + {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, + {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, ] [package.dependencies] @@ -2181,45 +2306,56 @@ packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "pytz", "simplejson"] [[package]] name = "matplotlib" -version = "3.9.1.post1" +version = "3.9.2" description = "Python plotting package" optional = true python-versions = ">=3.9" files = [ - {file = "matplotlib-3.9.1.post1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3779ad3e8b72df22b8a622c5796bbcfabfa0069b835412e3c1dec8ee3de92d0c"}, - {file = "matplotlib-3.9.1.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec400340f8628e8e2260d679078d4e9b478699f386e5cc8094e80a1cb0039c7c"}, - {file = "matplotlib-3.9.1.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82c18791b8862ea095081f745b81f896b011c5a5091678fb33204fef641476af"}, - {file = "matplotlib-3.9.1.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:621a628389c09a6b9f609a238af8e66acecece1cfa12febc5fe4195114ba7446"}, - {file = "matplotlib-3.9.1.post1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:9a54734ca761ebb27cd4f0b6c2ede696ab6861052d7d7e7b8f7a6782665115f5"}, - {file = "matplotlib-3.9.1.post1-cp310-cp310-win_amd64.whl", hash = "sha256:0721f93db92311bb514e446842e2b21c004541dcca0281afa495053e017c5458"}, - {file = "matplotlib-3.9.1.post1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b08b46058fe2a31ecb81ef6aa3611f41d871f6a8280e9057cb4016cb3d8e894a"}, - {file = "matplotlib-3.9.1.post1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:22b344e84fcc574f561b5731f89a7625db8ef80cdbb0026a8ea855a33e3429d1"}, - {file = "matplotlib-3.9.1.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b49fee26d64aefa9f061b575f0f7b5fc4663e51f87375c7239efa3d30d908fa"}, - {file = "matplotlib-3.9.1.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89eb7e89e2b57856533c5c98f018aa3254fa3789fcd86d5f80077b9034a54c9a"}, - {file = "matplotlib-3.9.1.post1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c06e742bade41fda6176d4c9c78c9ea016e176cd338e62a1686384cb1eb8de41"}, - {file = "matplotlib-3.9.1.post1-cp311-cp311-win_amd64.whl", hash = "sha256:c44edab5b849e0fc1f1c9d6e13eaa35ef65925f7be45be891d9784709ad95561"}, - {file = "matplotlib-3.9.1.post1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:bf28b09986aee06393e808e661c3466be9c21eff443c9bc881bce04bfbb0c500"}, - {file = "matplotlib-3.9.1.post1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:92aeb8c439d4831510d8b9d5e39f31c16c7f37873879767c26b147cef61e54cd"}, - {file = "matplotlib-3.9.1.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f15798b0691b45c80d3320358a88ce5a9d6f518b28575b3ea3ed31b4bd95d009"}, - {file = "matplotlib-3.9.1.post1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d59fc6096da7b9c1df275f9afc3fef5cbf634c21df9e5f844cba3dd8deb1847d"}, - {file = "matplotlib-3.9.1.post1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:ab986817a32a70ce22302438691e7df4c6ee4a844d47289db9d583d873491e0b"}, - {file = "matplotlib-3.9.1.post1-cp312-cp312-win_amd64.whl", hash = "sha256:0d78e7d2d86c4472da105d39aba9b754ed3dfeaeaa4ac7206b82706e0a5362fa"}, - {file = "matplotlib-3.9.1.post1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bd07eba6431b4dc9253cce6374a28c415e1d3a7dc9f8aba028ea7592f06fe172"}, - {file = "matplotlib-3.9.1.post1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ca230cc4482010d646827bd2c6d140c98c361e769ae7d954ebf6fff2a226f5b1"}, - {file = "matplotlib-3.9.1.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ace27c0fdeded399cbc43f22ffa76e0f0752358f5b33106ec7197534df08725a"}, - {file = "matplotlib-3.9.1.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a4f3aeb7ba14c497dc6f021a076c48c2e5fbdf3da1e7264a5d649683e284a2f"}, - {file = "matplotlib-3.9.1.post1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:23f96fbd4ff4cfa9b8a6b685a65e7eb3c2ced724a8d965995ec5c9c2b1f7daf5"}, - {file = "matplotlib-3.9.1.post1-cp39-cp39-win_amd64.whl", hash = "sha256:2808b95452b4ffa14bfb7c7edffc5350743c31bda495f0d63d10fdd9bc69e895"}, - {file = "matplotlib-3.9.1.post1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ffc91239f73b4179dec256b01299d46d0ffa9d27d98494bc1476a651b7821cbe"}, - {file = "matplotlib-3.9.1.post1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f965ebca9fd4feaaca45937c4849d92b70653057497181100fcd1e18161e5f29"}, - {file = "matplotlib-3.9.1.post1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:801ee9323fd7b2da0d405aebbf98d1da77ea430bbbbbec6834c0b3af15e5db44"}, - {file = "matplotlib-3.9.1.post1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:50113e9b43ceb285739f35d43db36aa752fb8154325b35d134ff6e177452f9ec"}, - {file = "matplotlib-3.9.1.post1.tar.gz", hash = "sha256:c91e585c65092c975a44dc9d4239ba8c594ba3c193d7c478b6d178c4ef61f406"}, + {file = 
"matplotlib-3.9.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9d78bbc0cbc891ad55b4f39a48c22182e9bdaea7fc0e5dbd364f49f729ca1bbb"}, + {file = "matplotlib-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c375cc72229614632c87355366bdf2570c2dac01ac66b8ad048d2dabadf2d0d4"}, + {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d94ff717eb2bd0b58fe66380bd8b14ac35f48a98e7c6765117fe67fb7684e64"}, + {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab68d50c06938ef28681073327795c5db99bb4666214d2d5f880ed11aeaded66"}, + {file = "matplotlib-3.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:65aacf95b62272d568044531e41de26285d54aec8cb859031f511f84bd8b495a"}, + {file = "matplotlib-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:3fd595f34aa8a55b7fc8bf9ebea8aa665a84c82d275190a61118d33fbc82ccae"}, + {file = "matplotlib-3.9.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d8dd059447824eec055e829258ab092b56bb0579fc3164fa09c64f3acd478772"}, + {file = "matplotlib-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c797dac8bb9c7a3fd3382b16fe8f215b4cf0f22adccea36f1545a6d7be310b41"}, + {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d719465db13267bcef19ea8954a971db03b9f48b4647e3860e4bc8e6ed86610f"}, + {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8912ef7c2362f7193b5819d17dae8629b34a95c58603d781329712ada83f9447"}, + {file = "matplotlib-3.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7741f26a58a240f43bee74965c4882b6c93df3e7eb3de160126d8c8f53a6ae6e"}, + {file = "matplotlib-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:ae82a14dab96fbfad7965403c643cafe6515e386de723e498cf3eeb1e0b70cc7"}, + {file = "matplotlib-3.9.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:ac43031375a65c3196bee99f6001e7fa5bdfb00ddf43379d3c0609bdca042df9"}, + {file = "matplotlib-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be0fc24a5e4531ae4d8e858a1a548c1fe33b176bb13eff7f9d0d38ce5112a27d"}, + {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf81de2926c2db243c9b2cbc3917619a0fc85796c6ba4e58f541df814bbf83c7"}, + {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ee45bc4245533111ced13f1f2cace1e7f89d1c793390392a80c139d6cf0e6c"}, + {file = "matplotlib-3.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:306c8dfc73239f0e72ac50e5a9cf19cc4e8e331dd0c54f5e69ca8758550f1e1e"}, + {file = "matplotlib-3.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:5413401594cfaff0052f9d8b1aafc6d305b4bd7c4331dccd18f561ff7e1d3bd3"}, + {file = "matplotlib-3.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:18128cc08f0d3cfff10b76baa2f296fc28c4607368a8402de61bb3f2eb33c7d9"}, + {file = "matplotlib-3.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4876d7d40219e8ae8bb70f9263bcbe5714415acfdf781086601211335e24f8aa"}, + {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d9f07a80deab4bb0b82858a9e9ad53d1382fd122be8cde11080f4e7dfedb38b"}, + {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7c0410f181a531ec4e93bbc27692f2c71a15c2da16766f5ba9761e7ae518413"}, + {file = "matplotlib-3.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:909645cce2dc28b735674ce0931a4ac94e12f5b13f6bb0b5a5e65e7cea2c192b"}, + {file = "matplotlib-3.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:f32c7410c7f246838a77d6d1eff0c0f87f3cb0e7c4247aebea71a6d5a68cab49"}, + {file = "matplotlib-3.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:37e51dd1c2db16ede9cfd7b5cabdfc818b2c6397c83f8b10e0e797501c963a03"}, + {file = 
"matplotlib-3.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b82c5045cebcecd8496a4d694d43f9cc84aeeb49fe2133e036b207abe73f4d30"}, + {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f053c40f94bc51bc03832a41b4f153d83f2062d88c72b5e79997072594e97e51"}, + {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbe196377a8248972f5cede786d4c5508ed5f5ca4a1e09b44bda889958b33f8c"}, + {file = "matplotlib-3.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5816b1e1fe8c192cbc013f8f3e3368ac56fbecf02fb41b8f8559303f24c5015e"}, + {file = "matplotlib-3.9.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cef2a73d06601437be399908cf13aee74e86932a5ccc6ccdf173408ebc5f6bb2"}, + {file = "matplotlib-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0830e188029c14e891fadd99702fd90d317df294c3298aad682739c5533721a"}, + {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ba9c1299c920964e8d3857ba27173b4dbb51ca4bab47ffc2c2ba0eb5e2cbc5"}, + {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd93b91ab47a3616b4d3c42b52f8363b88ca021e340804c6ab2536344fad9ca"}, + {file = "matplotlib-3.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6d1ce5ed2aefcdce11904fc5bbea7d9c21fff3d5f543841edf3dea84451a09ea"}, + {file = "matplotlib-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:b2696efdc08648536efd4e1601b5fd491fd47f4db97a5fbfd175549a7365c1b2"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d52a3b618cb1cbb769ce2ee1dcdb333c3ab6e823944e9a2d36e37253815f9556"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:039082812cacd6c6bec8e17a9c1e6baca230d4116d522e81e1f63a74d01d2e21"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6758baae2ed64f2331d4fd19be38b7b4eae3ecec210049a26b6a4f3ae1c85dcc"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:050598c2b29e0b9832cde72bcf97627bf00262adbc4a54e2b856426bb2ef0697"}, + {file = "matplotlib-3.9.2.tar.gz", hash = "sha256:96ab43906269ca64a6366934106fa01534454a69e471b7bf3d79083981aaab92"}, ] [package.dependencies] @@ -2447,38 +2583,38 @@ files = [ [[package]] name = "mypy" -version = "1.11.1" +version = "1.11.2" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, - {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, - {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, - {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, - {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, - {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, - {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, - {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, - {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, - {file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, - {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, - {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, - {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, - {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, - {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, - {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, - {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, - {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, - {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, - {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, - {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, - {file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, - {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, - {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, - {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, - {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, - {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, + {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, + {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, + {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, + {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, + {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, + {file = 
"mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, + {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, + {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, + {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, + {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, + {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, + {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, + {file = 
"mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, + {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, + {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, + {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, + {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, ] [package.dependencies] @@ -3016,13 +3152,13 @@ type = ["mypy (>=1.8)"] [[package]] name = "plotly" -version = "5.23.0" +version = "5.24.0" description = "An open-source, interactive data visualization library for Python" optional = true python-versions = ">=3.8" files = [ - {file = "plotly-5.23.0-py3-none-any.whl", hash = "sha256:76cbe78f75eddc10c56f5a4ee3e7ccaade7c0a57465546f02098c0caed6c2d1a"}, - {file = "plotly-5.23.0.tar.gz", hash = "sha256:89e57d003a116303a34de6700862391367dd564222ab71f8531df70279fc0193"}, + {file = "plotly-5.24.0-py3-none-any.whl", hash = "sha256:0e54efe52c8cef899f7daa41be9ed97dfb6be622613a2a8f56a86a0634b2b67e"}, + {file = "plotly-5.24.0.tar.gz", hash = "sha256:eae9f4f54448682442c92c1e97148e3ad0c52f0cf86306e1b76daba24add554a"}, ] [package.dependencies] @@ -3064,13 +3200,13 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] [[package]] name = "posthog" -version = "3.5.0" +version = "3.6.0" description = "Integrate PostHog into any python application." 
optional = false python-versions = "*" files = [ - {file = "posthog-3.5.0-py2.py3-none-any.whl", hash = "sha256:3c672be7ba6f95d555ea207d4486c171d06657eb34b3ce25eb043bfe7b6b5b76"}, - {file = "posthog-3.5.0.tar.gz", hash = "sha256:8f7e3b2c6e8714d0c0c542a2109b83a7549f63b7113a133ab2763a89245ef2ef"}, + {file = "posthog-3.6.0-py2.py3-none-any.whl", hash = "sha256:6f8dacc6d14d80734b1d15bd4ab08b049629c5f0fc420cafcf1ce0667c76c83c"}, + {file = "posthog-3.6.0.tar.gz", hash = "sha256:27dbf537241a69fb5f6a3e9561caa2d555d5891d95fa65c27ffa6b52d1fb63b6"}, ] [package.dependencies] @@ -3322,13 +3458,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pyparsing" -version = "3.1.2" +version = "3.1.4" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = true python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, - {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, + {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, + {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, ] [package.extras] @@ -4094,13 +4230,13 @@ compatible-mypy = ["mypy (>=1.10,<1.11)"] [[package]] name = "rich" -version = "13.7.1" +version = "13.8.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, - {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, + {file = "rich-13.8.0-py3-none-any.whl", hash = 
"sha256:2e85306a063b9492dffc86278197a60cbece75bcb766022f3436f567cae11bdc"}, + {file = "rich-13.8.0.tar.gz", hash = "sha256:a5ac1f1cd448ade0d59cc3356f7db7a7ccda2c8cbae9c7a90c28ff463d3e91f4"}, ] [package.dependencies] @@ -4171,36 +4307,44 @@ tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc ( [[package]] name = "scipy" -version = "1.14.0" +version = "1.14.1" description = "Fundamental algorithms for scientific computing in Python" optional = true python-versions = ">=3.10" files = [ - {file = "scipy-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7e911933d54ead4d557c02402710c2396529540b81dd554fc1ba270eb7308484"}, - {file = "scipy-1.14.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:687af0a35462402dd851726295c1a5ae5f987bd6e9026f52e9505994e2f84ef6"}, - {file = "scipy-1.14.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:07e179dc0205a50721022344fb85074f772eadbda1e1b3eecdc483f8033709b7"}, - {file = "scipy-1.14.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:6a9c9a9b226d9a21e0a208bdb024c3982932e43811b62d202aaf1bb59af264b1"}, - {file = "scipy-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:076c27284c768b84a45dcf2e914d4000aac537da74236a0d45d82c6fa4b7b3c0"}, - {file = "scipy-1.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42470ea0195336df319741e230626b6225a740fd9dce9642ca13e98f667047c0"}, - {file = "scipy-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:176c6f0d0470a32f1b2efaf40c3d37a24876cebf447498a4cefb947a79c21e9d"}, - {file = "scipy-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:ad36af9626d27a4326c8e884917b7ec321d8a1841cd6dacc67d2a9e90c2f0359"}, - {file = "scipy-1.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6d056a8709ccda6cf36cdd2eac597d13bc03dba38360f418560a93050c76a16e"}, - {file = "scipy-1.14.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f0a50da861a7ec4573b7c716b2ebdcdf142b66b756a0d392c236ae568b3a93fb"}, - 
{file = "scipy-1.14.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:94c164a9e2498e68308e6e148646e486d979f7fcdb8b4cf34b5441894bdb9caf"}, - {file = "scipy-1.14.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a7d46c3e0aea5c064e734c3eac5cf9eb1f8c4ceee756262f2c7327c4c2691c86"}, - {file = "scipy-1.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eee2989868e274aae26125345584254d97c56194c072ed96cb433f32f692ed8"}, - {file = "scipy-1.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3154691b9f7ed73778d746da2df67a19d046a6c8087c8b385bc4cdb2cfca74"}, - {file = "scipy-1.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c40003d880f39c11c1edbae8144e3813904b10514cd3d3d00c277ae996488cdb"}, - {file = "scipy-1.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:5b083c8940028bb7e0b4172acafda6df762da1927b9091f9611b0bcd8676f2bc"}, - {file = "scipy-1.14.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff2438ea1330e06e53c424893ec0072640dac00f29c6a43a575cbae4c99b2b9"}, - {file = "scipy-1.14.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:bbc0471b5f22c11c389075d091d3885693fd3f5e9a54ce051b46308bc787e5d4"}, - {file = "scipy-1.14.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:64b2ff514a98cf2bb734a9f90d32dc89dc6ad4a4a36a312cd0d6327170339eb0"}, - {file = "scipy-1.14.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:7d3da42fbbbb860211a811782504f38ae7aaec9de8764a9bef6b262de7a2b50f"}, - {file = "scipy-1.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d91db2c41dd6c20646af280355d41dfa1ec7eead235642178bd57635a3f82209"}, - {file = "scipy-1.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a01cc03bcdc777c9da3cfdcc74b5a75caffb48a6c39c8450a9a05f82c4250a14"}, - {file = "scipy-1.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:65df4da3c12a2bb9ad52b86b4dcf46813e869afb006e58be0f516bc370165159"}, - {file = 
"scipy-1.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:4c4161597c75043f7154238ef419c29a64ac4a7c889d588ea77690ac4d0d9b20"}, - {file = "scipy-1.14.0.tar.gz", hash = "sha256:b5923f48cb840380f9854339176ef21763118a7300a88203ccd0bdd26e58527b"}, + {file = "scipy-1.14.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389"}, + {file = "scipy-1.14.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3"}, + {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0"}, + {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3"}, + {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d"}, + {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69"}, + {file = "scipy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad"}, + {file = "scipy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5"}, + {file = "scipy-1.14.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675"}, + {file = "scipy-1.14.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2"}, + {file = "scipy-1.14.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617"}, + {file = "scipy-1.14.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = 
"sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8"}, + {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37"}, + {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2"}, + {file = "scipy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2"}, + {file = "scipy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94"}, + {file = "scipy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d"}, + {file = "scipy-1.14.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07"}, + {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5"}, + {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc"}, + {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310"}, + {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066"}, + {file = "scipy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1"}, + {file = "scipy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f"}, + {file = "scipy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79"}, + {file = "scipy-1.14.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e"}, + {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73"}, + {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e"}, + {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d"}, + {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e"}, + {file = "scipy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06"}, + {file = "scipy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84"}, + {file = "scipy-1.14.1.tar.gz", hash = "sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417"}, ] [package.dependencies] @@ -4208,8 +4352,8 @@ numpy = ">=1.23.5,<2.3" [package.extras] dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] -doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] -test = ["Cython", "array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", 
"jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<=7.3.7)", "sphinx-design (>=0.4.0)"] +test = ["Cython", "array-api-strict (>=2.0)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "sentry-sdk" @@ -4262,21 +4406,71 @@ starlette = ["starlette (>=0.19.1)"] starlite = ["starlite (>=1.48)"] tornado = ["tornado (>=6)"] +[[package]] +name = "serpyco-rs" +version = "1.10.2" +description = "" +optional = false +python-versions = ">=3.9" +files = [ + {file = "serpyco_rs-1.10.2-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e01d824fdebb9bded57ec40b9ac0ca3b312ad617fd5deba61113a3b23bcb915d"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef9a31f8d62c17b1ccfffb3e91c5aed2d6fd2187c7611ee3ca1b572046150cd"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aab2241b2d87bca5f15d5d34a3948b1c9ad1724cc55d1332e0c5325aff02635f"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:87d8118e9ba6e37aee1b0f7c14b19fe494f1589dc81ae0cc5168812779e1bfab"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d18a77d23aeb49904b2462410e57b4027511158845291bf6251e5857a881d60"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8da7ff487ada75f6b724d6ef9e40cde5cf703a2b89e6a3f466a8db0049e153a"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5212fa00ff8874ecabca0cf5f11eb7c1291b55ec9ee6aa7ee3ae2ec344abcf7f"}, + {file = "serpyco_rs-1.10.2-cp310-none-win_amd64.whl", hash = 
"sha256:ff83f5296f0ab08e77d09a4888020e636d4385a642fec52eacd2ab480d0ec22c"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8d0e6d6546145ba30d6032381b27261e338f7c1b96b9fb0773a481970a809827"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf4d5c69d1fcd7007b7792cb5ea62a0702822f6f8982349f44b795677ab7414c"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9fc4c1241c0707bfdd93991c0a2cea3f51a17acad343d9b5c296fc0a9f044d78"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:413fe29db4cab826269371a89ff9ccbd897ee7ff0eaaf1090362fdb86d5b8beb"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54ce4d5ac0ac4d62911998bfba1ac149a61c43f5dbfa23f831f0d87290c1861a"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd9c8a31440a3158c601fdcd523e77cd5fefa2ae5be061a4151c38a7a6060624"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8e323f5420c3e6f99627291a2d47d7fcd7f5c4433aaa6cc35e15d5b22ba19d6"}, + {file = "serpyco_rs-1.10.2-cp311-none-win_amd64.whl", hash = "sha256:743c1e1943f51883cb498c2c16c5f49bab2adb991c842077fcd0fa5a1658da25"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:6379d789daff44e5f535d7e1c0131b30cee86988e9561cc9d98e87021188220d"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805162d7b67fd08b04b1e2ef1deeaedc37c7ee24a200f24778fb98b9fe7f5cdd"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1366df15ae2548a8a063eca84b9a8c2af92ac55df73ce60a7c4f2dfe71e2526b"}, + {file = 
"serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:35d0a1a1a69ae074b123f6ad1487dc67717727d9dce4f95a393298743d60aafb"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a79517070e0b021803cafdf11d326e1149eac4a226443040e9fa1492c74337b"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bdd2b8d3b9160ddcab0400ca5e258c16e870ae49c6586ed5405c18e8910c957b"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:045965a32c651797a73c7b7165165ed0d78efc233af4bf24c47acd41d222fae8"}, + {file = "serpyco_rs-1.10.2-cp312-none-win_amd64.whl", hash = "sha256:c6c95f6c9e04af94c33e4e514291df7380c3960a155e9fe264ccaaa46d4d0de8"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:f19a82836699d102b288b17ae370dd4d37af60ccd2254f5bfdbd053d168cecee"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c3830bb3f6a342825e27592e86baa46774bfb1f08c82dbf561b5f1380a18b48"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f726392e6380b1e7d642d7633ac27929c8616a59db0a54632f5a9ab80987e071"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9ce029f8f29f4f335d0f3c9e005b71d7e8a934735d9654e3f03ccc54d50c107a"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e1f011370259602b55141ce866bf31dcdc9d8b68105c32f18ee442bc651ee880"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14d9e22016e2860c1f524aa123cfadd4a4eea25af10d1be76cc3d97d9c85c2e2"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:441b8045f91f30120c00a1f617a0ad6f22c1753c6b98899e8476d6e7775a3667"}, + {file = "serpyco_rs-1.10.2-cp39-none-win_amd64.whl", hash = "sha256:a124608cc998e3854fc743dea5dd7d948edbeaa70c1c1777b6dbb4b64ce465b0"}, + {file = "serpyco_rs-1.10.2.tar.gz", hash = "sha256:9cf06956eb14b326e522c9665aa5136f8fd7ece2df8a393c2e84bee8204362d0"}, +] + +[package.dependencies] +attributes-doc = "*" +typing-extensions = "*" + [[package]] name = "setuptools" -version = "72.1.0" +version = "74.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, - {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, + {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"}, + {file = "setuptools-74.0.0.tar.gz", hash = "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"}, ] [package.extras] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", 
"pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "six" @@ -4300,6 +4494,17 @@ files = [ {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, ] +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = true +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = 
"sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -4313,13 +4518,13 @@ files = [ [[package]] name = "soupsieve" -version = "2.5" +version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." optional = true python-versions = ">=3.8" files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] @@ -4587,17 +4792,18 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"] [[package]] name = "textual" -version = "0.76.0" +version = "0.79.0" description = "Modern Text User Interface framework" optional = false python-versions = "<4.0.0,>=3.8.1" files = [ - {file = "textual-0.76.0-py3-none-any.whl", hash = "sha256:e2035609c889dba507d34a5d7b333f1c8c53a29fb170962cb92101507663517a"}, - {file = "textual-0.76.0.tar.gz", hash = "sha256:b12e8879d591090c0901b5cb8121d086e28e677353b368292d3865ec99b83b70"}, + {file = "textual-0.79.0-py3-none-any.whl", hash = "sha256:59785f20e13b0e530e3d21c0fca5eb09bd1ff329f47abce29a8e50a59646228d"}, + {file = "textual-0.79.0.tar.gz", hash = "sha256:b5ae63ae11227c158da90e486e99a6db7ef198470219edaf8c200a999d27577a"}, ] [package.dependencies] markdown-it-py = {version = ">=2.1.0", extras = ["linkify", "plugins"]} +platformdirs = ">=4.2.2,<5.0.0" rich = ">=13.3.3" typing-extensions = ">=4.4.0,<5.0.0" @@ -4881,13 +5087,13 @@ xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] [[package]] 
name = "unstructured-pytesseract" -version = "0.3.12" +version = "0.3.13" description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" optional = true python-versions = ">=3.8" files = [ - {file = "unstructured.pytesseract-0.3.12-py3-none-any.whl", hash = "sha256:6ed42530fc697bb08d1ae4884cc517ee808620c1c1414efe8d5d90334da068d3"}, - {file = "unstructured.pytesseract-0.3.12.tar.gz", hash = "sha256:751a21d67b1f109036bf4daf796d3e04631697a355efd650f3373412b249de2e"}, + {file = "unstructured.pytesseract-0.3.13-py3-none-any.whl", hash = "sha256:8001bc860470d56185176eb3ceb4623e888eba058ca3b30af79003784bc40e19"}, + {file = "unstructured.pytesseract-0.3.13.tar.gz", hash = "sha256:ff2e6391496e457dbf4b4e327f4a4577cce18921ea6570dc74bd64381b10e963"}, ] [package.dependencies] @@ -4952,13 +5158,13 @@ files = [ [[package]] name = "werkzeug" -version = "3.0.3" +version = "3.0.4" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, - {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, + {file = "werkzeug-3.0.4-py3-none-any.whl", hash = "sha256:02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c"}, + {file = "werkzeug-3.0.4.tar.gz", hash = "sha256:34f2371506b250df4d4f84bfe7b0921e4762525762bbd936614909fe25cd7306"}, ] [package.dependencies] @@ -5173,18 +5379,22 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.20.0" +version = "3.20.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = true python-versions = ">=3.8" files = [ - {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, - {file = "zipp-3.20.0.tar.gz", hash = 
"sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, + {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, + {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [extras] file-based = ["avro", "fastavro", "markdown", "pandas", "pdf2image", "pdfminer.six", "pyarrow", "pytesseract", "python-calamine", "unstructured", "unstructured.pytesseract"] @@ -5194,4 +5404,4 @@ vector-db-based = ["cohere", "langchain", "openai", "tiktoken"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "a61b0b329edd46e9efd6ff722f9518d63210de55c4f770a29785e630c372bb0e" +content-hash = "1759d8574c392cf39fccff997263873168087159c5f741314ceff6db4e5a32af" diff --git a/airbyte-cdk/python/pyproject.toml b/airbyte-cdk/python/pyproject.toml index 0eda309b44a8..4b07bed0626f 100644 --- a/airbyte-cdk/python/pyproject.toml +++ b/airbyte-cdk/python/pyproject.toml @@ -22,9 +22,10 @@ classifiers = [ ] keywords = ["airbyte", "connector-development-kit", "cdk"] + [tool.poetry.dependencies] python = "^3.10" -airbyte-protocol-models-pdv2 = "^0.12.2" +airbyte-protocol-models-dataclasses = "^0.13" backoff = "*" cachetools = "*" 
Deprecated = "~1.2" @@ -66,6 +67,7 @@ pyjwt = "^2.8.0" cryptography = "^42.0.5" pytz = "2024.1" orjson = "^3.10.7" +serpyco-rs = "^1.10.2" [tool.poetry.group.dev.dependencies] freezegun = "*" diff --git a/airbyte-cdk/python/unit_tests/conftest.py b/airbyte-cdk/python/unit_tests/conftest.py index a5883fe095a5..5d1e1f03f342 100644 --- a/airbyte-cdk/python/unit_tests/conftest.py +++ b/airbyte-cdk/python/unit_tests/conftest.py @@ -10,6 +10,6 @@ @pytest.fixture() def mock_sleep(monkeypatch): - with freezegun.freeze_time(datetime.datetime.now(), ignore=['_pytest.runner', '_pytest.terminal']) as frozen_datetime: - monkeypatch.setattr('time.sleep', lambda x: frozen_datetime.tick(x)) + with freezegun.freeze_time(datetime.datetime.now(), ignore=["_pytest.runner", "_pytest.terminal"]) as frozen_datetime: + monkeypatch.setattr("time.sleep", lambda x: frozen_datetime.tick(x)) yield diff --git a/airbyte-cdk/python/unit_tests/connector_builder/test_connector_builder_handler.py b/airbyte-cdk/python/unit_tests/connector_builder/test_connector_builder_handler.py index a967087f0d0e..ca6b8e47ea68 100644 --- a/airbyte-cdk/python/unit_tests/connector_builder/test_connector_builder_handler.py +++ b/airbyte-cdk/python/unit_tests/connector_builder/test_connector_builder_handler.py @@ -27,11 +27,13 @@ from airbyte_cdk.models import ( AirbyteLogMessage, AirbyteMessage, + AirbyteMessageSerializer, AirbyteRecordMessage, AirbyteStateMessage, AirbyteStream, AirbyteStreamState, ConfiguredAirbyteCatalog, + ConfiguredAirbyteCatalogSerializer, ConfiguredAirbyteStream, ConnectorSpecification, DestinationSyncMode, @@ -46,6 +48,7 @@ from airbyte_cdk.sources.declarative.retrievers import SimpleRetrieverTestReadDecorator from airbyte_cdk.sources.declarative.retrievers.simple_retriever import SimpleRetriever from airbyte_cdk.utils.airbyte_secrets_utils import filter_secrets, update_secrets +from orjson import orjson from unit_tests.connector_builder.utils import create_configured_catalog _stream_name = 
"stream_with_custom_requester" @@ -73,8 +76,8 @@ }, ], "parent_state": {}, - } - ) + }, + ), ) ] @@ -277,13 +280,13 @@ def _mocked_send(self, request, **kwargs) -> requests.Response: def test_handle_resolve_manifest(valid_resolve_manifest_config_file, dummy_catalog): - with mock.patch.object(connector_builder.main, "handle_connector_builder_request") as patched_handle: + with mock.patch.object(connector_builder.main, "handle_connector_builder_request", return_value=AirbyteMessage(type=MessageType.RECORD)) as patched_handle: handle_request(["read", "--config", str(valid_resolve_manifest_config_file), "--catalog", str(dummy_catalog)]) assert patched_handle.call_count == 1 def test_handle_test_read(valid_read_config_file, configured_catalog): - with mock.patch.object(connector_builder.main, "handle_connector_builder_request") as patch: + with mock.patch.object(connector_builder.main, "handle_connector_builder_request", return_value=AirbyteMessage(type=MessageType.RECORD)) as patch: handle_request(["read", "--config", str(valid_read_config_file), "--catalog", str(configured_catalog)]) assert patch.call_count == 1 @@ -487,11 +490,14 @@ def test_read(): limits = TestReadLimits() with patch("airbyte_cdk.connector_builder.message_grouper.MessageGrouper.get_message_groups", return_value=stream_read) as mock: output_record = handle_connector_builder_request( - source, "test_read", config, ConfiguredAirbyteCatalog.parse_obj(CONFIGURED_CATALOG), _A_STATE, limits + source, "test_read", config, ConfiguredAirbyteCatalogSerializer.load(CONFIGURED_CATALOG), _A_STATE, limits ) - mock.assert_called_with(source, config, ConfiguredAirbyteCatalog.parse_obj(CONFIGURED_CATALOG), _A_STATE, limits.max_records) + mock.assert_called_with(source, config, ConfiguredAirbyteCatalogSerializer.load(CONFIGURED_CATALOG), _A_STATE, limits.max_records) output_record.record.emitted_at = 1 - assert output_record == expected_airbyte_message + assert ( + 
orjson.dumps(AirbyteMessageSerializer.dump(output_record)).decode() + == orjson.dumps(AirbyteMessageSerializer.dump(expected_airbyte_message)).decode() + ) def test_config_update(): @@ -523,7 +529,12 @@ def test_config_update(): return_value=refresh_request_response, ): output = handle_connector_builder_request( - source, "test_read", config, ConfiguredAirbyteCatalog.parse_obj(CONFIGURED_CATALOG), _A_PER_PARTITION_STATE, TestReadLimits() + source, + "test_read", + config, + ConfiguredAirbyteCatalogSerializer.load(CONFIGURED_CATALOG), + _A_PER_PARTITION_STATE, + TestReadLimits(), ) assert output.record.data["latest_config_update"] @@ -560,7 +571,7 @@ def check_config_against_spec(self): source = MockManifestDeclarativeSource() limits = TestReadLimits() - response = read_stream(source, TEST_READ_CONFIG, ConfiguredAirbyteCatalog.parse_obj(CONFIGURED_CATALOG), _A_STATE, limits) + response = read_stream(source, TEST_READ_CONFIG, ConfiguredAirbyteCatalogSerializer.load(CONFIGURED_CATALOG), _A_STATE, limits) expected_stream_read = StreamRead( logs=[LogMessage("error_message - a stack trace", "ERROR")], @@ -584,13 +595,8 @@ def test_handle_429_response(): response = _create_429_page_response({"result": [{"error": "too many requests"}], "_metadata": {"next": "next"}}) # Add backoff strategy to avoid default endless backoff loop - TEST_READ_CONFIG["__injected_declarative_manifest"]['definitions']['retriever']['requester']['error_handler'] = { - "backoff_strategies": [ - { - "type": "ConstantBackoffStrategy", - "backoff_time_in_seconds": 5 - } - ] + TEST_READ_CONFIG["__injected_declarative_manifest"]["definitions"]["retriever"]["requester"]["error_handler"] = { + "backoff_strategies": [{"type": "ConstantBackoffStrategy", "backoff_time_in_seconds": 5}] } config = TEST_READ_CONFIG @@ -599,7 +605,7 @@ def test_handle_429_response(): with patch("requests.Session.send", return_value=response) as mock_send: response = handle_connector_builder_request( - source, "test_read", config, 
ConfiguredAirbyteCatalog.parse_obj(CONFIGURED_CATALOG), _A_PER_PARTITION_STATE, limits + source, "test_read", config, ConfiguredAirbyteCatalogSerializer.load(CONFIGURED_CATALOG), _A_PER_PARTITION_STATE, limits ) mock_send.assert_called_once() diff --git a/airbyte-cdk/python/unit_tests/connector_builder/test_message_grouper.py b/airbyte-cdk/python/unit_tests/connector_builder/test_message_grouper.py index b865c719b211..41ce94513560 100644 --- a/airbyte-cdk/python/unit_tests/connector_builder/test_message_grouper.py +++ b/airbyte-cdk/python/unit_tests/connector_builder/test_message_grouper.py @@ -23,6 +23,7 @@ StreamDescriptor, ) from airbyte_cdk.models import Type as MessageType +from orjson import orjson from unit_tests.connector_builder.utils import create_configured_catalog _NO_PK = [[]] @@ -147,7 +148,10 @@ def test_get_grouped_messages(mock_entrypoint_read: Mock) -> None: connector_builder_handler = MessageGrouper(MAX_PAGES_PER_SLICE, MAX_SLICES) actual_response: StreamRead = connector_builder_handler.get_message_groups( - source=mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras"), state=_NO_STATE, + source=mock_source, + config=CONFIG, + configured_catalog=create_configured_catalog("hashiras"), + state=_NO_STATE, ) assert actual_response.inferred_schema == expected_schema @@ -212,7 +216,10 @@ def test_get_grouped_messages_with_logs(mock_entrypoint_read: Mock) -> None: connector_builder_handler = MessageGrouper(MAX_PAGES_PER_SLICE, MAX_SLICES) actual_response: StreamRead = connector_builder_handler.get_message_groups( - source=mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras"), state=_NO_STATE, + source=mock_source, + config=CONFIG, + configured_catalog=create_configured_catalog("hashiras"), + state=_NO_STATE, ) single_slice = actual_response.slices[0] for i, actual_page in enumerate(single_slice.pages): @@ -230,7 +237,9 @@ def test_get_grouped_messages_with_logs(mock_entrypoint_read: Mock) -> 
None: ], ) @patch("airbyte_cdk.connector_builder.message_grouper.AirbyteEntrypoint.read") -def test_get_grouped_messages_record_limit(mock_entrypoint_read: Mock, request_record_limit: int, max_record_limit: int, should_fail: bool) -> None: +def test_get_grouped_messages_record_limit( + mock_entrypoint_read: Mock, request_record_limit: int, max_record_limit: int, should_fail: bool +) -> None: url = "https://demonslayers.com/api/v1/hashiras?era=taisho" request = { "headers": {"Content-Type": "application/json"}, @@ -258,11 +267,19 @@ def test_get_grouped_messages_record_limit(mock_entrypoint_read: Mock, request_r if should_fail: with pytest.raises(ValueError): api.get_message_groups( - mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras"), state=_NO_STATE, record_limit=request_record_limit + mock_source, + config=CONFIG, + configured_catalog=create_configured_catalog("hashiras"), + state=_NO_STATE, + record_limit=request_record_limit, ) else: actual_response: StreamRead = api.get_message_groups( - mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras"), state=_NO_STATE, record_limit=request_record_limit + mock_source, + config=CONFIG, + configured_catalog=create_configured_catalog("hashiras"), + state=_NO_STATE, + record_limit=request_record_limit, ) single_slice = actual_response.slices[0] total_records = 0 @@ -338,7 +355,9 @@ def test_get_grouped_messages_limit_0(mock_entrypoint_read: Mock) -> None: api = MessageGrouper(MAX_PAGES_PER_SLICE, MAX_SLICES) with pytest.raises(ValueError): - api.get_message_groups(source=mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras"), state=_NO_STATE, record_limit=0) + api.get_message_groups( + source=mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras"), state=_NO_STATE, record_limit=0 + ) @patch("airbyte_cdk.connector_builder.message_grouper.AirbyteEntrypoint.read") @@ -386,7 +405,10 @@ def 
test_get_grouped_messages_no_records(mock_entrypoint_read: Mock) -> None: message_grouper = MessageGrouper(MAX_PAGES_PER_SLICE, MAX_SLICES) actual_response: StreamRead = message_grouper.get_message_groups( - source=mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras"), state=_NO_STATE, + source=mock_source, + config=CONFIG, + configured_catalog=create_configured_catalog("hashiras"), + state=_NO_STATE, ) single_slice = actual_response.slices[0] @@ -484,7 +506,10 @@ def test_get_grouped_messages_with_many_slices(mock_entrypoint_read: Mock) -> No connector_builder_handler = MessageGrouper(MAX_PAGES_PER_SLICE, MAX_SLICES) stream_read: StreamRead = connector_builder_handler.get_message_groups( - source=mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras"), state=_NO_STATE, + source=mock_source, + config=CONFIG, + configured_catalog=create_configured_catalog("hashiras"), + state=_NO_STATE, ) assert not stream_read.test_read_limit_reached @@ -501,7 +526,10 @@ def test_get_grouped_messages_with_many_slices(mock_entrypoint_read: Mock) -> No assert len(stream_read.slices[1].pages[1].records) == 1 assert len(stream_read.slices[1].pages[2].records) == 0 - assert stream_read.slices[1].state[0].stream.stream_state == AirbyteStateBlob(a_timestamp=123) + assert ( + orjson.dumps(stream_read.slices[1].state[0].stream.stream_state).decode() + == orjson.dumps(AirbyteStateBlob(a_timestamp=123)).decode() + ) @patch("airbyte_cdk.connector_builder.message_grouper.AirbyteEntrypoint.read") @@ -516,7 +544,10 @@ def test_get_grouped_messages_given_maximum_number_of_slices_then_test_read_limi api = MessageGrouper(MAX_PAGES_PER_SLICE, MAX_SLICES) stream_read: StreamRead = api.get_message_groups( - source=mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras"), state=_NO_STATE, + source=mock_source, + config=CONFIG, + configured_catalog=create_configured_catalog("hashiras"), + state=_NO_STATE, ) assert 
stream_read.test_read_limit_reached @@ -535,7 +566,10 @@ def test_get_grouped_messages_given_maximum_number_of_pages_then_test_read_limit api = MessageGrouper(MAX_PAGES_PER_SLICE, MAX_SLICES) stream_read: StreamRead = api.get_message_groups( - source=mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras"), state=_NO_STATE, + source=mock_source, + config=CONFIG, + configured_catalog=create_configured_catalog("hashiras"), + state=_NO_STATE, ) assert stream_read.test_read_limit_reached @@ -550,7 +584,10 @@ def test_read_stream_returns_error_if_stream_does_not_exist() -> None: message_grouper = MessageGrouper(MAX_PAGES_PER_SLICE, MAX_SLICES) actual_response = message_grouper.get_message_groups( - source=mock_source, config=full_config, configured_catalog=create_configured_catalog("not_in_manifest"), state=_NO_STATE, + source=mock_source, + config=full_config, + configured_catalog=create_configured_catalog("not_in_manifest"), + state=_NO_STATE, ) assert len(actual_response.logs) == 1 @@ -566,7 +603,10 @@ def test_given_control_message_then_stream_read_has_config_update(mock_entrypoin ) connector_builder_handler = MessageGrouper(MAX_PAGES_PER_SLICE, MAX_SLICES) stream_read: StreamRead = connector_builder_handler.get_message_groups( - source=mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras"), state=_NO_STATE, + source=mock_source, + config=CONFIG, + configured_catalog=create_configured_catalog("hashiras"), + state=_NO_STATE, ) assert stream_read.latest_config_update == updated_config @@ -591,7 +631,10 @@ def test_given_multiple_control_messages_then_stream_read_has_latest_based_on_em ) connector_builder_handler = MessageGrouper(MAX_PAGES_PER_SLICE, MAX_SLICES) stream_read: StreamRead = connector_builder_handler.get_message_groups( - source=mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras"), state=_NO_STATE, + source=mock_source, + config=CONFIG, + 
configured_catalog=create_configured_catalog("hashiras"), + state=_NO_STATE, ) assert stream_read.latest_config_update == latest_config @@ -616,7 +659,10 @@ def test_given_multiple_control_messages_with_same_timestamp_then_stream_read_ha ) connector_builder_handler = MessageGrouper(MAX_PAGES_PER_SLICE, MAX_SLICES) stream_read: StreamRead = connector_builder_handler.get_message_groups( - source=mock_source, config=CONFIG, configured_catalog=create_configured_catalog("hashiras"), state=_NO_STATE, + source=mock_source, + config=CONFIG, + configured_catalog=create_configured_catalog("hashiras"), + state=_NO_STATE, ) assert stream_read.latest_config_update == latest_config @@ -646,11 +692,16 @@ def test_given_no_slices_then_return_empty_slices(mock_entrypoint_read: Mock) -> @patch("airbyte_cdk.connector_builder.message_grouper.AirbyteEntrypoint.read") def test_given_pk_then_ensure_pk_is_pass_to_schema_inferrence(mock_entrypoint_read: Mock) -> None: - mock_source = make_mock_source(mock_entrypoint_read, iter([ - request_response_log_message({"request": 1}, {"response": 2}, "http://any_url.com"), - record_message("hashiras", {"id": "Shinobu Kocho", "date": "2023-03-03"}), - record_message("hashiras", {"id": "Muichiro Tokito", "date": "2023-03-04"}), - ])) + mock_source = make_mock_source( + mock_entrypoint_read, + iter( + [ + request_response_log_message({"request": 1}, {"response": 2}, "http://any_url.com"), + record_message("hashiras", {"id": "Shinobu Kocho", "date": "2023-03-03"}), + record_message("hashiras", {"id": "Muichiro Tokito", "date": "2023-03-04"}), + ] + ), + ) mock_source.streams.return_value = [Mock()] mock_source.streams.return_value[0].primary_key = [["id"]] mock_source.streams.return_value[0].cursor_field = _NO_CURSOR_FIELD @@ -665,11 +716,16 @@ def test_given_pk_then_ensure_pk_is_pass_to_schema_inferrence(mock_entrypoint_re @patch("airbyte_cdk.connector_builder.message_grouper.AirbyteEntrypoint.read") def 
test_given_cursor_field_then_ensure_cursor_field_is_pass_to_schema_inferrence(mock_entrypoint_read: Mock) -> None: - mock_source = make_mock_source(mock_entrypoint_read, iter([ - request_response_log_message({"request": 1}, {"response": 2}, "http://any_url.com"), - record_message("hashiras", {"id": "Shinobu Kocho", "date": "2023-03-03"}), - record_message("hashiras", {"id": "Muichiro Tokito", "date": "2023-03-04"}), - ])) + mock_source = make_mock_source( + mock_entrypoint_read, + iter( + [ + request_response_log_message({"request": 1}, {"response": 2}, "http://any_url.com"), + record_message("hashiras", {"id": "Shinobu Kocho", "date": "2023-03-03"}), + record_message("hashiras", {"id": "Muichiro Tokito", "date": "2023-03-04"}), + ] + ), + ) mock_source.streams.return_value = [Mock()] mock_source.streams.return_value[0].primary_key = _NO_PK mock_source.streams.return_value[0].cursor_field = ["date"] @@ -709,10 +765,10 @@ def record_message(stream: str, data: Mapping[str, Any]) -> AirbyteMessage: def state_message(stream: str, data: Mapping[str, Any]) -> AirbyteMessage: - return AirbyteMessage(type=MessageType.STATE, state=AirbyteStateMessage(stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name=stream), - stream_state=data - ))) + return AirbyteMessage( + type=MessageType.STATE, + state=AirbyteStateMessage(stream=AirbyteStreamState(stream_descriptor=StreamDescriptor(name=stream), stream_state=data)), + ) def slice_message(slice_descriptor: str = '{"key": "value"}') -> AirbyteMessage: diff --git a/airbyte-cdk/python/unit_tests/connector_builder/utils.py b/airbyte-cdk/python/unit_tests/connector_builder/utils.py index 15abdd30b9d9..a94a0416437c 100644 --- a/airbyte-cdk/python/unit_tests/connector_builder/utils.py +++ b/airbyte-cdk/python/unit_tests/connector_builder/utils.py @@ -4,7 +4,7 @@ from typing import Any, Mapping -from airbyte_cdk.models.airbyte_protocol import ConfiguredAirbyteCatalog +from airbyte_cdk.models import ConfiguredAirbyteCatalog, 
ConfiguredAirbyteCatalogSerializer def create_configured_catalog_dict(stream_name: str) -> Mapping[str, Any]: @@ -24,4 +24,4 @@ def create_configured_catalog_dict(stream_name: str) -> Mapping[str, Any]: def create_configured_catalog(stream_name: str) -> ConfiguredAirbyteCatalog: - return ConfiguredAirbyteCatalog.parse_obj(create_configured_catalog_dict(stream_name)) + return ConfiguredAirbyteCatalogSerializer.load(create_configured_catalog_dict(stream_name)) diff --git a/airbyte-cdk/python/unit_tests/destinations/test_destination.py b/airbyte-cdk/python/unit_tests/destinations/test_destination.py index 89d16453d530..a03d7ffcc6b0 100644 --- a/airbyte-cdk/python/unit_tests/destinations/test_destination.py +++ b/airbyte-cdk/python/unit_tests/destinations/test_destination.py @@ -16,10 +16,12 @@ AirbyteCatalog, AirbyteConnectionStatus, AirbyteMessage, + AirbyteMessageSerializer, AirbyteRecordMessage, AirbyteStateMessage, AirbyteStream, ConfiguredAirbyteCatalog, + ConfiguredAirbyteCatalogSerializer, ConfiguredAirbyteStream, ConnectorSpecification, DestinationSyncMode, @@ -27,6 +29,7 @@ SyncMode, Type, ) +from orjson import orjson @pytest.fixture(name="destination") @@ -194,7 +197,7 @@ def test_run_check_with_invalid_config(self, mocker, destination: Destination, t parsed_args = argparse.Namespace(**args) destination.run_cmd(parsed_args) - spec = {'type': 'integer'} + spec = {"type": "integer"} spec_msg = ConnectorSpecification(connectionSpecification=spec) mocker.patch.object(destination, "spec", return_value=spec_msg) @@ -214,7 +217,7 @@ def test_run_check_with_invalid_config(self, mocker, destination: Destination, t assert returned_check_result.type == Type.CONNECTION_STATUS assert returned_check_result.connectionStatus.status == Status.FAILED # the specific phrasing is not relevant, so only check for the keywords - assert 'validation error' in returned_check_result.connectionStatus.message + assert "validation error" in returned_check_result.connectionStatus.message 
def test_run_write(self, mocker, destination: Destination, tmp_path, monkeypatch): config_path, dummy_config = tmp_path / "config.json", {"user": "sherif"} @@ -230,7 +233,7 @@ def test_run_write(self, mocker, destination: Destination, tmp_path, monkeypatch ] ) catalog_path = tmp_path / "catalog.json" - write_file(catalog_path, dummy_catalog.json(exclude_unset=True)) + write_file(catalog_path, ConfiguredAirbyteCatalogSerializer.dump(dummy_catalog)) args = {"command": "write", "config": config_path, "catalog": catalog_path} parsed_args = argparse.Namespace(**args) @@ -244,7 +247,7 @@ def test_run_write(self, mocker, destination: Destination, tmp_path, monkeypatch validate_mock = mocker.patch("airbyte_cdk.destinations.destination.check_config_against_spec_or_exit") # mock input is a record followed by some state messages mocked_input: List[AirbyteMessage] = [_wrapped(_record("s1", {"k1": "v1"})), *expected_write_result] - mocked_stdin_string = "\n".join([record.json(exclude_unset=True) for record in mocked_input]) + mocked_stdin_string = "\n".join([orjson.dumps(AirbyteMessageSerializer.dump(record)).decode() for record in mocked_input]) mocked_stdin_string += "\n add this non-serializable string to verify the destination does not break on malformed input" mocked_stdin = io.TextIOWrapper(io.BytesIO(bytes(mocked_stdin_string, "utf-8"))) diff --git a/airbyte-cdk/python/unit_tests/destinations/vector_db_based/document_processor_test.py b/airbyte-cdk/python/unit_tests/destinations/vector_db_based/document_processor_test.py index 41da64916368..db3ce730c89e 100644 --- a/airbyte-cdk/python/unit_tests/destinations/vector_db_based/document_processor_test.py +++ b/airbyte-cdk/python/unit_tests/destinations/vector_db_based/document_processor_test.py @@ -14,8 +14,14 @@ SeparatorSplitterConfigModel, ) from airbyte_cdk.destinations.vector_db_based.document_processor import DocumentProcessor -from airbyte_cdk.models import AirbyteStream, ConfiguredAirbyteCatalog, 
ConfiguredAirbyteStream -from airbyte_cdk.models.airbyte_protocol import AirbyteRecordMessage, DestinationSyncMode, SyncMode +from airbyte_cdk.models import ( + AirbyteRecordMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + SyncMode, +) from airbyte_cdk.utils.traced_exception import AirbyteTracedException diff --git a/airbyte-cdk/python/unit_tests/destinations/vector_db_based/embedder_test.py b/airbyte-cdk/python/unit_tests/destinations/vector_db_based/embedder_test.py index a5f22b752ed2..600a4c0890d3 100644 --- a/airbyte-cdk/python/unit_tests/destinations/vector_db_based/embedder_test.py +++ b/airbyte-cdk/python/unit_tests/destinations/vector_db_based/embedder_test.py @@ -24,7 +24,7 @@ OpenAICompatibleEmbedder, OpenAIEmbedder, ) -from airbyte_cdk.models.airbyte_protocol import AirbyteRecordMessage +from airbyte_cdk.models import AirbyteRecordMessage from airbyte_cdk.utils.traced_exception import AirbyteTracedException diff --git a/airbyte-cdk/python/unit_tests/destinations/vector_db_based/writer_test.py b/airbyte-cdk/python/unit_tests/destinations/vector_db_based/writer_test.py index c906d0f3e9b5..ac831694c726 100644 --- a/airbyte-cdk/python/unit_tests/destinations/vector_db_based/writer_test.py +++ b/airbyte-cdk/python/unit_tests/destinations/vector_db_based/writer_test.py @@ -7,12 +7,13 @@ import pytest from airbyte_cdk.destinations.vector_db_based import ProcessingConfigModel, Writer -from airbyte_cdk.models.airbyte_protocol import ( +from airbyte_cdk.models import ( AirbyteLogMessage, AirbyteMessage, AirbyteRecordMessage, AirbyteStateMessage, ConfiguredAirbyteCatalog, + ConfiguredAirbyteCatalogSerializer, Level, Type, ) @@ -61,7 +62,7 @@ def test_write(omit_raw_text: bool): """ config_model = ProcessingConfigModel(chunk_overlap=0, chunk_size=1000, metadata_fields=None, text_fields=["column_name"]) - configured_catalog: ConfiguredAirbyteCatalog = ConfiguredAirbyteCatalog.parse_obj({"streams": 
[generate_stream()]}) + configured_catalog: ConfiguredAirbyteCatalog = ConfiguredAirbyteCatalogSerializer.load({"streams": [generate_stream()]}) # messages are flushed after 32 records or after a state message, so this will trigger two batches to be processed input_messages = [_generate_record_message(i) for i in range(BATCH_SIZE + 5)] state_message = AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage()) @@ -126,7 +127,7 @@ def test_write_stream_namespace_split(): """ config_model = ProcessingConfigModel(chunk_overlap=0, chunk_size=1000, metadata_fields=None, text_fields=["column_name"]) - configured_catalog: ConfiguredAirbyteCatalog = ConfiguredAirbyteCatalog.parse_obj( + configured_catalog: ConfiguredAirbyteCatalog = ConfiguredAirbyteCatalogSerializer.load( { "streams": [ generate_stream(), diff --git a/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py b/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py index f6ff8684fa94..1c7315cb6969 100644 --- a/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py +++ b/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py @@ -43,7 +43,15 @@ def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> def streams(self, config: Mapping[str, Any]) -> List[Stream]: return [ - StreamFacade.create_from_stream(s, self, self._logger, None, FinalStateCursor(stream_name=s.name, stream_namespace=s.namespace, message_repository=InMemoryMessageRepository())) if is_concurrent else s + StreamFacade.create_from_stream( + s, + self, + self._logger, + None, + FinalStateCursor(stream_name=s.name, stream_namespace=s.namespace, message_repository=InMemoryMessageRepository()), + ) + if is_concurrent + else s for s, is_concurrent in self._streams_to_is_concurrent.items() ] @@ -96,7 +104,13 @@ def test_concurrent_source_adapter(as_stream_status, remove_stack_trace): assert 
records == expected_records - unavailable_stream_trace_messages = [m for m in messages if m.type == MessageType.TRACE and m.trace.type == TraceType.STREAM_STATUS and m.trace.stream_status.status == AirbyteStreamStatus.INCOMPLETE] + unavailable_stream_trace_messages = [ + m + for m in messages + if m.type == MessageType.TRACE + and m.trace.type == TraceType.STREAM_STATUS + and m.trace.stream_status.status == AirbyteStreamStatus.INCOMPLETE + ] expected_status = [as_stream_status("s3", AirbyteStreamStatus.INCOMPLETE)] assert len(unavailable_stream_trace_messages) == 1 @@ -133,7 +147,9 @@ def _configured_catalog(streams: List[Stream]): @pytest.mark.parametrize("raise_exception_on_missing_stream", [True, False]) -def test_read_nonexistent_concurrent_stream_emit_incomplete_stream_status(mocker, remove_stack_trace, as_stream_status, raise_exception_on_missing_stream): +def test_read_nonexistent_concurrent_stream_emit_incomplete_stream_status( + mocker, remove_stack_trace, as_stream_status, raise_exception_on_missing_stream +): """ Tests that attempting to sync a stream which the source does not return from the `streams` method emits incomplete stream status. 
""" diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/auth/test_jwt.py b/airbyte-cdk/python/unit_tests/sources/declarative/auth/test_jwt.py index b625ddd5b357..51bef48230c9 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/auth/test_jwt.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/auth/test_jwt.py @@ -19,11 +19,18 @@ class TestJwtAuthenticator: """ @pytest.mark.parametrize( - "algorithm, kid, typ, cty, additional_jwt_headers, expected", - [ - ("ALGORITHM", "test_kid", "test_typ", "test_cty", {"test": "test"}, {"kid": "test_kid", "typ": "test_typ", "cty": "test_cty", "test": "test", "alg": "ALGORITHM"}), - ("ALGORITHM", None, None, None, None, {"alg": "ALGORITHM"}) - ] + "algorithm, kid, typ, cty, additional_jwt_headers, expected", + [ + ( + "ALGORITHM", + "test_kid", + "test_typ", + "test_cty", + {"test": "test"}, + {"kid": "test_kid", "typ": "test_typ", "cty": "test_cty", "test": "test", "alg": "ALGORITHM"}, + ), + ("ALGORITHM", None, None, None, None, {"alg": "ALGORITHM"}), + ], ) def test_get_jwt_headers(self, algorithm, kid, typ, cty, additional_jwt_headers, expected): authenticator = JwtAuthenticator( @@ -61,14 +68,8 @@ def test_given_overriden_reserverd_properties_get_jwt_headers_throws_error(self) {"test": "test"}, {"iss": "test_iss", "sub": "test_sub", "aud": "test_aud", "test": "test"}, ), - ( - None, - None, - None, - None, - {} - ), - ] + (None, None, None, None, {}), + ], ) def test_get_jwt_payload(self, iss, sub, aud, additional_jwt_payload, expected): authenticator = JwtAuthenticator( @@ -105,7 +106,7 @@ def test_given_overriden_reserverd_properties_get_jwt_payload_throws_error(self) [ (True, "test", base64.b64encode("test".encode()).decode()), (False, "test", "test"), - ] + ], ) def test_get_secret_key(self, base64_encode_secret_key, secret_key, expected): authenticator = JwtAuthenticator( @@ -152,13 +153,7 @@ def test_given_invalid_algorithm_get_signed_token_throws_error(self): with pytest.raises(ValueError): 
authenticator._get_signed_token() - @pytest.mark.parametrize( - "header_prefix, expected", - [ - ("test", "test"), - (None, None) - ] - ) + @pytest.mark.parametrize("header_prefix, expected", [("test", "test"), (None, None)]) def test_get_header_prefix(self, header_prefix, expected): authenticator = JwtAuthenticator( config={}, diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/checks/test_check_stream.py b/airbyte-cdk/python/unit_tests/sources/declarative/checks/test_check_stream.py index 8ccf70b4e7a9..4ebe449dcd69 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/checks/test_check_stream.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/checks/test_check_stream.py @@ -92,9 +92,7 @@ def test_check_stream_with_no_stream_slices_aborts(): "test_stream_unavailable_handled_error", 403, False, - [ - "Forbidden. You don't have permission to access this resource." - ], + ["Forbidden. You don't have permission to access this resource."], ), ("test_stream_available", 200, True, []), ], diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_datetime_parser.py b/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_datetime_parser.py index 0a25be1129c1..1a7d45f7a78f 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_datetime_parser.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_datetime_parser.py @@ -55,7 +55,12 @@ def test_parse_date(test_name, input_date, date_format, expected_output_date): [ ("test_format_timestamp", datetime.datetime(2021, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), "%s", "1609459200"), ("test_format_timestamp_ms", datetime.datetime(2021, 1, 1, 0, 0, 0, 1000, tzinfo=datetime.timezone.utc), "%ms", "1609459200001"), - ("test_format_timestamp_as_float", datetime.datetime(2023, 1, 30, 15, 28, 28, 873709, tzinfo=datetime.timezone.utc), "%s_as_float", "1675092508.873709"), + ( + "test_format_timestamp_as_float", + datetime.datetime(2023, 1, 30, 
15, 28, 28, 873709, tzinfo=datetime.timezone.utc), + "%s_as_float", + "1675092508.873709", + ), ("test_format_string", datetime.datetime(2021, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), "%Y-%m-%d", "2021-01-01"), ("test_format_to_number", datetime.datetime(2021, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), "%Y%m%d", "20210101"), ], diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_min_max_datetime.py b/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_min_max_datetime.py index 84a63969cec6..ff9aedf0752a 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_min_max_datetime.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_min_max_datetime.py @@ -114,11 +114,12 @@ def test_min_max_datetime_lazy_eval(): @pytest.mark.parametrize( - "input_datetime", [ + "input_datetime", + [ pytest.param("2022-01-01T00:00:00", id="test_create_min_max_datetime_from_string"), pytest.param(InterpolatedString.create("2022-01-01T00:00:00", parameters={}), id="test_create_min_max_datetime_from_string"), - pytest.param(MinMaxDatetime("2022-01-01T00:00:00", parameters={}), id="test_create_min_max_datetime_from_minmaxdatetime") - ] + pytest.param(MinMaxDatetime("2022-01-01T00:00:00", parameters={}), id="test_create_min_max_datetime_from_minmaxdatetime"), + ], ) def test_create_min_max_datetime(input_datetime): minMaxDatetime = MinMaxDatetime.create(input_datetime, parameters={}) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/decoders/test_json_decoder.py b/airbyte-cdk/python/unit_tests/sources/declarative/decoders/test_json_decoder.py index 52bc55201bbe..65ed78698ca6 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/decoders/test_json_decoder.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/decoders/test_json_decoder.py @@ -7,19 +7,15 @@ import pytest import requests from airbyte_cdk import YamlDeclarativeSource +from airbyte_cdk.models import SyncMode from 
airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder, JsonlDecoder from airbyte_cdk.sources.declarative.models import DeclarativeStream as DeclarativeStreamModel from airbyte_cdk.sources.declarative.parsers.model_to_component_factory import ModelToComponentFactory -from airbyte_protocol.models import SyncMode @pytest.mark.parametrize( "response_body, first_element", - [ - ("", {}), - ("[]", {}), - ('{"healthcheck": {"status": "ok"}}', {"healthcheck": {"status": "ok"}}) - ], + [("", {}), ("[]", {}), ('{"healthcheck": {"status": "ok"}}', {"healthcheck": {"status": "ok"}})], ) def test_json_decoder(requests_mock, response_body, first_element): requests_mock.register_uri("GET", "https://airbyte.io/", text=response_body) @@ -45,13 +41,13 @@ def test_jsonl_decoder(requests_mock, response_body, expected_json): @pytest.fixture(name="large_events_response") def large_event_response_fixture(): data = {"email": "email1@example.com"} - json_string = json.dumps(data) - lines_in_response = 5_000_000 + jsonl_string = f"{json.dumps(data)}\n" + lines_in_response = 2_000_000 # ≈ 58 MB of response dir_path = os.path.dirname(os.path.realpath(__file__)) file_path = f"{dir_path}/test_response.txt" with open(file_path, "w") as file: for _ in range(lines_in_response): - file.write(json_string + "\n") + file.write(jsonl_string) yield (lines_in_response, file_path) os.remove(file_path) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_dpath_extractor.py b/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_dpath_extractor.py index 24fb662d726a..92b4ffbb4804 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_dpath_extractor.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_dpath_extractor.py @@ -56,12 +56,15 @@ def create_response(body: Union[Dict, bytes]): ["data"], decoder_jsonl, b'{"data": [{"id": 1, "text_field": "This is a text\\n. 
New paragraph start here."}]}\n{"data": [{"id": 2, "text_field": "This is another text\\n. New paragraph start here."}]}', - [{"id": 1, "text_field": "This is a text\n. New paragraph start here."}, {"id": 2, "text_field": "This is another text\n. New paragraph start here."}], + [ + {"id": 1, "text_field": "This is a text\n. New paragraph start here."}, + {"id": 2, "text_field": "This is another text\n. New paragraph start here."}, + ], ), ( [], decoder_iterable, - b'user1@example.com\nuser2@example.com', + b"user1@example.com\nuser2@example.com", [{"record": "user1@example.com"}, {"record": "user2@example.com"}], ), ], diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_datetime_based_cursor.py b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_datetime_based_cursor.py index 8132e4b60349..33bd6786c152 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_datetime_based_cursor.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_datetime_based_cursor.py @@ -333,7 +333,7 @@ def mock_datetime_now(monkeypatch): [ {"start_time": "2021-01-01T00:00:00.000000+0000", "end_time": "2021-01-31T23:59:59.999999+0000"}, ], - ) + ), ], ) def test_stream_slices( @@ -580,10 +580,11 @@ def test_request_option(test_name, inject_into, field_name, expected_req_params, @pytest.mark.parametrize( - "stream_slice", [ + "stream_slice", + [ pytest.param(None, id="test_none_stream_slice"), pytest.param({}, id="test_none_stream_slice"), - ] + ], ) def test_request_option_with_empty_stream_slice(stream_slice): start_request_option = RequestOption(inject_into=RequestOptionType.request_parameter, parameters={}, field_name="starttime") diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor.py b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor.py index 96eb3c86e52a..b2c8d5faf46d 100644 --- 
a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor.py @@ -6,12 +6,12 @@ from unittest.mock import Mock import pytest +from airbyte_cdk.models import FailureType from airbyte_cdk.sources.declarative.incremental.declarative_cursor import DeclarativeCursor from airbyte_cdk.sources.declarative.incremental.per_partition_cursor import PerPartitionCursor, PerPartitionKeySerializer, StreamSlice from airbyte_cdk.sources.declarative.partition_routers.partition_router import PartitionRouter from airbyte_cdk.sources.types import Record from airbyte_cdk.utils import AirbyteTracedException -from airbyte_protocol.models import FailureType PARTITION = { "partition_key string": "partition value", diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py index 1b3550a99861..4d2141b42373 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py @@ -37,9 +37,8 @@ def with_substream_partition_router(self, stream_name): "stream": "#/definitions/Rates", "parent_key": "id", "partition_field": "parent_id", - } - ] + ], } return self @@ -100,10 +99,7 @@ def build(self): }, }, }, - "streams": [ - {"$ref": "#/definitions/Rates"}, - {"$ref": "#/definitions/AnotherStream"} - ], + "streams": [{"$ref": "#/definitions/Rates"}, {"$ref": "#/definitions/AnotherStream"}], "spec": { "connection_specification": { "$schema": "http://json-schema.org/draft-07/schema#", @@ -180,11 +176,9 @@ def test_given_record_for_partition_when_read_then_update_state(): stream_instance = source.streams({})[0] 
list(stream_instance.stream_slices(sync_mode=SYNC_MODE)) - stream_slice = StreamSlice(partition={"partition_field": "1"}, - cursor_slice={"start_time": "2022-01-01", "end_time": "2022-01-31"}) + stream_slice = StreamSlice(partition={"partition_field": "1"}, cursor_slice={"start_time": "2022-01-01", "end_time": "2022-01-31"}) with patch.object( - SimpleRetriever, "_read_pages", - side_effect=[[Record({"a record key": "a record value", CURSOR_FIELD: "2022-01-15"}, stream_slice)]] + SimpleRetriever, "_read_pages", side_effect=[[Record({"a record key": "a record value", CURSOR_FIELD: "2022-01-15"}, stream_slice)]] ): list( stream_instance.read_records( @@ -236,17 +230,41 @@ def test_substream_without_input_state(): # This mocks the resulting records of the Rates stream which acts as the parent stream of the SubstreamPartitionRouter being tested with patch.object( - SimpleRetriever, "_read_pages", side_effect=[[Record({"id": "1", CURSOR_FIELD: "2022-01-15"}, parent_stream_slice)], - [Record({"id": "2", CURSOR_FIELD: "2022-01-15"}, parent_stream_slice)]] + SimpleRetriever, + "_read_pages", + side_effect=[ + [Record({"id": "1", CURSOR_FIELD: "2022-01-15"}, parent_stream_slice)], + [Record({"id": "2", CURSOR_FIELD: "2022-01-15"}, parent_stream_slice)], + ], ): slices = list(stream_instance.stream_slices(sync_mode=SYNC_MODE)) assert list(slices) == [ - StreamSlice(partition={"parent_id": "1", "parent_slice": {}, }, - cursor_slice={"start_time": "2022-01-01", "end_time": "2022-01-31"}), - StreamSlice(partition={"parent_id": "1", "parent_slice": {}, }, - cursor_slice={"start_time": "2022-02-01", "end_time": "2022-02-28"}), - StreamSlice(partition={"parent_id": "2", "parent_slice": {}, }, - cursor_slice={"start_time": "2022-01-01", "end_time": "2022-01-31"}), - StreamSlice(partition={"parent_id": "2", "parent_slice": {}, }, - cursor_slice={"start_time": "2022-02-01", "end_time": "2022-02-28"}), + StreamSlice( + partition={ + "parent_id": "1", + "parent_slice": {}, + }, + 
cursor_slice={"start_time": "2022-01-01", "end_time": "2022-01-31"}, + ), + StreamSlice( + partition={ + "parent_id": "1", + "parent_slice": {}, + }, + cursor_slice={"start_time": "2022-02-01", "end_time": "2022-02-28"}, + ), + StreamSlice( + partition={ + "parent_id": "2", + "parent_slice": {}, + }, + cursor_slice={"start_time": "2022-01-01", "end_time": "2022-01-31"}, + ), + StreamSlice( + partition={ + "parent_id": "2", + "parent_slice": {}, + }, + cursor_slice={"start_time": "2022-02-01", "end_time": "2022-02-28"}, + ), ] diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_resumable_full_refresh_cursor.py b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_resumable_full_refresh_cursor.py index bb15e465e8fa..b45973283aad 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_resumable_full_refresh_cursor.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_resumable_full_refresh_cursor.py @@ -32,7 +32,7 @@ StreamSlice(cursor_slice={}, partition={}), id="test_empty_substream_resumable_full_refresh_stream_state", ), - ] + ], ) def test_stream_slices(stream_state, cursor, expected_slice): cursor = cursor(parameters={}) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/migrations/test_legacy_to_per_partition_migration.py b/airbyte-cdk/python/unit_tests/sources/declarative/migrations/test_legacy_to_per_partition_migration.py index 7fce15031ee1..97e5efd69f97 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/migrations/test_legacy_to_per_partition_migration.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/migrations/test_legacy_to_per_partition_migration.py @@ -23,12 +23,8 @@ def test_migrate_a_valid_legacy_state_to_per_partition(): input_state = { - "13506132": { - "last_changed": "2022-12-27T08:34:39+00:00" - }, - "14351124": { - "last_changed": "2022-12-27T08:35:39+00:00" - }, + "13506132": {"last_changed": "2022-12-27T08:34:39+00:00"}, + 
"14351124": {"last_changed": "2022-12-27T08:35:39+00:00"}, } migrator = _migrator() @@ -37,14 +33,8 @@ def test_migrate_a_valid_legacy_state_to_per_partition(): expected_state = { "states": [ - { - "partition": {"parent_id": "13506132"}, - "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} - }, - { - "partition": {"parent_id": "14351124"}, - "cursor": {"last_changed": "2022-12-27T08:35:39+00:00"} - }, + {"partition": {"parent_id": "13506132"}, "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"}}, + {"partition": {"parent_id": "14351124"}, "cursor": {"last_changed": "2022-12-27T08:35:39+00:00"}}, ] } @@ -52,115 +42,88 @@ def test_migrate_a_valid_legacy_state_to_per_partition(): @pytest.mark.parametrize( - "input_state", [ - pytest.param({ - "states": [ - { - "partition": {"id": "13506132"}, - "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} - }, - { - "partition": {"id": "14351124"}, - "cursor": {"last_changed": "2022-12-27T08:35:39+00:00"} - }, - ] - }, id="test_should_not_migrate_a_per_partition_state"), - pytest.param({ - "states": [ - { - "partition": {"id": "13506132"}, - "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} - }, - { - "partition": {"id": "14351124"}, - }, - ] - }, id="test_should_not_migrate_state_without_a_cursor_component"), - pytest.param({ - "states": [ - { - "partition": {"id": "13506132"}, - "cursor": {"updated_at": "2022-12-27T08:34:39+00:00"} - }, - { - "partition": {"id": "14351124"}, - "cursor": {"updated_at": "2022-12-27T08:35:39+00:00"} - }, - ] - }, id="test_should_not_migrate_a_per_partition_state_with_wrong_cursor_field"), - pytest.param({ - "states": [ - { - "partition": {"id": "13506132"}, - "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} - }, - { - "partition": {"id": "14351124"}, - "cursor": {"last_changed": "2022-12-27T08:35:39+00:00", "updated_at": "2021-01-01"} - }, - ] - }, id="test_should_not_migrate_a_per_partition_state_with_multiple_cursor_fields"), + "input_state", + [ pytest.param( { 
"states": [ - { - "partition": {"id": "13506132"}, - "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} - }, - { - "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} - }, + {"partition": {"id": "13506132"}, "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"}}, + {"partition": {"id": "14351124"}, "cursor": {"last_changed": "2022-12-27T08:35:39+00:00"}}, ] - }, id="test_should_not_migrate_state_without_a_partition_component" + }, + id="test_should_not_migrate_a_per_partition_state", ), pytest.param( { "states": [ + {"partition": {"id": "13506132"}, "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"}}, { - "partition": {"id": "13506132", "another_id": "A"}, - "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} - }, - { - "partition": {"id": "13506134"}, - "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} + "partition": {"id": "14351124"}, }, ] - }, id="test_should_not_migrate_state_if_multiple_partition_keys" + }, + id="test_should_not_migrate_state_without_a_cursor_component", + ), + pytest.param( + { + "states": [ + {"partition": {"id": "13506132"}, "cursor": {"updated_at": "2022-12-27T08:34:39+00:00"}}, + {"partition": {"id": "14351124"}, "cursor": {"updated_at": "2022-12-27T08:35:39+00:00"}}, + ] + }, + id="test_should_not_migrate_a_per_partition_state_with_wrong_cursor_field", ), pytest.param( { "states": [ - { - "partition": {"identifier": "13506132"}, - "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} - }, - { - "partition": {"id": "13506134"}, - "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"} - }, + {"partition": {"id": "13506132"}, "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"}}, + {"partition": {"id": "14351124"}, "cursor": {"last_changed": "2022-12-27T08:35:39+00:00", "updated_at": "2021-01-01"}}, + ] + }, + id="test_should_not_migrate_a_per_partition_state_with_multiple_cursor_fields", + ), + pytest.param( + { + "states": [ + {"partition": {"id": "13506132"}, "cursor": {"last_changed": 
"2022-12-27T08:34:39+00:00"}}, + {"cursor": {"last_changed": "2022-12-27T08:34:39+00:00"}}, ] - }, id="test_should_not_migrate_state_if_invalid_partition_key" + }, + id="test_should_not_migrate_state_without_a_partition_component", ), pytest.param( { - "13506132": { - "last_changed": "2022-12-27T08:34:39+00:00" - }, - "14351124": { - "last_changed": "2022-12-27T08:35:39+00:00", - "another_key": "2022-12-27T08:35:39+00:00" - }, - }, id="test_should_not_migrate_if_the_partitioned_state_has_more_than_one_key" + "states": [ + {"partition": {"id": "13506132", "another_id": "A"}, "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"}}, + {"partition": {"id": "13506134"}, "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"}}, + ] + }, + id="test_should_not_migrate_state_if_multiple_partition_keys", + ), + pytest.param( + { + "states": [ + {"partition": {"identifier": "13506132"}, "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"}}, + {"partition": {"id": "13506134"}, "cursor": {"last_changed": "2022-12-27T08:34:39+00:00"}}, + ] + }, + id="test_should_not_migrate_state_if_invalid_partition_key", ), - pytest.param({ - "13506132": { - "last_changed": "2022-12-27T08:34:39+00:00" + pytest.param( + { + "13506132": {"last_changed": "2022-12-27T08:34:39+00:00"}, + "14351124": {"last_changed": "2022-12-27T08:35:39+00:00", "another_key": "2022-12-27T08:35:39+00:00"}, }, - "14351124": { - "another_key": "2022-12-27T08:35:39+00:00" + id="test_should_not_migrate_if_the_partitioned_state_has_more_than_one_key", + ), + pytest.param( + { + "13506132": {"last_changed": "2022-12-27T08:34:39+00:00"}, + "14351124": {"another_key": "2022-12-27T08:35:39+00:00"}, }, - }, id="test_should_not_migrate_if_the_partitioned_state_key_is_not_the_cursor_field"), - ] + id="test_should_not_migrate_if_the_partitioned_state_key_is_not_the_cursor_field", + ), + ], ) def test_should_not_migrate(input_state): migrator = _migrator() @@ -169,12 +132,8 @@ def test_should_not_migrate(input_state): def 
test_should_not_migrate_stream_with_multiple_parent_streams(): input_state = { - "13506132": { - "last_changed": "2022-12-27T08:34:39+00:00" - }, - "14351124": { - "last_changed": "2022-12-27T08:35:39+00:00" - }, + "13506132": {"last_changed": "2022-12-27T08:34:39+00:00"}, + "14351124": {"last_changed": "2022-12-27T08:35:39+00:00"}, } migrator = _migrator_with_multiple_parent_streams() @@ -191,14 +150,10 @@ def _migrator(): parent_key="{{ parameters['parent_key_id'] }}", partition_field="parent_id", stream=DeclarativeStream( - type="DeclarativeStream", - retriever=CustomRetriever( - type="CustomRetriever", - class_name="a_class_name" - ) - ) + type="DeclarativeStream", retriever=CustomRetriever(type="CustomRetriever", class_name="a_class_name") + ), ) - ] + ], ) cursor = DatetimeBasedCursor( type="DatetimeBasedCursor", @@ -220,26 +175,18 @@ def _migrator_with_multiple_parent_streams(): parent_key="id", partition_field="parent_id", stream=DeclarativeStream( - type="DeclarativeStream", - retriever=CustomRetriever( - type="CustomRetriever", - class_name="a_class_name" - ) - ) + type="DeclarativeStream", retriever=CustomRetriever(type="CustomRetriever", class_name="a_class_name") + ), ), ParentStreamConfig( type="ParentStreamConfig", parent_key="id", partition_field="parent_id", stream=DeclarativeStream( - type="DeclarativeStream", - retriever=CustomRetriever( - type="CustomRetriever", - class_name="a_class_name" - ) - ) + type="DeclarativeStream", retriever=CustomRetriever(type="CustomRetriever", class_name="a_class_name") + ), ), - ] + ], ) cursor = DatetimeBasedCursor( type="DatetimeBasedCursor", @@ -256,10 +203,28 @@ def _migrator_with_multiple_parent_streams(): "retriever_type, partition_router_class, is_parent_stream_config, expected_exception, expected_error_message", [ (SimpleRetriever, CustomPartitionRouter, True, None, None), - (None, CustomPartitionRouter, True, ValueError, "LegacyToPerPartitionStateMigrations can only be applied on a DeclarativeStream with 
a SimpleRetriever. Got "), - (SimpleRetriever, None, False, ValueError, "LegacyToPerPartitionStateMigrations can only be applied on a SimpleRetriever with a Substream partition router. Got "), - (SimpleRetriever, CustomPartitionRouter, False, ValueError, "LegacyToPerPartitionStateMigrations can only be applied with a parent stream configuration."), - ] + ( + None, + CustomPartitionRouter, + True, + ValueError, + "LegacyToPerPartitionStateMigrations can only be applied on a DeclarativeStream with a SimpleRetriever. Got ", + ), + ( + SimpleRetriever, + None, + False, + ValueError, + "LegacyToPerPartitionStateMigrations can only be applied on a SimpleRetriever with a Substream partition router. Got ", + ), + ( + SimpleRetriever, + CustomPartitionRouter, + False, + ValueError, + "LegacyToPerPartitionStateMigrations can only be applied with a parent stream configuration.", + ), + ], ) def test_create_legacy_to_per_partition_state_migration( retriever_type, @@ -283,13 +248,30 @@ def test_create_legacy_to_per_partition_state_migration( state_migrations_manifest = transformer.propagate_types_and_parameters("", resolved_manifest["state_migrations"][0], {}) if is_parent_stream_config: - parent_stream_config = ParentStreamConfig(type="ParentStreamConfig", parent_key="id", partition_field="parent_id", stream=DeclarativeStream(type="DeclarativeStream", retriever=CustomRetriever(type="CustomRetriever", class_name="a_class_name"))) + parent_stream_config = ParentStreamConfig( + type="ParentStreamConfig", + parent_key="id", + partition_field="parent_id", + stream=DeclarativeStream( + type="DeclarativeStream", retriever=CustomRetriever(type="CustomRetriever", class_name="a_class_name") + ), + ) partition_router.parent_stream_configs = [parent_stream_config] if expected_exception: with pytest.raises(expected_exception) as excinfo: - factory.create_component(model_type=LegacyToPerPartitionStateMigrationModel, component_definition=state_migrations_manifest, config={}, 
declarative_stream=stream) + factory.create_component( + model_type=LegacyToPerPartitionStateMigrationModel, + component_definition=state_migrations_manifest, + config={}, + declarative_stream=stream, + ) assert str(excinfo.value) == expected_error_message else: - migration_instance = factory.create_component(model_type=LegacyToPerPartitionStateMigrationModel, component_definition=state_migrations_manifest, config={}, declarative_stream=stream) + migration_instance = factory.create_component( + model_type=LegacyToPerPartitionStateMigrationModel, + component_definition=state_migrations_manifest, + config={}, + declarative_stream=stream, + ) assert migration_instance is not None diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py index d73527ad6f0f..d574ed8724e8 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py @@ -9,7 +9,7 @@ import freezegun import pytest from airbyte_cdk import AirbyteTracedException -from airbyte_cdk.models import Level +from airbyte_cdk.models import FailureType, Level from airbyte_cdk.sources.declarative.auth import DeclarativeOauth2Authenticator, JwtAuthenticator from airbyte_cdk.sources.declarative.auth.token import ( ApiKeyAuthenticator, @@ -82,7 +82,6 @@ from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource from airbyte_cdk.sources.streams.http.error_handlers.response_models import ResponseAction from airbyte_cdk.sources.streams.http.requests_native_auth.oauth import SingleUseRefreshTokenOauth2Authenticator -from airbyte_protocol.models import FailureType from unit_tests.sources.declarative.parsers.testing_components import TestingCustomSubstreamPartitionRouter, TestingSomeComponent factory = 
ModelToComponentFactory() @@ -1043,10 +1042,7 @@ def test_create_record_selector(test_name, record_selector, expected_runtime_sel selector_manifest = transformer.propagate_types_and_parameters("", resolved_manifest["selector"], {}) selector = factory.create_component( - model_type=RecordSelectorModel, component_definition=selector_manifest, - decoder=None, - transformations=[], - config=input_config + model_type=RecordSelectorModel, component_definition=selector_manifest, decoder=None, transformations=[], config=input_config ) assert isinstance(selector, RecordSelector) @@ -1127,7 +1123,8 @@ def test_create_requester(test_name, error_handler, expected_backoff_strategy_ty selector = factory.create_component( model_type=HttpRequesterModel, - component_definition=requester_manifest, config=input_config, + component_definition=requester_manifest, + config=input_config, name=name, decoder=None, ) @@ -1179,8 +1176,7 @@ def test_create_request_with_legacy_session_authenticator(): requester_manifest = transformer.propagate_types_and_parameters("", resolved_manifest["requester"], {}) selector = factory.create_component( - model_type=HttpRequesterModel, component_definition=requester_manifest, config=input_config, name=name, - decoder=None + model_type=HttpRequesterModel, component_definition=requester_manifest, config=input_config, name=name, decoder=None ) assert isinstance(selector, HttpRequester) @@ -1265,11 +1261,13 @@ def test_given_composite_error_handler_does_not_match_response_then_fallback_on_ resolved_manifest = resolver.preprocess_manifest(parsed_manifest) requester_manifest = transformer.propagate_types_and_parameters("", resolved_manifest["requester"], {}) http_requester = factory.create_component( - model_type=HttpRequesterModel, component_definition=requester_manifest, config=input_config, name="any name", decoder=JsonDecoder(parameters={}) - ) - requests_mock.get( - "https://api.sendgrid.com/v3/marketing/lists", status_code=401 + 
model_type=HttpRequesterModel, + component_definition=requester_manifest, + config=input_config, + name="any name", + decoder=JsonDecoder(parameters={}), ) + requests_mock.get("https://api.sendgrid.com/v3/marketing/lists", status_code=401) with pytest.raises(AirbyteTracedException) as exception: http_requester.send_request() @@ -1453,8 +1451,11 @@ def test_create_default_paginator(): paginator_manifest = transformer.propagate_types_and_parameters("", resolved_manifest["paginator"], {}) paginator = factory.create_component( - model_type=DefaultPaginatorModel, component_definition=paginator_manifest, config=input_config, url_base="https://airbyte.io", - decoder=JsonDecoder(parameters={}) + model_type=DefaultPaginatorModel, + component_definition=paginator_manifest, + config=input_config, + url_base="https://airbyte.io", + decoder=JsonDecoder(parameters={}), ) assert isinstance(paginator, DefaultPaginator) @@ -1481,7 +1482,12 @@ def test_create_default_paginator(): "subcomponent_field_with_hint": {"type": "DpathExtractor", "field_path": [], "decoder": {"type": "JsonDecoder"}}, }, "subcomponent_field_with_hint", - DpathExtractor(field_path=[], config={"apikey": "verysecrettoken", "repos": ["airbyte", "airbyte-cloud"]}, decoder=JsonDecoder(parameters={}), parameters={}), + DpathExtractor( + field_path=[], + config={"apikey": "verysecrettoken", "repos": ["airbyte", "airbyte-cloud"]}, + decoder=JsonDecoder(parameters={}), + parameters={}, + ), None, id="test_create_custom_component_with_subcomponent_that_must_be_parsed", ), @@ -2118,10 +2124,7 @@ def test_create_page_increment_with_interpolated_page_size(): start_from_page=1, inject_on_first_request=True, ) - config = { - **input_config, - "page_size": 5 - } + config = {**input_config, "page_size": 5} expected_strategy = PageIncrement(page_size=5, start_from_page=1, inject_on_first_request=True, parameters={}, config=config) strategy = factory.create_page_increment(model, config) @@ -2156,7 +2159,7 @@ def 
test_create_custom_schema_loader(): definition = { "type": "CustomSchemaLoader", - "class_name": "unit_tests.sources.declarative.parsers.test_model_to_component_factory.MyCustomSchemaLoader" + "class_name": "unit_tests.sources.declarative.parsers.test_model_to_component_factory.MyCustomSchemaLoader", } component = factory.create_component(CustomSchemaLoaderModel, definition, {}) assert isinstance(component, MyCustomSchemaLoader) @@ -2181,12 +2184,9 @@ def test_create_custom_schema_loader(): "algorithm": "HS256", "base64_encode_secret_key": False, "token_duration": 1200, - "jwt_headers": { - "typ": "JWT", - "alg": "HS256" - }, - "jwt_payload": {} - } + "jwt_headers": {"typ": "JWT", "alg": "HS256"}, + "jwt_payload": {}, + }, ), ( { @@ -2228,7 +2228,6 @@ def test_create_custom_schema_loader(): "alg": "RS256", "cty": "JWT", "test": "test custom header", - }, "jwt_payload": { "iss": "test iss", @@ -2236,7 +2235,7 @@ def test_create_custom_schema_loader(): "aud": "test aud", "test": "test custom payload", }, - } + }, ), ( { @@ -2261,12 +2260,11 @@ def test_create_custom_schema_loader(): "typ": "JWT", "alg": "HS256", "custom_header": "custom header value", - }, "jwt_payload": { "custom_payload": "custom payload value", }, - } + }, ), ( { @@ -2280,7 +2278,7 @@ def test_create_custom_schema_loader(): """, { "expect_error": True, - } + }, ), ], ) @@ -2297,9 +2295,7 @@ def test_create_jwt_authenticator(config, manifest, expected): ) return - authenticator = factory.create_component( - model_type=JwtAuthenticatorModel, component_definition=authenticator_manifest, config=config - ) + authenticator = factory.create_component(model_type=JwtAuthenticatorModel, component_definition=authenticator_manifest, config=config) assert isinstance(authenticator, JwtAuthenticator) assert authenticator._secret_key.eval(config) == expected["secret_key"] @@ -2310,9 +2306,11 @@ def test_create_jwt_authenticator(config, manifest, expected): assert authenticator._header_prefix.eval(config) == 
expected["header_prefix"] assert authenticator._get_jwt_headers() == expected["jwt_headers"] jwt_payload = expected["jwt_payload"] - jwt_payload.update({ - "iat": int(datetime.datetime.now().timestamp()), - "nbf": int(datetime.datetime.now().timestamp()), - "exp": int(datetime.datetime.now().timestamp()) + expected["token_duration"] - }) + jwt_payload.update( + { + "iat": int(datetime.datetime.now().timestamp()), + "nbf": int(datetime.datetime.now().timestamp()), + "exp": int(datetime.datetime.now().timestamp()) + expected["token_duration"], + } + ) assert authenticator._get_jwt_payload() == jwt_payload diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_cartesian_product_partition_router.py b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_cartesian_product_partition_router.py index 3ec2537e0072..2b9313b3ebd7 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_cartesian_product_partition_router.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_cartesian_product_partition_router.py @@ -17,9 +17,11 @@ ( "test_single_stream_slicer", [ListPartitionRouter(values=["customer", "store", "subscription"], cursor_field="owner_resource", config={}, parameters={})], - [StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={}), - StreamSlice(partition={"owner_resource": "store"}, cursor_slice={}), - StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={})], + [ + StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "store"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={}), + ], ), ( "test_two_stream_slicers", @@ -37,24 +39,24 @@ ], ), ( - "test_singledatetime", - [ - DatetimeBasedCursor( - start_datetime=MinMaxDatetime(datetime="2021-01-01", datetime_format="%Y-%m-%d", parameters={}), - 
end_datetime=MinMaxDatetime(datetime="2021-01-03", datetime_format="%Y-%m-%d", parameters={}), - step="P1D", - cursor_field=InterpolatedString.create("", parameters={}), - datetime_format="%Y-%m-%d", - cursor_granularity="P1D", - config={}, - parameters={}, - ), - ], - [ - StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-01", "end_time": "2021-01-01"}), - StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-02", "end_time": "2021-01-02"}), - StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-03", "end_time": "2021-01-03"}), - ], + "test_singledatetime", + [ + DatetimeBasedCursor( + start_datetime=MinMaxDatetime(datetime="2021-01-01", datetime_format="%Y-%m-%d", parameters={}), + end_datetime=MinMaxDatetime(datetime="2021-01-03", datetime_format="%Y-%m-%d", parameters={}), + step="P1D", + cursor_field=InterpolatedString.create("", parameters={}), + datetime_format="%Y-%m-%d", + cursor_granularity="P1D", + config={}, + parameters={}, + ), + ], + [ + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-01", "end_time": "2021-01-01"}), + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-02", "end_time": "2021-01-02"}), + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-03", "end_time": "2021-01-03"}), + ], ), ( "test_list_and_datetime", @@ -78,9 +80,15 @@ StreamSlice(partition={"owner_resource": "store"}, cursor_slice={"start_time": "2021-01-01", "end_time": "2021-01-01"}), StreamSlice(partition={"owner_resource": "store"}, cursor_slice={"start_time": "2021-01-02", "end_time": "2021-01-02"}), StreamSlice(partition={"owner_resource": "store"}, cursor_slice={"start_time": "2021-01-03", "end_time": "2021-01-03"}), - StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={"start_time": "2021-01-01", "end_time": "2021-01-01"}), - StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={"start_time": "2021-01-02", "end_time": "2021-01-02"}), - 
StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={"start_time": "2021-01-03", "end_time": "2021-01-03"}), + StreamSlice( + partition={"owner_resource": "subscription"}, cursor_slice={"start_time": "2021-01-01", "end_time": "2021-01-01"} + ), + StreamSlice( + partition={"owner_resource": "subscription"}, cursor_slice={"start_time": "2021-01-02", "end_time": "2021-01-02"} + ), + StreamSlice( + partition={"owner_resource": "subscription"}, cursor_slice={"start_time": "2021-01-03", "end_time": "2021-01-03"} + ), ], ), ], diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_list_partition_router.py b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_list_partition_router.py index 387579783e4d..87aa18f5a0b4 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_list_partition_router.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_list_partition_router.py @@ -18,23 +18,29 @@ ( ["customer", "store", "subscription"], "owner_resource", - [StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={}), - StreamSlice(partition={"owner_resource": "store"}, cursor_slice={}), - StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={})], + [ + StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "store"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={}), + ], ), ( '["customer", "store", "subscription"]', "owner_resource", - [StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={}), - StreamSlice(partition={"owner_resource": "store"}, cursor_slice={}), - StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={})], + [ + StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "store"}, cursor_slice={}), + 
StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={}), + ], ), ( '["customer", "store", "subscription"]', "{{ parameters['cursor_field'] }}", - [StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={}), - StreamSlice(partition={"owner_resource": "store"}, cursor_slice={}), - StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={})], + [ + StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "store"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={}), + ], ), ], ids=[ @@ -106,8 +112,8 @@ def test_request_option(request_option, expected_req_params, expected_headers, e [ pytest.param({}, id="test_request_option_is_empty_if_empty_stream_slice"), pytest.param({"not the cursor": "value"}, id="test_request_option_is_empty_if_the_stream_slice_does_not_have_cursor_field"), - pytest.param(None, id="test_request_option_is_empty_if_no_stream_slice") - ] + pytest.param(None, id="test_request_option_is_empty_if_no_stream_slice"), + ], ) def test_request_option_is_empty_if_no_stream_slice(stream_slice): request_option = RequestOption(inject_into=RequestOptionType.body_data, parameters={}, field_name="owner_resource") diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py index 773ed96571d0..9ced561742f6 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py @@ -19,6 +19,7 @@ SyncMode, ) from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource +from orjson import orjson SUBSTREAM_MANIFEST: MutableMapping[str, Any] = { "version": "0.51.42", @@ -349,7 +350,7 @@ def _run_read( 
type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="post_comment_votes", namespace=None), - stream_state=AirbyteStateBlob.parse_obj( + stream_state=AirbyteStateBlob( { "parent_state": { "post_comments": { @@ -424,7 +425,7 @@ def test_incremental_parent_state(test_name, manifest, mock_requests, expected_r output_data = [message.record.data for message in output if message.record] assert output_data == expected_records - final_state = [message.state.stream.stream_state.dict() for message in output if message.state] + final_state = [orjson.loads(orjson.dumps(message.state.stream.stream_state)) for message in output if message.state] assert final_state[-1] == expected_state @@ -467,56 +468,56 @@ def test_incremental_parent_state(test_name, manifest, mock_requests, expected_r ), # Fetch the first page of votes for comment 10 of post 1 ( - "https://api.example.com/community/posts/1/comments/10/votes?per_page=100&start_time=2024-01-02T00:00:00Z", - { - "votes": [{"id": 100, "comment_id": 10, "created_at": "2024-01-15T00:00:00Z"}], - "next_page": "https://api.example.com/community/posts/1/comments/10/votes?per_page=100&page=2&start_time=2024-01-01T00:00:01Z", - }, + "https://api.example.com/community/posts/1/comments/10/votes?per_page=100&start_time=2024-01-02T00:00:00Z", + { + "votes": [{"id": 100, "comment_id": 10, "created_at": "2024-01-15T00:00:00Z"}], + "next_page": "https://api.example.com/community/posts/1/comments/10/votes?per_page=100&page=2&start_time=2024-01-01T00:00:01Z", + }, ), # Fetch the second page of votes for comment 10 of post 1 ( - "https://api.example.com/community/posts/1/comments/10/votes?per_page=100&page=2&start_time=2024-01-01T00:00:01Z", - {"votes": [{"id": 101, "comment_id": 10, "created_at": "2024-01-14T00:00:00Z"}]}, + "https://api.example.com/community/posts/1/comments/10/votes?per_page=100&page=2&start_time=2024-01-01T00:00:01Z", + {"votes": [{"id": 101, "comment_id": 10, "created_at": 
"2024-01-14T00:00:00Z"}]}, ), # Fetch the first page of votes for comment 11 of post 1 ( - "https://api.example.com/community/posts/1/comments/11/votes?per_page=100&start_time=2024-01-03T00:00:00Z", - {"votes": [{"id": 102, "comment_id": 11, "created_at": "2024-01-13T00:00:00Z"}]}, + "https://api.example.com/community/posts/1/comments/11/votes?per_page=100&start_time=2024-01-03T00:00:00Z", + {"votes": [{"id": 102, "comment_id": 11, "created_at": "2024-01-13T00:00:00Z"}]}, ), # Fetch the first page of votes for comment 12 of post 1 ("https://api.example.com/community/posts/1/comments/12/votes?per_page=100&start_time=2024-01-01T00:00:01Z", {"votes": []}), # Fetch the first page of comments for post 2 ( - "https://api.example.com/community/posts/2/comments?per_page=100", - { - "comments": [{"id": 20, "post_id": 2, "updated_at": "2024-01-22T00:00:00Z"}], - "next_page": "https://api.example.com/community/posts/2/comments?per_page=100&page=2", - }, + "https://api.example.com/community/posts/2/comments?per_page=100", + { + "comments": [{"id": 20, "post_id": 2, "updated_at": "2024-01-22T00:00:00Z"}], + "next_page": "https://api.example.com/community/posts/2/comments?per_page=100&page=2", + }, ), # Fetch the second page of comments for post 2 ( - "https://api.example.com/community/posts/2/comments?per_page=100&page=2", - {"comments": [{"id": 21, "post_id": 2, "updated_at": "2024-01-21T00:00:00Z"}]}, + "https://api.example.com/community/posts/2/comments?per_page=100&page=2", + {"comments": [{"id": 21, "post_id": 2, "updated_at": "2024-01-21T00:00:00Z"}]}, ), # Fetch the first page of votes for comment 20 of post 2 ( - "https://api.example.com/community/posts/2/comments/20/votes?per_page=100&start_time=2024-01-01T00:00:01Z", - {"votes": [{"id": 200, "comment_id": 20, "created_at": "2024-01-12T00:00:00Z"}]}, + "https://api.example.com/community/posts/2/comments/20/votes?per_page=100&start_time=2024-01-01T00:00:01Z", + {"votes": [{"id": 200, "comment_id": 20, "created_at": 
"2024-01-12T00:00:00Z"}]}, ), # Fetch the first page of votes for comment 21 of post 2 ( - "https://api.example.com/community/posts/2/comments/21/votes?per_page=100&start_time=2024-01-01T00:00:01Z", - {"votes": [{"id": 201, "comment_id": 21, "created_at": "2024-01-12T00:00:15Z"}]}, + "https://api.example.com/community/posts/2/comments/21/votes?per_page=100&start_time=2024-01-01T00:00:01Z", + {"votes": [{"id": 201, "comment_id": 21, "created_at": "2024-01-12T00:00:15Z"}]}, ), # Fetch the first page of comments for post 3 ( - "https://api.example.com/community/posts/3/comments?per_page=100", - {"comments": [{"id": 30, "post_id": 3, "updated_at": "2024-01-09T00:00:00Z"}]}, + "https://api.example.com/community/posts/3/comments?per_page=100", + {"comments": [{"id": 30, "post_id": 3, "updated_at": "2024-01-09T00:00:00Z"}]}, ), # Fetch the first page of votes for comment 30 of post 3 ( - "https://api.example.com/community/posts/3/comments/30/votes?per_page=100", - {"votes": [{"id": 300, "comment_id": 30, "created_at": "2024-01-10T00:00:00Z"}]}, + "https://api.example.com/community/posts/3/comments/30/votes?per_page=100", + {"votes": [{"id": 300, "comment_id": 30, "created_at": "2024-01-10T00:00:00Z"}]}, ), ], # Expected records @@ -534,7 +535,7 @@ def test_incremental_parent_state(test_name, manifest, mock_requests, expected_r type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="post_comment_votes", namespace=None), - stream_state=AirbyteStateBlob.parse_obj( + stream_state=AirbyteStateBlob( { # This should not happen since parent state is disabled, but I've added this to validate that and # incoming parent_state is ignored when the parent stream's incremental_dependency is disabled @@ -590,12 +591,7 @@ def test_incremental_parent_state(test_name, manifest, mock_requests, expected_r ], ) def test_incremental_parent_state_no_incremental_dependency( - test_name, - manifest, - mock_requests, - expected_records, - initial_state, - 
expected_state + test_name, manifest, mock_requests, expected_records, initial_state, expected_state ): """ This is a pretty complicated test that syncs a low-code connector stream with three levels of substreams @@ -614,8 +610,12 @@ def test_incremental_parent_state_no_incremental_dependency( config = {"start_date": "2024-01-01T00:00:01Z", "credentials": {"email": "email", "api_token": "api_token"}} # Disable incremental_dependency - manifest["definitions"]["post_comments_stream"]["retriever"]["partition_router"]["parent_stream_configs"][0]["incremental_dependency"] = False - manifest["definitions"]["post_comment_votes_stream"]["retriever"]["partition_router"]["parent_stream_configs"][0]["incremental_dependency"] = False + manifest["definitions"]["post_comments_stream"]["retriever"]["partition_router"]["parent_stream_configs"][0][ + "incremental_dependency" + ] = False + manifest["definitions"]["post_comment_votes_stream"]["retriever"]["partition_router"]["parent_stream_configs"][0][ + "incremental_dependency" + ] = False with requests_mock.Mocker() as m: for url, response in mock_requests: @@ -625,5 +625,5 @@ def test_incremental_parent_state_no_incremental_dependency( output_data = [message.record.data for message in output if message.record] assert output_data == expected_records - final_state = [message.state.stream.stream_state.dict() for message in output if message.state] + final_state = [orjson.loads(orjson.dumps(message.state.stream.stream_state)) for message in output if message.state] assert final_state[-1] == expected_state diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py index 5201dbc8f241..3a80407cea96 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py +++ 
b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py @@ -41,9 +41,7 @@ def __init__(self, slices, records, name, cursor_field="", cursor=None): self._slices = slices self._records = records self._stream_cursor_field = ( - InterpolatedString.create(cursor_field, parameters={}) - if isinstance(cursor_field, str) - else cursor_field + InterpolatedString.create(cursor_field, parameters={}) if isinstance(cursor_field, str) else cursor_field ) self._name = name self._state = {"states": []} @@ -311,15 +309,17 @@ def test_substream_partition_router(parent_stream_configs, expected_slices): def test_substream_partition_router_invalid_parent_record_type(): partition_router = SubstreamPartitionRouter( - parent_stream_configs=[ParentStreamConfig( - stream=MockStream([{}], [list()], "first_stream"), - parent_key="id", - partition_field="first_stream_id", - parameters={}, - config={}, - )], + parent_stream_configs=[ + ParentStreamConfig( + stream=MockStream([{}], [list()], "first_stream"), + parent_key="id", + partition_field="first_stream_id", + parameters={}, + config={}, + ) + ], parameters={}, - config={} + config={}, ) with pytest.raises(AirbyteTracedException): @@ -664,7 +664,7 @@ def test_substream_checkpoints_after_each_parent_partition(): [ pytest.param(False, id="test_resumable_full_refresh_stream_without_parent_checkpoint"), pytest.param(True, id="test_resumable_full_refresh_stream_with_use_incremental_dependency_for_parent_checkpoint"), - ] + ], ) def test_substream_using_resumable_full_refresh_parent_stream(use_incremental_dependency): mock_slices = [ @@ -687,8 +687,8 @@ def test_substream_using_resumable_full_refresh_parent_stream(use_incremental_de {"next_page_token": 2}, {"next_page_token": 3}, {"next_page_token": 3}, - {'__ab_full_refresh_sync_complete': True}, - {'__ab_full_refresh_sync_complete': True}, + {"__ab_full_refresh_sync_complete": True}, + {"__ab_full_refresh_sync_complete": True}, ] partition_router 
= SubstreamPartitionRouter( @@ -737,7 +737,7 @@ def test_substream_using_resumable_full_refresh_parent_stream(use_incremental_de [ pytest.param(False, id="test_substream_resumable_full_refresh_stream_without_parent_checkpoint"), pytest.param(True, id="test_substream_resumable_full_refresh_stream_with_use_incremental_dependency_for_parent_checkpoint"), - ] + ], ) def test_substream_using_resumable_full_refresh_parent_stream_slices(use_incremental_dependency): mock_parent_slices = [ @@ -760,72 +760,20 @@ def test_substream_using_resumable_full_refresh_parent_stream_slices(use_increme {"next_page_token": 2}, {"next_page_token": 3}, {"next_page_token": 3}, - {'__ab_full_refresh_sync_complete': True}, - {'__ab_full_refresh_sync_complete': True}, + {"__ab_full_refresh_sync_complete": True}, + {"__ab_full_refresh_sync_complete": True}, ] expected_substream_state = { "states": [ - { - "partition": { - "parent_slice": {}, - "partition_field": "makoto_yuki" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - }, - { - "partition": { - "parent_slice": {}, - "partition_field": "yukari_takeba" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - }, - { - "partition": { - "parent_slice": {}, - "partition_field": "mitsuru_kirijo" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - }, - { - "partition": { - "parent_slice": {}, - "partition_field": "akihiko_sanada" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - }, - { - "partition": { - "parent_slice": {}, - "partition_field": "junpei_iori" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - }, - { - "partition": { - "parent_slice": {}, - "partition_field": "fuuka_yamagishi" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - } + {"partition": {"parent_slice": {}, "partition_field": "makoto_yuki"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, + {"partition": {"parent_slice": {}, "partition_field": "yukari_takeba"}, "cursor": 
{"__ab_full_refresh_sync_complete": True}}, + {"partition": {"parent_slice": {}, "partition_field": "mitsuru_kirijo"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, + {"partition": {"parent_slice": {}, "partition_field": "akihiko_sanada"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, + {"partition": {"parent_slice": {}, "partition_field": "junpei_iori"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, + {"partition": {"parent_slice": {}, "partition_field": "fuuka_yamagishi"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, ], - "parent_state": { - "persona_3_characters": { - "__ab_full_refresh_sync_complete": True - } - } + "parent_state": {"persona_3_characters": {"__ab_full_refresh_sync_complete": True}}, } partition_router = SubstreamPartitionRouter( @@ -874,7 +822,9 @@ def test_substream_using_resumable_full_refresh_parent_stream_slices(use_increme assert actual_slice == expected_parent_slices[expected_counter] # check for parent state if use_incremental_dependency: - assert substream_cursor_slicer._partition_router._parent_state["persona_3_characters"] == expected_parent_state[expected_counter] + assert ( + substream_cursor_slicer._partition_router._parent_state["persona_3_characters"] == expected_parent_state[expected_counter] + ) expected_counter += 1 # validate final state for closed substream slices diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/backoff_strategies/test_wait_time_from_header.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/backoff_strategies/test_wait_time_from_header.py index b57fd714b735..59dbb6b419a7 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/backoff_strategies/test_wait_time_from_header.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/backoff_strategies/test_wait_time_from_header.py @@ -6,10 +6,10 @@ import pytest from airbyte_cdk import 
AirbyteTracedException +from airbyte_cdk.models import FailureType from airbyte_cdk.sources.declarative.requesters.error_handlers.backoff_strategies.wait_time_from_header_backoff_strategy import ( WaitTimeFromHeaderBackoffStrategy, ) -from airbyte_protocol.models import FailureType from requests import Response SOME_BACKOFF_TIME = 60 @@ -44,7 +44,9 @@ def test_given_retry_after_smaller_than_max_time_then_raise_transient_error(): response_mock = MagicMock(spec=Response) retry_after = _A_MAX_TIME - 1 response_mock.headers = {_A_RETRY_HEADER: str(retry_after)} - backoff_strategy = WaitTimeFromHeaderBackoffStrategy(header=_A_RETRY_HEADER, max_waiting_time_in_seconds=_A_MAX_TIME, parameters={}, config={}) + backoff_strategy = WaitTimeFromHeaderBackoffStrategy( + header=_A_RETRY_HEADER, max_waiting_time_in_seconds=_A_MAX_TIME, parameters={}, config={} + ) assert backoff_strategy.backoff_time(response_mock, 1) == retry_after @@ -52,7 +54,9 @@ def test_given_retry_after_smaller_than_max_time_then_raise_transient_error(): def test_given_retry_after_greater_than_max_time_then_raise_transient_error(): response_mock = MagicMock(spec=Response) response_mock.headers = {_A_RETRY_HEADER: str(_A_MAX_TIME + 1)} - backoff_strategy = WaitTimeFromHeaderBackoffStrategy(header=_A_RETRY_HEADER, max_waiting_time_in_seconds=_A_MAX_TIME, parameters={}, config={}) + backoff_strategy = WaitTimeFromHeaderBackoffStrategy( + header=_A_RETRY_HEADER, max_waiting_time_in_seconds=_A_MAX_TIME, parameters={}, config={} + ) with pytest.raises(AirbyteTracedException) as exception: backoff_strategy.backoff_time(response_mock, 1) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_composite_error_handler.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_composite_error_handler.py index 5f34bb28c969..574f3eec0e75 100644 --- 
a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_composite_error_handler.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_composite_error_handler.py @@ -6,11 +6,11 @@ import pytest import requests +from airbyte_cdk.models import FailureType from airbyte_cdk.sources.declarative.requesters.error_handlers import HttpResponseFilter from airbyte_cdk.sources.declarative.requesters.error_handlers.composite_error_handler import CompositeErrorHandler from airbyte_cdk.sources.declarative.requesters.error_handlers.default_error_handler import DefaultErrorHandler from airbyte_cdk.sources.streams.http.error_handlers.response_models import ErrorResolution, ResponseAction -from airbyte_protocol.models import FailureType SOME_BACKOFF_TIME = 60 @@ -34,7 +34,7 @@ response_action=ResponseAction.SUCCESS, failure_type=None, error_message=None, - ) + ), ), ( "test_chain_retrier_ignore_fail", @@ -83,7 +83,7 @@ ErrorResolution( response_action=ResponseAction.IGNORE, ), - ) + ), ], ) def test_composite_error_handler(test_name, first_handler_behavior, second_handler_behavior, expected_behavior): diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_default_error_handler.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_default_error_handler.py index f80aef233ebe..6fc99159afed 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_default_error_handler.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_default_error_handler.py @@ -18,42 +18,42 @@ @pytest.mark.parametrize( - "test_name, http_status_code, expected_error_resolution", - [ - ( - "_with_http_response_status_200", - 200, - ErrorResolution( - response_action=ResponseAction.SUCCESS, - failure_type=None, - error_message=None, - ), - ), - ( - "_with_http_response_status_400", - 400, - 
DEFAULT_ERROR_MAPPING[400], - ), - ( - "_with_http_response_status_404", - 404, - DEFAULT_ERROR_MAPPING[404], + "test_name, http_status_code, expected_error_resolution", + [ + ( + "_with_http_response_status_200", + 200, + ErrorResolution( + response_action=ResponseAction.SUCCESS, + failure_type=None, + error_message=None, ), - ( - "_with_http_response_status_408", - 408, - DEFAULT_ERROR_MAPPING[408], + ), + ( + "_with_http_response_status_400", + 400, + DEFAULT_ERROR_MAPPING[400], + ), + ( + "_with_http_response_status_404", + 404, + DEFAULT_ERROR_MAPPING[404], + ), + ( + "_with_http_response_status_408", + 408, + DEFAULT_ERROR_MAPPING[408], + ), + ( + "_with_unmapped_http_status_418", + 418, + ErrorResolution( + response_action=ResponseAction.RETRY, + failure_type=FailureType.system_error, + error_message="Unexpected response with HTTP status 418", ), - ( - "_with_unmapped_http_status_418", - 418, - ErrorResolution( - response_action=ResponseAction.RETRY, - failure_type=FailureType.system_error, - error_message="Unexpected response with HTTP status 418", - ), - ) - ], + ), + ], ) def test_default_error_handler_with_default_response_filter(test_name, http_status_code: int, expected_error_resolution: ErrorResolution): response_mock = create_response(http_status_code) @@ -65,76 +65,78 @@ def test_default_error_handler_with_default_response_filter(test_name, http_stat @pytest.mark.parametrize( - "test_name, http_status_code, test_response_filter, response_action, failure_type, error_message", - [ - ( - "_with_http_response_status_400_fail_with_default_failure_type", - 400, - HttpResponseFilter( - http_codes=[400], - action=ResponseAction.RETRY, - config={}, - parameters={}, - ), - ResponseAction.RETRY, - FailureType.system_error, - "Bad request. 
Please check your request parameters.", + "test_name, http_status_code, test_response_filter, response_action, failure_type, error_message", + [ + ( + "_with_http_response_status_400_fail_with_default_failure_type", + 400, + HttpResponseFilter( + http_codes=[400], + action=ResponseAction.RETRY, + config={}, + parameters={}, ), - ( - "_with_http_response_status_402_fail_with_default_failure_type", - 402, - HttpResponseFilter( - http_codes=[402], - action=ResponseAction.FAIL, - config={}, - parameters={}, - ), - ResponseAction.FAIL, - FailureType.system_error, - "", + ResponseAction.RETRY, + FailureType.system_error, + "Bad request. Please check your request parameters.", + ), + ( + "_with_http_response_status_402_fail_with_default_failure_type", + 402, + HttpResponseFilter( + http_codes=[402], + action=ResponseAction.FAIL, + config={}, + parameters={}, ), - ( - "_with_http_response_status_403_fail_with_default_failure_type", - 403, - HttpResponseFilter( - http_codes=[403], - action="FAIL", - config={}, - parameters={}, - ), - ResponseAction.FAIL, - FailureType.config_error, - "Forbidden. You don't have permission to access this resource.", + ResponseAction.FAIL, + FailureType.system_error, + "", + ), + ( + "_with_http_response_status_403_fail_with_default_failure_type", + 403, + HttpResponseFilter( + http_codes=[403], + action="FAIL", + config={}, + parameters={}, ), - ( - "_with_http_response_status_200_fail_with_contained_error_message", - 418, - HttpResponseFilter( - action=ResponseAction.FAIL, - error_message_contains="test", - config={}, - parameters={}, - ), - ResponseAction.FAIL, - FailureType.system_error, - "", + ResponseAction.FAIL, + FailureType.config_error, + "Forbidden. 
You don't have permission to access this resource.", + ), + ( + "_with_http_response_status_200_fail_with_contained_error_message", + 418, + HttpResponseFilter( + action=ResponseAction.FAIL, + error_message_contains="test", + config={}, + parameters={}, ), - ( - "_fail_with_predicate", - 418, - HttpResponseFilter( - action=ResponseAction.FAIL, - predicate="{{ 'error' in response }}", - config={}, - parameters={}, - ), - ResponseAction.FAIL, - FailureType.system_error, - "", + ResponseAction.FAIL, + FailureType.system_error, + "", + ), + ( + "_fail_with_predicate", + 418, + HttpResponseFilter( + action=ResponseAction.FAIL, + predicate="{{ 'error' in response }}", + config={}, + parameters={}, ), - ], + ResponseAction.FAIL, + FailureType.system_error, + "", + ), + ], ) -def test_default_error_handler_with_custom_response_filter(test_name, http_status_code, test_response_filter, response_action, failure_type, error_message): +def test_default_error_handler_with_custom_response_filter( + test_name, http_status_code, test_response_filter, response_action, failure_type, error_message +): response_mock = create_response(http_status_code) if http_status_code == 418: response_mock.json.return_value = {"error": "test"} @@ -148,11 +150,11 @@ def test_default_error_handler_with_custom_response_filter(test_name, http_statu @pytest.mark.parametrize( - "http_status_code, expected_response_action", - [ - (400, ResponseAction.RETRY), - (402, ResponseAction.FAIL), - ], + "http_status_code, expected_response_action", + [ + (400, ResponseAction.RETRY), + (402, ResponseAction.FAIL), + ], ) def test_default_error_handler_with_multiple_response_filters(http_status_code, expected_response_action): response_filter_one = HttpResponseFilter( @@ -175,15 +177,17 @@ def test_default_error_handler_with_multiple_response_filters(http_status_code, @pytest.mark.parametrize( - "first_response_filter_action, second_response_filter_action, expected_response_action", - [ - (ResponseAction.RETRY, 
ResponseAction.FAIL, ResponseAction.RETRY), - (ResponseAction.FAIL, ResponseAction.RETRY, ResponseAction.FAIL), - (ResponseAction.IGNORE, ResponseAction.IGNORE, ResponseAction.IGNORE), - (ResponseAction.SUCCESS, ResponseAction.IGNORE, ResponseAction.SUCCESS), - ] + "first_response_filter_action, second_response_filter_action, expected_response_action", + [ + (ResponseAction.RETRY, ResponseAction.FAIL, ResponseAction.RETRY), + (ResponseAction.FAIL, ResponseAction.RETRY, ResponseAction.FAIL), + (ResponseAction.IGNORE, ResponseAction.IGNORE, ResponseAction.IGNORE), + (ResponseAction.SUCCESS, ResponseAction.IGNORE, ResponseAction.SUCCESS), + ], ) -def test_default_error_handler_with_conflicting_response_filters(first_response_filter_action, second_response_filter_action, expected_response_action): +def test_default_error_handler_with_conflicting_response_filters( + first_response_filter_action, second_response_filter_action, expected_response_action +): response_filter_one = HttpResponseFilter( http_codes=[400], action=first_response_filter_action, @@ -205,19 +209,29 @@ def test_default_error_handler_with_conflicting_response_filters(first_response_ def test_default_error_handler_with_constant_backoff_strategy(): response_mock = create_response(429) - error_handler = DefaultErrorHandler(config={}, parameters={}, backoff_strategies=[ConstantBackoffStrategy(SOME_BACKOFF_TIME, config={}, parameters={})]) + error_handler = DefaultErrorHandler( + config={}, parameters={}, backoff_strategies=[ConstantBackoffStrategy(SOME_BACKOFF_TIME, config={}, parameters={})] + ) assert error_handler.backoff_time(response_or_exception=response_mock, attempt_count=0) == SOME_BACKOFF_TIME @pytest.mark.parametrize( "attempt_count", [ - 0, 1, 2, 3, 4, 5, 6, + 0, + 1, + 2, + 3, + 4, + 5, + 6, ], ) def test_default_error_handler_with_exponential_backoff_strategy(attempt_count): response_mock = create_response(429) - error_handler = DefaultErrorHandler(config={}, parameters={}, 
backoff_strategies=[ExponentialBackoffStrategy(factor=1, config={}, parameters={})]) + error_handler = DefaultErrorHandler( + config={}, parameters={}, backoff_strategies=[ExponentialBackoffStrategy(factor=1, config={}, parameters={})] + ) assert error_handler.backoff_time(response_or_exception=response_mock, attempt_count=attempt_count) == (1 * 2**attempt_count) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_default_http_response_filter.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_default_http_response_filter.py index 6da87a183ff2..b3e4c517da26 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_default_http_response_filter.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_default_http_response_filter.py @@ -5,31 +5,19 @@ from unittest.mock import MagicMock import pytest +from airbyte_cdk.models import FailureType from airbyte_cdk.sources.declarative.requesters.error_handlers.default_http_response_filter import DefaultHttpResponseFilter from airbyte_cdk.sources.streams.http.error_handlers.default_error_mapping import DEFAULT_ERROR_MAPPING from airbyte_cdk.sources.streams.http.error_handlers.response_models import ResponseAction -from airbyte_protocol.models import FailureType from requests import RequestException, Response @pytest.mark.parametrize( "http_code, expected_error_resolution", [ - pytest.param( - 403, - DEFAULT_ERROR_MAPPING[403], - id="403 mapping" - ), - pytest.param( - 404, - DEFAULT_ERROR_MAPPING[404], - id="404 mapping" - ), - pytest.param( - 408, - DEFAULT_ERROR_MAPPING[408], - id="408 mapping" - ), + pytest.param(403, DEFAULT_ERROR_MAPPING[403], id="403 mapping"), + pytest.param(404, DEFAULT_ERROR_MAPPING[404], id="404 mapping"), + pytest.param(408, DEFAULT_ERROR_MAPPING[408], id="408 mapping"), ], ) def test_matches_mapped_http_status_code(http_code, expected_error_resolution): 
diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_http_response_filter.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_http_response_filter.py index 5fd5990e898b..9c6817c268c4 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_http_response_filter.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/error_handlers/test_http_response_filter.py @@ -22,7 +22,9 @@ "", "custom error message", {"status_code": 503}, - ErrorResolution(response_action=ResponseAction.FAIL, failure_type=FailureType.transient_error, error_message="custom error message"), + ErrorResolution( + response_action=ResponseAction.FAIL, failure_type=FailureType.transient_error, error_message="custom error message" + ), id="test_http_code_matches", ), pytest.param( @@ -33,7 +35,11 @@ "", "", {"status_code": 403}, - ErrorResolution(response_action=ResponseAction.IGNORE, failure_type=FailureType.config_error, error_message="Forbidden. You don't have permission to access this resource."), + ErrorResolution( + response_action=ResponseAction.IGNORE, + failure_type=FailureType.config_error, + error_message="Forbidden. You don't have permission to access this resource.", + ), id="test_http_code_matches_ignore_action", ), pytest.param( @@ -44,7 +50,9 @@ "", "", {"status_code": 429}, - ErrorResolution(response_action=ResponseAction.RETRY, failure_type=FailureType.transient_error, error_message="Too many requests."), + ErrorResolution( + response_action=ResponseAction.RETRY, failure_type=FailureType.transient_error, error_message="Too many requests." 
+ ), id="test_http_code_matches_retry_action", ), pytest.param( @@ -55,7 +63,9 @@ "", "error message was: {{ response.failure }}", {"status_code": 404, "json": {"the_body": "do_i_match", "failure": "i failed you"}}, - ErrorResolution(response_action=ResponseAction.FAIL, failure_type=FailureType.system_error, error_message="error message was: i failed you"), + ErrorResolution( + response_action=ResponseAction.FAIL, failure_type=FailureType.system_error, error_message="error message was: i failed you" + ), id="test_predicate_matches_json", ), pytest.param( @@ -66,7 +76,9 @@ "", "error from header: {{ headers.warning }}", {"status_code": 404, "headers": {"the_key": "header_match", "warning": "this failed"}}, - ErrorResolution(response_action=ResponseAction.FAIL, failure_type=FailureType.system_error, error_message="error from header: this failed"), + ErrorResolution( + response_action=ResponseAction.FAIL, failure_type=FailureType.system_error, error_message="error from header: this failed" + ), id="test_predicate_matches_headers", ), pytest.param( @@ -80,7 +92,7 @@ ErrorResolution( response_action=ResponseAction.FAIL, failure_type=FailureType.config_error, - error_message="Forbidden. You don't have permission to access this resource." + error_message="Forbidden. 
You don't have permission to access this resource.", ), id="test_predicate_matches_headers", ), @@ -147,12 +159,16 @@ "", "rate limits", {"status_code": 500}, - ErrorResolution(response_action=ResponseAction.RATE_LIMITED, failure_type=FailureType.transient_error, error_message="rate limits"), + ErrorResolution( + response_action=ResponseAction.RATE_LIMITED, failure_type=FailureType.transient_error, error_message="rate limits" + ), id="test_http_code_matches_response_action_rate_limited", ), ], ) -def test_matches(requests_mock, action, failure_type, http_codes, predicate, error_contains, error_message, response, expected_error_resolution): +def test_matches( + requests_mock, action, failure_type, http_codes, predicate, error_contains, error_message, response, expected_error_resolution +): requests_mock.register_uri( "GET", "https://airbyte.io/", diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_cursor_pagination_strategy.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_cursor_pagination_strategy.py index 6f010323c8f0..31d9ae5e05f5 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_cursor_pagination_strategy.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_cursor_pagination_strategy.py @@ -50,7 +50,7 @@ "test_static_token_with_string_stop_condition", "test_token_from_header", "test_token_from_response_header_links", - ] + ], ) def test_cursor_pagination_strategy(template_string, stop_condition, expected_token, page_size): decoder = JsonDecoder(parameters={}) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_default_paginator.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_default_paginator.py index 109f153cbcc8..54fcb2883ab2 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_default_paginator.py +++ 
b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_default_paginator.py @@ -247,7 +247,7 @@ def test_page_size_option_cannot_be_set_if_strategy_has_no_limit(): ids=[ "test_reset_inject_on_first_request", "test_reset_no_inject_on_first_request", - ] + ], ) def test_reset(inject_on_first_request): page_size_request_option = RequestOption(inject_into=RequestOptionType.request_parameter, field_name="limit", parameters={}) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_page_increment.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_page_increment.py index 1ca14cc60481..da2bf6d9450e 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_page_increment.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_page_increment.py @@ -40,13 +40,7 @@ def test_page_increment_paginator_strategy(page_size, start_from, last_page_size assert start_from == paginator_strategy._page -@pytest.mark.parametrize( - "page_size", - [ - pytest.param("{{ config['value'] }}"), - pytest.param("not-an-integer") - ] -) +@pytest.mark.parametrize("page_size", [pytest.param("{{ config['value'] }}"), pytest.param("not-an-integer")]) def test_page_increment_paginator_strategy_malformed_page_size(page_size): with pytest.raises(Exception, match=".* is of type . 
Expected "): PageIncrement(page_size=page_size, parameters={}, start_from_page=0, config={"value": "not-an-integer"}) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_http_requester.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_http_requester.py index 77e94778ea2f..404bf9f50e15 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_http_requester.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/test_http_requester.py @@ -415,7 +415,7 @@ def test_send_request_params(provider_params, param_params, authenticator_params {"k": [1, 2]}, "%5B%22a%22%2C+%22b%22%5D=1&%5B%22a%22%2C+%22b%22%5D=2", id="test-key-with-list-to-be-interpolated", - ) + ), ], ) def test_request_param_interpolation(request_parameters, config, expected_query_params): @@ -464,8 +464,7 @@ def test_request_param_interpolation_with_incorrect_values(request_parameters, c requester.send_request() assert ( - error.value.args[0] - == f"Invalid value for `{invalid_value_for_key}` parameter. The values of request params cannot be an object." + error.value.args[0] == f"Invalid value for `{invalid_value_for_key}` parameter. The values of request params cannot be an object." 
) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py b/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py index 9c36c65b8553..fd3db0452f04 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py @@ -147,7 +147,7 @@ def test_simple_retriever_with_request_response_logs(mock_http_stream): [ pytest.param(None, None, 1, id="test_initial_sync_no_state"), pytest.param({"next_page_token": 10}, 10, 11, id="test_reset_with_next_page_token"), - ] + ], ) def test_simple_retriever_resumable_full_refresh_cursor_page_increment(initial_state, expected_reset_value, expected_next_page): expected_records = [ @@ -184,7 +184,7 @@ def test_simple_retriever_resumable_full_refresh_cursor_page_increment(initial_s expected_records[5], expected_records[6], expected_records[7], - ] + ], ] page_increment_strategy = PageIncrement(config={}, page_size=5, parameters={}) @@ -230,11 +230,13 @@ def test_simple_retriever_resumable_full_refresh_cursor_page_increment(initial_s {"next_page_token": "https://for-all-mankind.nasa.com/api/v1/astronauts?next_page=tracy_stevens"}, "https://for-all-mankind.nasa.com/api/v1/astronauts?next_page=tracy_stevens", "https://for-all-mankind.nasa.com/api/v1/astronauts?next_page=gordo_stevens", - id="test_reset_with_next_page_token" + id="test_reset_with_next_page_token", ), - ] + ], ) -def test_simple_retriever_resumable_full_refresh_cursor_reset_cursor_pagination(initial_state, expected_reset_value, expected_next_page, requests_mock): +def test_simple_retriever_resumable_full_refresh_cursor_reset_cursor_pagination( + initial_state, expected_reset_value, expected_next_page, requests_mock +): expected_records = [ Record(data={"name": "ed_baldwin"}, associated_slice=None), Record(data={"name": "danielle_poole"}, associated_slice=None), @@ -288,7 +290,7 @@ 
def test_simple_retriever_resumable_full_refresh_cursor_reset_cursor_pagination( stream = factory.create_component(model_type=DeclarativeStreamModel, component_definition=stream_manifest, config={}) response_body = { "data": [r.data for r in expected_records[:5]], - "next_page": "https://for-all-mankind.nasa.com/api/v1/astronauts?next_page=gordo_stevens" + "next_page": "https://for-all-mankind.nasa.com/api/v1/astronauts?next_page=gordo_stevens", } requests_mock.get("https://for-all-mankind.nasa.com/api/v1/astronauts", json=response_body) requests_mock.get("https://for-all-mankind.nasa.com/astronauts?next_page=tracy_stevens", json=response_body) @@ -334,7 +336,10 @@ def test_simple_retriever_resumable_full_refresh_cursor_reset_skip_completed_str ] record_selector = MagicMock() - record_selector.select_records.return_value = [expected_records[0],expected_records[1],] + record_selector.select_records.return_value = [ + expected_records[0], + expected_records[1], + ] page_increment_strategy = PageIncrement(config={}, page_size=5, parameters={}) paginator = DefaultPaginator(config={}, pagination_strategy=page_increment_strategy, url_base="https://airbyte.io", parameters={}) @@ -463,13 +468,39 @@ def test_get_request_headers(test_name, paginator_mapping, expected_mapping): @pytest.mark.parametrize( "test_name, paginator_mapping, ignore_stream_slicer_parameters_on_paginated_requests, next_page_token, expected_mapping", [ - ("test_do_not_ignore_stream_slicer_params_if_ignore_is_true_but_no_next_page_token", {"key_from_pagination": "1000"}, True, None, {"key_from_pagination": "1000"}), - ("test_do_not_ignore_stream_slicer_params_if_ignore_is_false_and_no_next_page_token", {"key_from_pagination": "1000"}, False, None, {"key_from_pagination": "1000", "key_from_slicer": "value"}), - ("test_ignore_stream_slicer_params_on_paginated_request", {"key_from_pagination": "1000"}, True, {"page": 2}, {"key_from_pagination": "1000"}), - 
("test_do_not_ignore_stream_slicer_params_on_paginated_request", {"key_from_pagination": "1000"}, False, {"page": 2}, {"key_from_pagination": "1000", "key_from_slicer": "value"}), + ( + "test_do_not_ignore_stream_slicer_params_if_ignore_is_true_but_no_next_page_token", + {"key_from_pagination": "1000"}, + True, + None, + {"key_from_pagination": "1000"}, + ), + ( + "test_do_not_ignore_stream_slicer_params_if_ignore_is_false_and_no_next_page_token", + {"key_from_pagination": "1000"}, + False, + None, + {"key_from_pagination": "1000", "key_from_slicer": "value"}, + ), + ( + "test_ignore_stream_slicer_params_on_paginated_request", + {"key_from_pagination": "1000"}, + True, + {"page": 2}, + {"key_from_pagination": "1000"}, + ), + ( + "test_do_not_ignore_stream_slicer_params_on_paginated_request", + {"key_from_pagination": "1000"}, + False, + {"page": 2}, + {"key_from_pagination": "1000", "key_from_slicer": "value"}, + ), ], ) -def test_ignore_stream_slicer_parameters_on_paginated_requests(test_name, paginator_mapping, ignore_stream_slicer_parameters_on_paginated_requests, next_page_token, expected_mapping): +def test_ignore_stream_slicer_parameters_on_paginated_requests( + test_name, paginator_mapping, ignore_stream_slicer_parameters_on_paginated_requests, next_page_token, expected_mapping +): # This test is separate from the other request options because request headers must be strings paginator = MagicMock() paginator.get_request_headers.return_value = paginator_mapping diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/spec/test_spec.py b/airbyte-cdk/python/unit_tests/sources/declarative/spec/test_spec.py index 46b892256a25..1e1ef498082f 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/spec/test_spec.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/spec/test_spec.py @@ -3,32 +3,34 @@ # import pytest -from airbyte_cdk.models.airbyte_protocol import AdvancedAuth, ConnectorSpecification +from airbyte_cdk.models import AdvancedAuth, 
AuthFlowType, ConnectorSpecification from airbyte_cdk.sources.declarative.models.declarative_component_schema import AuthFlow from airbyte_cdk.sources.declarative.spec.spec import Spec @pytest.mark.parametrize( - "test_name, spec, expected_connection_specification", + "spec, expected_connection_specification", [ ( - "test_only_connection_specification", Spec(connection_specification={"client_id": "my_client_id"}, parameters={}), ConnectorSpecification(connectionSpecification={"client_id": "my_client_id"}), ), ( - "test_with_doc_url", Spec(connection_specification={"client_id": "my_client_id"}, parameters={}, documentation_url="https://airbyte.io"), ConnectorSpecification(connectionSpecification={"client_id": "my_client_id"}, documentationUrl="https://airbyte.io"), ), ( - "test_auth_flow", Spec(connection_specification={"client_id": "my_client_id"}, parameters={}, advanced_auth=AuthFlow(auth_flow_type="oauth2.0")), ConnectorSpecification( - connectionSpecification={"client_id": "my_client_id"}, advanced_auth=AdvancedAuth(auth_flow_type="oauth2.0") + connectionSpecification={"client_id": "my_client_id"}, advanced_auth=AdvancedAuth(auth_flow_type=AuthFlowType.oauth2_0) ), ), ], + ids=[ + "test_only_connection_specification", + "test_with_doc_url", + "test_auth_flow", + ], ) -def test_spec(test_name, spec, expected_connection_specification): +def test_spec(spec, expected_connection_specification): assert spec.generate_spec() == expected_connection_specification diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py b/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py index bd8281b80b46..8906b625fb8f 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py @@ -189,7 +189,7 @@ def test_no_state_migration_is_applied_if_the_state_should_not_be_migrated(): [ pytest.param(True, True, 
id="test_retriever_has_cursor"), pytest.param(False, False, id="test_retriever_has_cursor"), - ] + ], ) def test_is_resumable(use_cursor, expected_supports_checkpointing): schema_loader = _schema_loader() diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/test_manifest_declarative_source.py b/airbyte-cdk/python/unit_tests/sources/declarative/test_manifest_declarative_source.py index d7017eb52dd5..2d350fa12b4b 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/test_manifest_declarative_source.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/test_manifest_declarative_source.py @@ -29,7 +29,6 @@ from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource from airbyte_cdk.sources.declarative.retrievers.simple_retriever import SimpleRetriever from jsonschema.exceptions import ValidationError -from pydantic import AnyUrl logger = logging.getLogger("airbyte") @@ -210,7 +209,7 @@ def test_manifest_with_spec(self): source = ManifestDeclarativeSource(source_config=manifest) connector_specification = source.spec(logger) assert connector_specification is not None - assert connector_specification.documentationUrl == AnyUrl("https://airbyte.com/#yaml-from-manifest") + assert connector_specification.documentationUrl == "https://airbyte.com/#yaml-from-manifest" assert connector_specification.connectionSpecification["title"] == "Test Spec" assert connector_specification.connectionSpecification["required"][0] == "api_key" assert connector_specification.connectionSpecification["additionalProperties"] is False @@ -277,7 +276,7 @@ def test_manifest_with_external_spec(self, use_external_yaml_spec): connector_specification = source.spec(logger) - assert connector_specification.documentationUrl == AnyUrl("https://airbyte.com/#yaml-from-external") + assert connector_specification.documentationUrl == "https://airbyte.com/#yaml-from-external" assert connector_specification.connectionSpecification == 
EXTERNAL_CONNECTION_SPECIFICATION def test_source_is_not_created_if_toplevel_fields_are_unknown(self): @@ -1045,8 +1044,12 @@ def _create_page(response_body): ), [{"ABC": 0, "partition": 0}, {"AED": 1, "partition": 0}, {"ABC": 2, "partition": 1}], [ - call({'states': []}, {"partition": "0"}, None), - call({'states': [{'partition': {'partition': '0'}, 'cursor': {'__ab_full_refresh_sync_complete': True}}]}, {"partition": "1"}, None), + call({"states": []}, {"partition": "0"}, None), + call( + {"states": [{"partition": {"partition": "0"}, "cursor": {"__ab_full_refresh_sync_complete": True}}]}, + {"partition": "1"}, + None, + ), ], ), ( @@ -1119,9 +1122,13 @@ def _create_page(response_body): ), [{"ABC": 0, "partition": 0}, {"AED": 1, "partition": 0}, {"USD": 3, "partition": 0}, {"ABC": 2, "partition": 1}], [ - call({'states': []}, {"partition": "0"}, None), - call({'states': []}, {"partition": "0"}, {"next_page_token": "next"}), - call({'states': [{'partition': {'partition': '0'}, 'cursor': {'__ab_full_refresh_sync_complete': True}}]}, {'partition': '1'}, None), + call({"states": []}, {"partition": "0"}, None), + call({"states": []}, {"partition": "0"}, {"next_page_token": "next"}), + call( + {"states": [{"partition": {"partition": "0"}, "cursor": {"__ab_full_refresh_sync_complete": True}}]}, + {"partition": "1"}, + None, + ), ], ), ], @@ -1269,14 +1276,14 @@ def _run_read(manifest: Mapping[str, Any], stream_name: str) -> List[AirbyteMess def test_declarative_component_schema_valid_ref_links(): def load_yaml(file_path) -> Mapping[str, Any]: - with open(file_path, 'r') as file: + with open(file_path, "r") as file: return yaml.safe_load(file) - def extract_refs(data, base_path='#') -> List[str]: + def extract_refs(data, base_path="#") -> List[str]: refs = [] if isinstance(data, dict): for key, value in data.items(): - if key == '$ref' and isinstance(value, str) and value.startswith('#'): + if key == "$ref" and isinstance(value, str) and value.startswith("#"): ref_path = 
value refs.append(ref_path) else: @@ -1287,11 +1294,11 @@ def extract_refs(data, base_path='#') -> List[str]: return refs def resolve_pointer(data: Mapping[str, Any], pointer: str) -> bool: - parts = pointer.split('/')[1:] # Skip the first empty part due to leading '#/' + parts = pointer.split("/")[1:] # Skip the first empty part due to leading '#/' current = data try: for part in parts: - part = part.replace('~1', '/').replace('~0', '~') # Unescape JSON Pointer + part = part.replace("~1", "/").replace("~0", "~") # Unescape JSON Pointer current = current[part] return True except (KeyError, TypeError): @@ -1300,8 +1307,10 @@ def resolve_pointer(data: Mapping[str, Any], pointer: str) -> bool: def validate_refs(yaml_file: str) -> List[str]: data = load_yaml(yaml_file) refs = extract_refs(data) - invalid_refs = [ref for ref in refs if not resolve_pointer(data, ref.replace('#', ''))] + invalid_refs = [ref for ref in refs if not resolve_pointer(data, ref.replace("#", ""))] return invalid_refs - yaml_file_path = Path(__file__).resolve().parent.parent.parent.parent / 'airbyte_cdk/sources/declarative/declarative_component_schema.yaml' + yaml_file_path = ( + Path(__file__).resolve().parent.parent.parent.parent / "airbyte_cdk/sources/declarative/declarative_component_schema.yaml" + ) assert not validate_refs(yaml_file_path) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/test_types.py b/airbyte-cdk/python/unit_tests/sources/declarative/test_types.py index dd3f8e5b4ab2..b6eb42f940b6 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/test_types.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/test_types.py @@ -7,13 +7,25 @@ @pytest.mark.parametrize( "stream_slice, expected_partition", [ - pytest.param(StreamSlice(partition={},cursor_slice={}), {}, id="test_partition_with_empty_partition"), - pytest.param(StreamSlice(partition=StreamSlice(partition={}, cursor_slice={}), cursor_slice={}), {}, id="test_partition_nested_empty"), - 
pytest.param(StreamSlice(partition={"key": "value"}, cursor_slice={}), {"key": "value"}, id="test_partition_with_mapping_partition"), - pytest.param(StreamSlice(partition={},cursor_slice={"cursor": "value"}), {}, id="test_partition_with_only_cursor"), - pytest.param(StreamSlice(partition=StreamSlice(partition={}, cursor_slice={}), cursor_slice={"cursor": "value"}), {}, id="test_partition_nested_empty_and_cursor_value_mapping"), - pytest.param(StreamSlice(partition=StreamSlice(partition={}, cursor_slice={"cursor": "value"}), cursor_slice={}), {}, id="test_partition_nested_empty_and_cursor_value"), - ] + pytest.param(StreamSlice(partition={}, cursor_slice={}), {}, id="test_partition_with_empty_partition"), + pytest.param( + StreamSlice(partition=StreamSlice(partition={}, cursor_slice={}), cursor_slice={}), {}, id="test_partition_nested_empty" + ), + pytest.param( + StreamSlice(partition={"key": "value"}, cursor_slice={}), {"key": "value"}, id="test_partition_with_mapping_partition" + ), + pytest.param(StreamSlice(partition={}, cursor_slice={"cursor": "value"}), {}, id="test_partition_with_only_cursor"), + pytest.param( + StreamSlice(partition=StreamSlice(partition={}, cursor_slice={}), cursor_slice={"cursor": "value"}), + {}, + id="test_partition_nested_empty_and_cursor_value_mapping", + ), + pytest.param( + StreamSlice(partition=StreamSlice(partition={}, cursor_slice={"cursor": "value"}), cursor_slice={}), + {}, + id="test_partition_nested_empty_and_cursor_value", + ), + ], ) def test_partition(stream_slice, expected_partition): partition = stream_slice.partition @@ -24,14 +36,25 @@ def test_partition(stream_slice, expected_partition): @pytest.mark.parametrize( "stream_slice, expected_cursor_slice", [ - pytest.param(StreamSlice(partition={},cursor_slice={}), {}, id="test_cursor_slice_with_empty_cursor"), - pytest.param(StreamSlice(partition={}, cursor_slice=StreamSlice(partition={}, cursor_slice={})), {}, id="test_cursor_slice_nested_empty"), - - 
pytest.param(StreamSlice(partition={}, cursor_slice={"key": "value"}), {"key": "value"}, id="test_cursor_slice_with_mapping_cursor_slice"), + pytest.param(StreamSlice(partition={}, cursor_slice={}), {}, id="test_cursor_slice_with_empty_cursor"), + pytest.param( + StreamSlice(partition={}, cursor_slice=StreamSlice(partition={}, cursor_slice={})), {}, id="test_cursor_slice_nested_empty" + ), + pytest.param( + StreamSlice(partition={}, cursor_slice={"key": "value"}), {"key": "value"}, id="test_cursor_slice_with_mapping_cursor_slice" + ), pytest.param(StreamSlice(partition={"partition": "value"}, cursor_slice={}), {}, id="test_cursor_slice_with_only_partition"), - pytest.param(StreamSlice(partition={"partition": "value"}, cursor_slice=StreamSlice(partition={}, cursor_slice={})), {}, id="test_cursor_slice_nested_empty_and_partition_mapping"), - pytest.param(StreamSlice(partition=StreamSlice(partition={"partition": "value"}, cursor_slice={}), cursor_slice={}), {}, id="test_cursor_slice_nested_empty_and_partition"), - ] + pytest.param( + StreamSlice(partition={"partition": "value"}, cursor_slice=StreamSlice(partition={}, cursor_slice={})), + {}, + id="test_cursor_slice_nested_empty_and_partition_mapping", + ), + pytest.param( + StreamSlice(partition=StreamSlice(partition={"partition": "value"}, cursor_slice={}), cursor_slice={}), + {}, + id="test_cursor_slice_nested_empty_and_partition", + ), + ], ) def test_cursor_slice(stream_slice, expected_cursor_slice): cursor_slice = stream_slice.cursor_slice diff --git a/airbyte-cdk/python/unit_tests/sources/embedded/test_embedded_integration.py b/airbyte-cdk/python/unit_tests/sources/embedded/test_embedded_integration.py index d2bad84128e2..7560dc403ecd 100644 --- a/airbyte-cdk/python/unit_tests/sources/embedded/test_embedded_integration.py +++ b/airbyte-cdk/python/unit_tests/sources/embedded/test_embedded_integration.py @@ -6,9 +6,7 @@ from typing import Any, Mapping, Optional from unittest.mock import MagicMock -from 
airbyte_cdk.sources.embedded.base_integration import BaseEmbeddedIntegration -from airbyte_cdk.utils import AirbyteTracedException -from airbyte_protocol.models import ( +from airbyte_cdk.models import ( AirbyteCatalog, AirbyteLogMessage, AirbyteMessage, @@ -23,6 +21,8 @@ SyncMode, Type, ) +from airbyte_cdk.sources.embedded.base_integration import BaseEmbeddedIntegration +from airbyte_cdk.utils import AirbyteTracedException class TestIntegration(BaseEmbeddedIntegration): diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/config/test_csv_format.py b/airbyte-cdk/python/unit_tests/sources/file_based/config/test_csv_format.py index 5ce69276d974..c233bd7ac9e9 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/config/test_csv_format.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/config/test_csv_format.py @@ -30,5 +30,5 @@ def test_given_from_csv_then_csv_has_header_row(self) -> None: class CsvDelimiterTest(unittest.TestCase): def test_tab_delimter(self): - assert CsvFormat(delimiter=r"\t").delimiter == '\t' + assert CsvFormat(delimiter=r"\t").delimiter == "\t" assert len(CsvFormat(delimiter=r"\t").delimiter) == 1 diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_avro_parser.py b/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_avro_parser.py index 3882d823e196..a45d424b7a2b 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_avro_parser.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_avro_parser.py @@ -222,7 +222,7 @@ def test_convert_primitive_avro_type_to_json(avro_format, avro_type, expected_js pytest.param(_default_avro_format, "float", 123.456, 123.456, id="test_float"), pytest.param(_default_avro_format, "double", 123.456, 123.456, id="test_double_default_config"), pytest.param(_double_as_string_avro_format, "double", 123.456, "123.456", id="test_double_as_string"), - pytest.param(_default_avro_format, "bytes", b"hello world", b"hello 
world", id="test_bytes"), + pytest.param(_default_avro_format, "bytes", b"hello world", "hello world", id="test_bytes"), pytest.param(_default_avro_format, "string", "hello world", "hello world", id="test_string"), pytest.param(_default_avro_format, {"logicalType": "decimal"}, 3.1415, "3.1415", id="test_decimal"), pytest.param(_default_avro_format, {"logicalType": "uuid"}, _uuid_value, str(_uuid_value), id="test_uuid"), diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_excel_parser.py b/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_excel_parser.py index dbee93fd57a6..bd9d8338f094 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_excel_parser.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_excel_parser.py @@ -47,12 +47,14 @@ def setup_parser(remote_file): parser = ExcelParser() # Sample data for the mock Excel file - data = pd.DataFrame({ - "column1": [1, 2, 3], - "column2": ["a", "b", "c"], - "column3": [True, False, True], - "column4": pd.to_datetime(["2021-01-01", "2022-01-01", "2023-01-01"]), - }) + data = pd.DataFrame( + { + "column1": [1, 2, 3], + "column2": ["a", "b", "c"], + "column3": [True, False, True], + "column4": pd.to_datetime(["2021-01-01", "2022-01-01", "2023-01-01"]), + } + ) # Convert the DataFrame to an Excel byte stream excel_bytes = BytesIO() diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_parquet_parser.py b/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_parquet_parser.py index 1fa2dcbf66fc..c4768facc7dd 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_parquet_parser.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_parquet_parser.py @@ -238,7 +238,8 @@ def test_value_dictionary() -> None: pytest.param(pa.decimal256(2), _decimal_as_float_parquet_format, id="test_decimal256_as_float"), pytest.param(pa.map_(pa.int32(), pa.int32()), 
_default_parquet_format, id="test_map"), pytest.param(pa.null(), _default_parquet_format, id="test_null"), - ]) + ], +) def test_null_value_does_not_throw(parquet_type, parquet_format) -> None: pyarrow_value = pa.scalar(None, type=parquet_type) assert ParquetParser._to_output_value(pyarrow_value, parquet_format) is None diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/in_memory_files_source.py b/airbyte-cdk/python/unit_tests/sources/file_based/in_memory_files_source.py index a6de1b290c56..0a2681911211 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/in_memory_files_source.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/in_memory_files_source.py @@ -16,7 +16,7 @@ import pandas as pd import pyarrow as pa import pyarrow.parquet as pq -from airbyte_cdk.models import ConfiguredAirbyteCatalog +from airbyte_cdk.models import ConfiguredAirbyteCatalog, ConfiguredAirbyteCatalogSerializer from airbyte_cdk.sources.file_based.availability_strategy import AbstractFileBasedAvailabilityStrategy, DefaultFileBasedAvailabilityStrategy from airbyte_cdk.sources.file_based.config.abstract_file_based_spec import AbstractFileBasedSpec from airbyte_cdk.sources.file_based.discovery_policy import AbstractDiscoveryPolicy, DefaultDiscoveryPolicy @@ -53,7 +53,7 @@ def __init__( self.files = files self.file_type = file_type self.catalog = catalog - self.configured_catalog = ConfiguredAirbyteCatalog(streams=self.catalog["streams"]) if self.catalog else None + self.configured_catalog = ConfiguredAirbyteCatalogSerializer.load(self.catalog) if self.catalog else None self.config = config self.state = state @@ -224,8 +224,8 @@ def _make_file_contents(self, file_name: str) -> bytes: df = pd.DataFrame(contents) with io.BytesIO() as fp: - writer = pd.ExcelWriter(fp, engine='xlsxwriter') - df.to_excel(writer, index=False, sheet_name='Sheet1') + writer = pd.ExcelWriter(fp, engine="xlsxwriter") + df.to_excel(writer, index=False, sheet_name="Sheet1") writer._save() 
fp.seek(0) return fp.read() diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/concurrent_incremental_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/concurrent_incremental_scenarios.py index 0b662519f276..e5a7ee419452 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/concurrent_incremental_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/concurrent_incremental_scenarios.py @@ -2227,12 +2227,12 @@ .set_expected_records( [ { - "history": { - "b.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-05T03:54:07.000000Z", - "d.csv": "2023-06-05T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_d.csv", + "history": { + "b.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-05T03:54:07.000000Z", + "d.csv": "2023-06-05T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_d.csv", } ] ) diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/csv_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/csv_scenarios.py index 723550cc36d7..dc0a97bc1cf2 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/csv_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/csv_scenarios.py @@ -2,12 +2,11 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# -from airbyte_cdk.models import AirbyteAnalyticsTraceMessage +from airbyte_cdk.models import AirbyteAnalyticsTraceMessage, SyncMode from airbyte_cdk.sources.file_based.config.csv_format import CsvFormat from airbyte_cdk.sources.file_based.exceptions import ConfigValidationError, FileBasedSourceError from airbyte_cdk.test.catalog_builder import CatalogBuilder from airbyte_cdk.utils.traced_exception import AirbyteTracedException -from airbyte_protocol.models import SyncMode from unit_tests.sources.file_based.helpers import EmptySchemaParser, LowInferenceLimitDiscoveryPolicy from unit_tests.sources.file_based.in_memory_files_source import InMemoryFilesSource from unit_tests.sources.file_based.scenarios.file_based_source_builder import FileBasedSourceBuilder @@ -417,8 +416,8 @@ "properties": { "filetype": {"title": "Filetype", "default": "excel", "const": "excel", "type": "string"} }, - "required": ["filetype"] - } + "required": ["filetype"], + }, ], }, "schemaless": { @@ -432,7 +431,7 @@ "description": "The number of resent files which will be used to discover the schema for this stream.", "exclusiveMinimum": 0, "type": "integer", - } + }, }, "required": ["name", "format"], }, @@ -440,6 +439,8 @@ }, "required": ["streams"], }, + "supportsDBT": False, + "supportsNormalization": False, } ) .set_expected_catalog( @@ -505,7 +506,7 @@ "format": {"filetype": "csv"}, "globs": ["b.csv"], "validation_policy": "Emit Record", - } + }, ] } ) @@ -568,50 +569,52 @@ "source_defined_cursor": True, "supported_sync_modes": ["full_refresh", "incremental"], "is_resumable": True, - } + }, ] } ) - .set_expected_records([ - { - "data": { - "col1": "val11a", - "col2": "val12a", - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", - "_ab_source_file_url": "a.csv", + .set_expected_records( + [ + { + "data": { + "col1": "val11a", + "col2": "val12a", + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", + "_ab_source_file_url": "a.csv", + }, + "stream": "stream1", }, - 
"stream": "stream1", - }, - { - "data": { - "col1": "val21a", - "col2": "val22a", - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", - "_ab_source_file_url": "a.csv", + { + "data": { + "col1": "val21a", + "col2": "val22a", + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", + "_ab_source_file_url": "a.csv", + }, + "stream": "stream1", }, - "stream": "stream1", - }, - { - "data": { - "col1": "val11b", - "col2": "val12b", - "col3": "val13b", - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", - "_ab_source_file_url": "b.csv", + { + "data": { + "col1": "val11b", + "col2": "val12b", + "col3": "val13b", + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", + "_ab_source_file_url": "b.csv", + }, + "stream": "stream2", }, - "stream": "stream2", - }, - { - "data": { - "col1": "val21b", - "col2": "val22b", - "col3": "val23b", - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", - "_ab_source_file_url": "b.csv", + { + "data": { + "col1": "val21b", + "col2": "val22b", + "col3": "val23b", + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", + "_ab_source_file_url": "b.csv", + }, + "stream": "stream2", }, - "stream": "stream2", - }, - ]) + ] + ) .set_expected_analytics( [ AirbyteAnalyticsTraceMessage(type="file-cdk-csv-stream-count", value="2"), @@ -2094,7 +2097,6 @@ { "data": { "col1": "2", - "col2": None, "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", "_ab_source_file_url": "a.csv", }, @@ -2305,7 +2307,6 @@ { "data": { "col1": "2", - "col2": None, "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", "_ab_source_file_url": "a.csv", }, @@ -3174,7 +3175,6 @@ [ { "data": { - "col1": None, "col2": "na", "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", "_ab_source_file_url": "a.csv", @@ -3316,11 +3316,7 @@ "start_date": "2023-06-10T03:54:07.000000Z", } ) - .set_source_builder( - FileBasedSourceBuilder() - .set_files({}) - .set_file_type("csv") - ) + 
.set_source_builder(FileBasedSourceBuilder().set_files({}).set_file_type("csv")) .set_expected_check_status("FAILED") .set_expected_catalog( { diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/excel_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/excel_scenarios.py index f92c8420099d..6653296535d5 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/excel_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/excel_scenarios.py @@ -22,7 +22,11 @@ "a.xlsx": { "contents": [ {"col_double": 20.02, "col_string": "Robbers", "col_album": "The 1975"}, - {"col_double": 20.23, "col_string": "Somebody Else", "col_album": "I Like It When You Sleep, for You Are So Beautiful yet So Unaware of It"}, + { + "col_double": 20.23, + "col_string": "Somebody Else", + "col_album": "I Like It When You Sleep, for You Are So Beautiful yet So Unaware of It", + }, ], "last_modified": "2023-06-05T03:54:07.000Z", }, @@ -60,14 +64,22 @@ {"col_title": "White Lies", "col_album": "IN_RETURN", "col_year": 2014, "col_vocals": True}, {"col_title": "Wide Awake", "col_album": "THE_LAST_GOODBYE", "col_year": 2022, "col_vocals": True}, ], - "last_modified": "2023-06-05T03:54:07.000Z" + "last_modified": "2023-06-05T03:54:07.000Z", }, "california_festivals.xlsx": { "contents": [ - {"col_name": "Lightning in a Bottle", "col_location": {"country": "USA", "state": "California", "city": "Buena Vista Lake"}, "col_attendance": 18000}, - {"col_name": "Outside Lands", "col_location": {"country": "USA", "state": "California", "city": "San Francisco"}, "col_attendance": 220000}, + { + "col_name": "Lightning in a Bottle", + "col_location": {"country": "USA", "state": "California", "city": "Buena Vista Lake"}, + "col_attendance": 18000, + }, + { + "col_name": "Outside Lands", + "col_location": {"country": "USA", "state": "California", "city": "San Francisco"}, + "col_attendance": 220000, + }, ], - "last_modified": 
"2023-06-06T03:54:07.000Z" + "last_modified": "2023-06-06T03:54:07.000Z", }, } @@ -257,7 +269,7 @@ "col_long": 1992, "col_float": 999.09723456, "col_string": "Love It If We Made It", - "col_date": "2022-05-29T00:00:00", + "col_date": "2022-05-29T00:00:00.000000", "col_time_millis": "06:00:00.456000", "col_time_micros": "12:00:00.456789", "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", @@ -407,7 +419,7 @@ "type": "object", "properties": { "col_name": {"type": ["null", "string"]}, - "col_location": {"type": ["null", "string"]}, + "col_location": {"type": ["null", "string"]}, "col_attendance": {"type": ["null", "number"]}, "_ab_source_file_last_modified": {"type": "string"}, "_ab_source_file_url": {"type": "string"}, diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/file_based_source_builder.py b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/file_based_source_builder.py index f3d72ab67e7a..6675df380c7c 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/file_based_source_builder.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/file_based_source_builder.py @@ -33,7 +33,9 @@ def __init__(self) -> None: self._config: Optional[Mapping[str, Any]] = None self._state: Optional[TState] = None - def build(self, configured_catalog: Optional[Mapping[str, Any]], config: Optional[Mapping[str, Any]], state: Optional[TState]) -> InMemoryFilesSource: + def build( + self, configured_catalog: Optional[Mapping[str, Any]], config: Optional[Mapping[str, Any]], state: Optional[TState] + ) -> InMemoryFilesSource: if self._file_type is None: raise ValueError("file_type is not set") return InMemoryFilesSource( diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/scenario_builder.py b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/scenario_builder.py index 25811a9e60ad..8158225ac8f4 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/scenario_builder.py 
+++ b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/scenario_builder.py @@ -6,10 +6,15 @@ from dataclasses import dataclass, field from typing import Any, Generic, List, Mapping, Optional, Set, Tuple, Type, TypeVar -from airbyte_cdk.models import AirbyteAnalyticsTraceMessage, AirbyteStateMessage, SyncMode +from airbyte_cdk.models import ( + AirbyteAnalyticsTraceMessage, + AirbyteStateMessageSerializer, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteCatalogSerializer, + SyncMode, +) from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.source import TState -from airbyte_protocol.models import ConfiguredAirbyteCatalog @dataclass @@ -27,7 +32,9 @@ class SourceBuilder(ABC, Generic[SourceType]): """ @abstractmethod - def build(self, configured_catalog: Optional[Mapping[str, Any]], config: Optional[Mapping[str, Any]], state: Optional[TState]) -> SourceType: + def build( + self, configured_catalog: Optional[Mapping[str, Any]], config: Optional[Mapping[str, Any]], state: Optional[TState] + ) -> SourceType: raise NotImplementedError() @@ -78,7 +85,7 @@ def configured_catalog(self, sync_mode: SyncMode) -> Optional[Mapping[str, Any]] # exception to be raised as part of the actual check/discover/read commands # Note that to avoid a breaking change, we still attempt to automatically generate the catalog based on the streams if self.catalog: - return self.catalog.dict() # type: ignore # dict() is not typed + return ConfiguredAirbyteCatalogSerializer.dump(self.catalog) catalog: Mapping[str, Any] = {"streams": []} for stream in catalog["streams"]: @@ -90,7 +97,7 @@ def configured_catalog(self, sync_mode: SyncMode) -> Optional[Mapping[str, Any]] "supported_sync_modes": [sync_mode.value], }, "sync_mode": sync_mode.value, - "destination_sync_mode": "append" + "destination_sync_mode": "append", } ) @@ -192,7 +199,9 @@ def build(self) -> "TestScenario[SourceType]": if self.source_builder is None: raise ValueError("source_builder is not set") if 
self._incremental_scenario_config and self._incremental_scenario_config.input_state: - state = [AirbyteStateMessage.parse_obj(s) for s in self._incremental_scenario_config.input_state] + state = [ + AirbyteStateMessageSerializer.load(s) if isinstance(s, dict) else s for s in self._incremental_scenario_config.input_state + ] else: state = None source = self.source_builder.build( diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/unstructured_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/unstructured_scenarios.py index 4257da83e604..97c0c491510a 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/unstructured_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/unstructured_scenarios.py @@ -90,7 +90,6 @@ "content": "# Title 1\n\n## Title 2\n\n### Title 3\n\n#### Title 4\n\n##### Title 5\n\n###### Title 6\n\n", "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", "_ab_source_file_url": "a.md", - "_ab_source_file_parse_error": None, }, "stream": "stream1", }, @@ -100,7 +99,6 @@ "content": "Just some text", "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", "_ab_source_file_url": "b.md", - "_ab_source_file_parse_error": None, }, "stream": "stream1", }, @@ -110,7 +108,6 @@ "content": "Detected via mime type", "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", "_ab_source_file_url": "c", - "_ab_source_file_parse_error": None, }, "stream": "stream1", }, @@ -173,7 +170,6 @@ "content": "Just some raw text", "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", "_ab_source_file_url": "a.txt", - "_ab_source_file_parse_error": None, }, "stream": "stream1", }, @@ -183,7 +179,6 @@ "content": "Detected via mime type", "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", "_ab_source_file_url": "b", - "_ab_source_file_parse_error": None, }, "stream": "stream1", }, @@ -290,7 +285,6 @@ { "data": { "document_key": "a.csv", - "content": 
None, "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", "_ab_source_file_url": "a.csv", "_ab_source_file_parse_error": "Error parsing record. This could be due to a mismatch between the config's file type and the actual file type, or because the file or record is not parseable. Contact Support if you need assistance.\nfilename=a.csv message=File type FileType.CSV is not supported. Supported file types are FileType.MD, FileType.PDF, FileType.DOCX, FileType.PPTX, FileType.TXT", @@ -358,7 +352,6 @@ "content": "A harmless markdown file", "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", "_ab_source_file_url": "a.md", - "_ab_source_file_parse_error": None, }, "stream": "stream1", }, @@ -439,7 +432,6 @@ "content": "# Hello World", "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", "_ab_source_file_url": "sample.pdf", - "_ab_source_file_parse_error": None, }, "stream": "stream1", }, @@ -449,7 +441,6 @@ "content": "# Content", "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z", "_ab_source_file_url": "sample.docx", - "_ab_source_file_parse_error": None, }, "stream": "stream1", }, @@ -459,7 +450,6 @@ "content": "# Title", "_ab_source_file_last_modified": "2023-06-07T03:54:07.000000Z", "_ab_source_file_url": "sample.pptx", - "_ab_source_file_parse_error": None, }, "stream": "stream1", }, @@ -515,7 +505,6 @@ { "data": { "document_key": "sample.pdf", - "content": None, "_ab_source_file_parse_error": "Error parsing record. This could be due to a mismatch between the config's file type and the actual file type, or because the file or record is not parseable. Contact Support if you need assistance.\nfilename=sample.pdf message=No /Root object! 
- Is this really a PDF?", "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", "_ab_source_file_url": "sample.pdf", @@ -587,7 +576,6 @@ "content": "# Hello World", "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", "_ab_source_file_url": "pdf_without_extension", - "_ab_source_file_parse_error": None, }, "stream": "stream1", }, @@ -597,7 +585,6 @@ "content": "# Content", "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z", "_ab_source_file_url": "docx_without_extension", - "_ab_source_file_parse_error": None, }, "stream": "stream1", }, @@ -607,7 +594,6 @@ "content": "# Title", "_ab_source_file_last_modified": "2023-06-07T03:54:07.000000Z", "_ab_source_file_url": "pptx_without_extension", - "_ab_source_file_parse_error": None, }, "stream": "stream1", }, diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/user_input_schema_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/user_input_schema_scenarios.py index e83ab5345787..3c10e701c629 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/user_input_schema_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/user_input_schema_scenarios.py @@ -3,9 +3,9 @@ # +from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.file_based.exceptions import ConfigValidationError, FileBasedSourceError from airbyte_cdk.test.catalog_builder import CatalogBuilder -from airbyte_protocol.models import SyncMode from unit_tests.sources.file_based.scenarios.file_based_source_builder import FileBasedSourceBuilder from unit_tests.sources.file_based.scenarios.scenario_builder import TestScenarioBuilder diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_file_based_concurrent_cursor.py b/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_file_based_concurrent_cursor.py index f8122da702bb..96c907901a38 100644 --- 
a/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_file_based_concurrent_cursor.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_file_based_concurrent_cursor.py @@ -30,7 +30,7 @@ def _make_cursor(input_state: Optional[MutableMapping[str, Any]]) -> FileBasedCo None, input_state, MagicMock(), - ConnectorStateManager(state=[AirbyteStateMessage.parse_obj(input_state)] if input_state is not None else None), + ConnectorStateManager(state=[AirbyteStateMessage(input_state)] if input_state is not None else None), CursorField(FileBasedConcurrentCursor.CURSOR_FIELD), ) return cursor diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py index 119dab4bb6fb..9563fe0af9c1 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py @@ -11,14 +11,13 @@ from _pytest.capture import CaptureFixture from _pytest.reports import ExceptionInfo from airbyte_cdk.entrypoint import launch -from airbyte_cdk.models import AirbyteAnalyticsTraceMessage, SyncMode +from airbyte_cdk.models import AirbyteAnalyticsTraceMessage, AirbyteLogMessage, AirbyteMessage, ConfiguredAirbyteCatalogSerializer, SyncMode from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.file_based.stream.concurrent.cursor import AbstractConcurrentFileBasedCursor from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput from airbyte_cdk.test.entrypoint_wrapper import read as entrypoint_read from airbyte_cdk.utils import message_utils from airbyte_cdk.utils.traced_exception import AirbyteTracedException -from airbyte_protocol.models import AirbyteLogMessage, AirbyteMessage, ConfiguredAirbyteCatalog from unit_tests.sources.file_based.scenarios.scenario_builder import TestScenario @@ -112,10 +111,10 @@ def _verify_read_output(output: EntrypointOutput, scenario: 
TestScenario[Abstrac if hasattr(scenario.source, "cursor_cls") and issubclass(scenario.source.cursor_cls, AbstractConcurrentFileBasedCursor): # Only check the last state emitted because we don't know the order the others will be in. # This may be needed for non-file-based concurrent scenarios too. - assert states[-1].state.stream.stream_state.dict() == expected_states[-1] + assert {k: v for k, v in states[-1].state.stream.stream_state.__dict__.items()} == expected_states[-1] else: for actual, expected in zip(states, expected_states): # states should be emitted in sorted order - assert actual.state.stream.stream_state.dict() == expected + assert {k: v for k, v in actual.state.stream.stream_state.__dict__.items()} == expected if scenario.expected_logs: read_logs = scenario.expected_logs.get("read") @@ -138,8 +137,7 @@ def _verify_state_record_counts(records: List[AirbyteMessage], states: List[Airb for state_message in states: stream_descriptor = message_utils.get_stream_descriptor(state_message) state_record_count_sums[stream_descriptor] = ( - state_record_count_sums.get(stream_descriptor, 0) - + state_message.state.sourceStats.recordCount + state_record_count_sums.get(stream_descriptor, 0) + state_message.state.sourceStats.recordCount ) for stream, actual_count in actual_record_counts.items(): @@ -154,8 +152,8 @@ def _verify_state_record_counts(records: List[AirbyteMessage], states: List[Airb def _verify_analytics(analytics: List[AirbyteMessage], expected_analytics: Optional[List[AirbyteAnalyticsTraceMessage]]) -> None: if expected_analytics: assert len(analytics) == len( - expected_analytics), \ - f"Number of actual analytics messages ({len(analytics)}) did not match expected ({len(expected_analytics)})" + expected_analytics + ), f"Number of actual analytics messages ({len(analytics)}) did not match expected ({len(expected_analytics)})" for actual, expected in zip(analytics, expected_analytics): actual_type, actual_value = actual.trace.analytics.type, 
actual.trace.analytics.value expected_type = expected.type @@ -228,7 +226,7 @@ def read(scenario: TestScenario[AbstractSource]) -> EntrypointOutput: return entrypoint_read( scenario.source, scenario.config, - ConfiguredAirbyteCatalog.parse_obj(scenario.configured_catalog(SyncMode.full_refresh)), + ConfiguredAirbyteCatalogSerializer.load(scenario.configured_catalog(SyncMode.full_refresh)), ) @@ -236,7 +234,7 @@ def read_with_state(scenario: TestScenario[AbstractSource]) -> EntrypointOutput: return entrypoint_read( scenario.source, scenario.config, - ConfiguredAirbyteCatalog.parse_obj(scenario.configured_catalog(SyncMode.incremental)), + ConfiguredAirbyteCatalogSerializer.load(scenario.configured_catalog(SyncMode.incremental)), scenario.input_state(), ) diff --git a/airbyte-cdk/python/unit_tests/sources/message/test_repository.py b/airbyte-cdk/python/unit_tests/sources/message/test_repository.py index 95c8f96a154d..48778b657cb8 100644 --- a/airbyte-cdk/python/unit_tests/sources/message/test_repository.py +++ b/airbyte-cdk/python/unit_tests/sources/message/test_repository.py @@ -12,7 +12,6 @@ MessageRepository, NoopMessageRepository, ) -from pydantic.error_wrappers import ValidationError A_CONTROL = AirbyteControlMessage( type=OrchestratorType.CONNECTOR_CONFIG, @@ -90,14 +89,6 @@ def test_given_unknown_log_level_as_threshold_when_log_message_then_allow_messag repo.log_message(Level.DEBUG, lambda: {"message": "this is a log message"}) assert list(repo.consume_queue()) - def test_given_unknown_log_level_for_log_when_log_message_then_raise_error(self): - """ - Pydantic will fail if the log level is unknown but on our side, we should try to log at least - """ - repo = InMemoryMessageRepository(Level.ERROR) - with pytest.raises(ValidationError): - repo.log_message(UNKNOWN_LEVEL, lambda: {"message": "this is a log message"}) - class TestNoopMessageRepository: def test_given_message_emitted_when_consume_queue_then_return_empty(self): diff --git 
a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/mock_source_fixture.py b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/mock_source_fixture.py index ac7aa179b635..ece5039ba465 100644 --- a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/mock_source_fixture.py +++ b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/mock_source_fixture.py @@ -9,12 +9,12 @@ import pendulum import requests +from airbyte_cdk.models import ConnectorSpecification, SyncMode from airbyte_cdk.sources import AbstractSource, Source from airbyte_cdk.sources.streams import CheckpointMixin, IncrementalMixin, Stream from airbyte_cdk.sources.streams.core import StreamData from airbyte_cdk.sources.streams.http import HttpStream from airbyte_cdk.sources.streams.http.availability_strategy import HttpAvailabilityStrategy -from airbyte_protocol.models import ConnectorSpecification, SyncMode from requests import HTTPError @@ -23,10 +23,12 @@ class FixtureAvailabilityStrategy(HttpAvailabilityStrategy): Inherit from HttpAvailabilityStrategy with slight modification to 403 error message. """ - def reasons_for_unavailable_status_codes(self, stream: Stream, logger: logging.Logger, source: Source, error: HTTPError) -> Dict[int, str]: + def reasons_for_unavailable_status_codes( + self, stream: Stream, logger: logging.Logger, source: Source, error: HTTPError + ) -> Dict[int, str]: reasons_for_codes: Dict[int, str] = { requests.codes.FORBIDDEN: "This is likely due to insufficient permissions for your Notion integration. 
" - "Please make sure your integration has read access for the resources you are trying to sync" + "Please make sure your integration has read access for the resources you are trying to sync" } return reasons_for_codes @@ -94,28 +96,16 @@ def get_json_schema(self) -> Mapping[str, Any]: "type": "object", "additionalProperties": True, "properties": { - "type": { - "type": "string" - }, - "id": { - "type": "string" - }, - "created_at": { - "type": "string", - "format": "date-time" - }, - "first_name": { - "type": "string" - }, - "last_name": { - "type": "string" - } - } + "type": {"type": "string"}, + "id": {"type": "string"}, + "created_at": {"type": "string", "format": "date-time"}, + "first_name": {"type": "string"}, + "last_name": {"type": "string"}, + }, } class Planets(IncrementalIntegrationStream): - def __init__(self, **kwargs): super().__init__(**kwargs) self._state: MutableMapping[str, Any] = {} @@ -129,20 +119,11 @@ def get_json_schema(self) -> Mapping[str, Any]: "type": "object", "additionalProperties": True, "properties": { - "type": { - "type": "string" - }, - "id": { - "type": "string" - }, - "created_at": { - "type": "string", - "format": "date-time" - }, - "name": { - "type": "string" - } - } + "type": {"type": "string"}, + "id": {"type": "string"}, + "created_at": {"type": "string", "format": "date-time"}, + "name": {"type": "string"}, + }, } def request_params( @@ -151,10 +132,7 @@ def request_params( stream_slice: Optional[Mapping[str, Any]] = None, next_page_token: Optional[Mapping[str, Any]] = None, ) -> MutableMapping[str, Any]: - return { - "start_date": stream_slice.get("start_date"), - "end_date": stream_slice.get("end_date") - } + return {"start_date": stream_slice.get("start_date"), "end_date": stream_slice.get("end_date")} def stream_slices( self, *, sync_mode: SyncMode, cursor_field: Optional[List[str]] = None, stream_state: Optional[Mapping[str, Any]] = None @@ -170,7 +148,10 @@ def stream_slices( while start_date < end_date: 
end_date_slice = min(start_date.add(days=7), end_date) - date_slice = {"start_date": start_date.strftime("%Y-%m-%dT%H:%M:%SZ"), "end_date": end_date_slice.strftime("%Y-%m-%dT%H:%M:%SZ")} + date_slice = { + "start_date": start_date.strftime("%Y-%m-%dT%H:%M:%SZ"), + "end_date": end_date_slice.strftime("%Y-%m-%dT%H:%M:%SZ"), + } date_slices.append(date_slice) start_date = end_date_slice @@ -195,20 +176,11 @@ def get_json_schema(self) -> Mapping[str, Any]: "type": "object", "additionalProperties": True, "properties": { - "type": { - "type": "string" - }, - "id": { - "type": "string" - }, - "created_at": { - "type": "string", - "format": "date-time" - }, - "quote": { - "type": "string" - } - } + "type": {"type": "string"}, + "id": {"type": "string"}, + "created_at": {"type": "string", "format": "date-time"}, + "quote": {"type": "string"}, + }, } def get_updated_state( @@ -221,11 +193,11 @@ def get_updated_state( return {} def read_records( - self, - sync_mode: SyncMode, - cursor_field: Optional[List[str]] = None, - stream_slice: Optional[Mapping[str, Any]] = None, - stream_state: Optional[Mapping[str, Any]] = None, + self, + sync_mode: SyncMode, + cursor_field: Optional[List[str]] = None, + stream_slice: Optional[Mapping[str, Any]] = None, + stream_state: Optional[Mapping[str, Any]] = None, ) -> Iterable[StreamData]: yield from super().read_records(sync_mode, cursor_field, stream_slice, stream_state) @@ -235,10 +207,7 @@ def request_params( stream_slice: Optional[Mapping[str, Any]] = None, next_page_token: Optional[Mapping[str, Any]] = None, ) -> MutableMapping[str, Any]: - return { - "start_date": stream_slice.get("start_date"), - "end_date": stream_slice.get("end_date") - } + return {"start_date": stream_slice.get("start_date"), "end_date": stream_slice.get("end_date")} def stream_slices( self, *, sync_mode: SyncMode, cursor_field: Optional[List[str]] = None, stream_state: Optional[Mapping[str, Any]] = None @@ -254,7 +223,10 @@ def stream_slices( while start_date < 
end_date: end_date_slice = min(start_date.add(days=7), end_date) - date_slice = {"start_date": start_date.strftime("%Y-%m-%dT%H:%M:%SZ"), "end_date": end_date_slice.strftime("%Y-%m-%dT%H:%M:%SZ")} + date_slice = { + "start_date": start_date.strftime("%Y-%m-%dT%H:%M:%SZ"), + "end_date": end_date_slice.strftime("%Y-%m-%dT%H:%M:%SZ"), + } date_slices.append(date_slice) start_date = end_date_slice @@ -272,20 +244,11 @@ def get_json_schema(self) -> Mapping[str, Any]: "type": "object", "additionalProperties": True, "properties": { - "type": { - "type": "string" - }, - "id": { - "type": "string" - }, - "created_at": { - "type": "string", - "format": "date-time" - }, - "divide_category": { - "type": "string" - } - } + "type": {"type": "string"}, + "id": {"type": "string"}, + "created_at": {"type": "string", "format": "date-time"}, + "divide_category": {"type": "string"}, + }, } def stream_slices( @@ -319,23 +282,12 @@ def get_json_schema(self) -> Mapping[str, Any]: "type": "object", "additionalProperties": True, "properties": { - "type": { - "type": "string" - }, - "id": { - "type": "string" - }, - "created_at": { - "type": "string", - "format": "date-time" - }, - "name": { - "type": "string" - }, - "album": { - "type": "string" - } - } + "type": {"type": "string"}, + "id": {"type": "string"}, + "created_at": {"type": "string", "format": "date-time"}, + "name": {"type": "string"}, + "album": {"type": "string"}, + }, } @property @@ -360,9 +312,7 @@ def request_params( stream_slice: Optional[Mapping[str, Any]] = None, next_page_token: Optional[Mapping[str, Any]] = None, ) -> MutableMapping[str, Any]: - return { - "page": next_page_token.get("page") - } + return {"page": next_page_token.get("page")} def read_records( self, @@ -433,7 +383,7 @@ def spec(self, logger: logging.Logger) -> ConnectorSpecification: "pattern_descriptor": "YYYY-MM-DDTHH:MM:SS.000Z", "examples": ["2020-11-16T00:00:00.000Z"], "type": "string", - "format": "date-time" + "format": "date-time", } } } diff 
--git a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_helpers/airbyte_message_assertions.py b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_helpers/airbyte_message_assertions.py index 52affbb6d76e..04b65594cf01 100644 --- a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_helpers/airbyte_message_assertions.py +++ b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_helpers/airbyte_message_assertions.py @@ -5,13 +5,16 @@ from typing import List import pytest -from airbyte_cdk.models import AirbyteMessage, Type -from airbyte_protocol.models import AirbyteStreamStatus +from airbyte_cdk.models import AirbyteMessage, AirbyteStreamStatus, Type def emits_successful_sync_status_messages(status_messages: List[AirbyteStreamStatus]) -> bool: - return (len(status_messages) == 3 and status_messages[0] == AirbyteStreamStatus.STARTED - and status_messages[1] == AirbyteStreamStatus.RUNNING and status_messages[2] == AirbyteStreamStatus.COMPLETE) + return ( + len(status_messages) == 3 + and status_messages[0] == AirbyteStreamStatus.STARTED + and status_messages[1] == AirbyteStreamStatus.RUNNING + and status_messages[2] == AirbyteStreamStatus.COMPLETE + ) def validate_message_order(expected_message_order: List[Type], messages: List[AirbyteMessage]): @@ -20,4 +23,6 @@ def validate_message_order(expected_message_order: List[Type], messages: List[Ai for i, message in enumerate(messages): if message.type != expected_message_order[i]: - pytest.fail(f"At index {i} actual message type {message.type.name} did not match expected message type {expected_message_order[i].name}") + pytest.fail( + f"At index {i} actual message type {message.type.name} did not match expected message type {expected_message_order[i].name}" + ) diff --git a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py index 
6e68db646675..c7fd2cef433e 100644 --- a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py +++ b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py @@ -114,16 +114,7 @@ def _create_justice_songs_request() -> RequestBuilder: return RequestBuilder.justice_songs_endpoint() -RESPONSE_TEMPLATE = { - "object": "list", - "has_more": False, - "data": [ - { - "id": "123", - "created_at": "2024-01-01T07:04:28.000Z" - } - ] -} +RESPONSE_TEMPLATE = {"object": "list", "has_more": False, "data": [{"id": "123", "created_at": "2024-01-01T07:04:28.000Z"}]} USER_TEMPLATE = { "object": "list", @@ -135,7 +126,7 @@ def _create_justice_songs_request() -> RequestBuilder: "first_name": "Paul", "last_name": "Atreides", } - ] + ], } PLANET_TEMPLATE = { @@ -147,7 +138,7 @@ def _create_justice_songs_request() -> RequestBuilder: "created_at": "2024-01-01T07:04:28.000Z", "name": "Giedi Prime", } - ] + ], } LEGACY_TEMPLATE = { @@ -159,7 +150,7 @@ def _create_justice_songs_request() -> RequestBuilder: "created_at": "2024-02-01T07:04:28.000Z", "quote": "What do you leave behind?", } - ] + ], } DIVIDER_TEMPLATE = { @@ -171,7 +162,7 @@ def _create_justice_songs_request() -> RequestBuilder: "created_at": "2024-02-01T07:04:28.000Z", "divide_category": "dukes", } - ] + ], } @@ -190,8 +181,8 @@ def _create_justice_songs_request() -> RequestBuilder: "created_at": "2024-02-01T07:04:28.000Z", "name": "dukes", "album": "", - } - ] + }, + ], } @@ -208,7 +199,7 @@ def _create_response(pagination_has_more: bool = False) -> HttpResponseBuilder: return create_response_builder( response_template=RESPONSE_TEMPLATE, records_path=FieldPath("data"), - pagination_strategy=FieldUpdatePaginationStrategy(FieldPath("has_more"), pagination_has_more) + pagination_strategy=FieldUpdatePaginationStrategy(FieldPath("has_more"), pagination_has_more), ) @@ -225,9 +216,7 @@ class FullRefreshStreamTest(TestCase): @HttpMocker() def 
test_full_refresh_sync(self, http_mocker): start_datetime = _NOW - timedelta(days=14) - config = { - "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") - } + config = {"start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ")} http_mocker.get( _create_users_request().build(), @@ -248,17 +237,15 @@ def test_full_refresh_sync(self, http_mocker): @HttpMocker() def test_substream_resumable_full_refresh_with_parent_slices(self, http_mocker): start_datetime = _NOW - timedelta(days=14) - config = { - "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") - } + config = {"start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ")} expected_first_substream_per_stream_state = [ - {'partition': {'divide_category': 'dukes'}, 'cursor': {'__ab_full_refresh_sync_complete': True}}, + {"partition": {"divide_category": "dukes"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, ] expected_second_substream_per_stream_state = [ - {'partition': {'divide_category': 'dukes'}, 'cursor': {'__ab_full_refresh_sync_complete': True}}, - {'partition': {'divide_category': 'mentats'}, 'cursor': {'__ab_full_refresh_sync_complete': True}}, + {"partition": {"divide_category": "dukes"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, + {"partition": {"divide_category": "mentats"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, ] http_mocker.get( @@ -277,10 +264,16 @@ def test_substream_resumable_full_refresh_with_parent_slices(self, http_mocker): assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("dividers")) assert len(actual_messages.records) == 4 assert len(actual_messages.state_messages) == 2 - validate_message_order([Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) - assert actual_messages.state_messages[0].state.stream.stream_state == AirbyteStateBlob(states=expected_first_substream_per_stream_state) + validate_message_order( + [Type.RECORD, Type.RECORD, 
Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages + ) + assert actual_messages.state_messages[0].state.stream.stream_state == AirbyteStateBlob( + states=expected_first_substream_per_stream_state + ) assert actual_messages.state_messages[0].state.sourceStats.recordCount == 2.0 - assert actual_messages.state_messages[1].state.stream.stream_state == AirbyteStateBlob(states=expected_second_substream_per_stream_state) + assert actual_messages.state_messages[1].state.stream.stream_state == AirbyteStateBlob( + states=expected_second_substream_per_stream_state + ) assert actual_messages.state_messages[1].state.sourceStats.recordCount == 2.0 @@ -289,20 +282,25 @@ class IncrementalStreamTest(TestCase): @HttpMocker() def test_incremental_sync(self, http_mocker): start_datetime = _NOW - timedelta(days=14) - config = { - "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") - } + config = {"start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ")} last_record_date_0 = (start_datetime + timedelta(days=4)).strftime("%Y-%m-%dT%H:%M:%SZ") http_mocker.get( _create_planets_request().with_start_date(start_datetime).with_end_date(start_datetime + timedelta(days=7)).build(), - _create_response().with_record(record=_create_record("planets").with_cursor(last_record_date_0)).with_record(record=_create_record("planets").with_cursor(last_record_date_0)).with_record(record=_create_record("planets").with_cursor(last_record_date_0)).build(), + _create_response() + .with_record(record=_create_record("planets").with_cursor(last_record_date_0)) + .with_record(record=_create_record("planets").with_cursor(last_record_date_0)) + .with_record(record=_create_record("planets").with_cursor(last_record_date_0)) + .build(), ) last_record_date_1 = (_NOW - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ") http_mocker.get( _create_planets_request().with_start_date(start_datetime + timedelta(days=7)).with_end_date(_NOW).build(), - 
_create_response().with_record(record=_create_record("planets").with_cursor(last_record_date_1)).with_record(record=_create_record("planets").with_cursor(last_record_date_1)).build(), + _create_response() + .with_record(record=_create_record("planets").with_cursor(last_record_date_1)) + .with_record(record=_create_record("planets").with_cursor(last_record_date_1)) + .build(), ) source = SourceFixture() @@ -311,7 +309,10 @@ def test_incremental_sync(self, http_mocker): assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("planets")) assert len(actual_messages.records) == 5 assert len(actual_messages.state_messages) == 2 - validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) + validate_message_order( + [Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], + actual_messages.records_and_state_messages, + ) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "planets" assert actual_messages.state_messages[0].state.stream.stream_state == AirbyteStateBlob(created_at=last_record_date_0) assert actual_messages.state_messages[0].state.sourceStats.recordCount == 3.0 @@ -322,20 +323,25 @@ def test_incremental_sync(self, http_mocker): @HttpMocker() def test_incremental_running_as_full_refresh(self, http_mocker): start_datetime = _NOW - timedelta(days=14) - config = { - "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") - } + config = {"start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ")} last_record_date_0 = (start_datetime + timedelta(days=4)).strftime("%Y-%m-%dT%H:%M:%SZ") http_mocker.get( _create_planets_request().with_start_date(start_datetime).with_end_date(start_datetime + timedelta(days=7)).build(), - 
_create_response().with_record(record=_create_record("planets").with_cursor(last_record_date_0)).with_record(record=_create_record("planets").with_cursor(last_record_date_0)).with_record(record=_create_record("planets").with_cursor(last_record_date_0)).build(), + _create_response() + .with_record(record=_create_record("planets").with_cursor(last_record_date_0)) + .with_record(record=_create_record("planets").with_cursor(last_record_date_0)) + .with_record(record=_create_record("planets").with_cursor(last_record_date_0)) + .build(), ) last_record_date_1 = (_NOW - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ") http_mocker.get( _create_planets_request().with_start_date(start_datetime + timedelta(days=7)).with_end_date(_NOW).build(), - _create_response().with_record(record=_create_record("planets").with_cursor(last_record_date_1)).with_record(record=_create_record("planets").with_cursor(last_record_date_1)).build(), + _create_response() + .with_record(record=_create_record("planets").with_cursor(last_record_date_1)) + .with_record(record=_create_record("planets").with_cursor(last_record_date_1)) + .build(), ) source = SourceFixture() @@ -344,7 +350,10 @@ def test_incremental_running_as_full_refresh(self, http_mocker): assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("planets")) assert len(actual_messages.records) == 5 assert len(actual_messages.state_messages) == 2 - validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) + validate_message_order( + [Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], + actual_messages.records_and_state_messages, + ) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "planets" assert actual_messages.state_messages[0].state.stream.stream_state == AirbyteStateBlob(created_at=last_record_date_0) @@ -356,20 +365,25 @@ def 
test_incremental_running_as_full_refresh(self, http_mocker): @HttpMocker() def test_legacy_incremental_sync(self, http_mocker): start_datetime = _NOW - timedelta(days=14) - config = { - "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") - } + config = {"start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ")} last_record_date_0 = (start_datetime + timedelta(days=4)).strftime("%Y-%m-%dT%H:%M:%SZ") http_mocker.get( _create_legacies_request().with_start_date(start_datetime).with_end_date(start_datetime + timedelta(days=7)).build(), - _create_response().with_record(record=_create_record("legacies").with_cursor(last_record_date_0)).with_record(record=_create_record("legacies").with_cursor(last_record_date_0)).with_record(record=_create_record("legacies").with_cursor(last_record_date_0)).build(), + _create_response() + .with_record(record=_create_record("legacies").with_cursor(last_record_date_0)) + .with_record(record=_create_record("legacies").with_cursor(last_record_date_0)) + .with_record(record=_create_record("legacies").with_cursor(last_record_date_0)) + .build(), ) last_record_date_1 = (_NOW - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ") http_mocker.get( _create_legacies_request().with_start_date(start_datetime + timedelta(days=7)).with_end_date(_NOW).build(), - _create_response().with_record(record=_create_record("legacies").with_cursor(last_record_date_1)).with_record(record=_create_record("legacies").with_cursor(last_record_date_1)).build(), + _create_response() + .with_record(record=_create_record("legacies").with_cursor(last_record_date_1)) + .with_record(record=_create_record("legacies").with_cursor(last_record_date_1)) + .build(), ) source = SourceFixture() @@ -378,7 +392,10 @@ def test_legacy_incremental_sync(self, http_mocker): assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("legacies")) assert len(actual_messages.records) == 5 assert len(actual_messages.state_messages) == 2 - 
validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) + validate_message_order( + [Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], + actual_messages.records_and_state_messages, + ) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "legacies" assert actual_messages.state_messages[0].state.stream.stream_state == AirbyteStateBlob(created_at=last_record_date_0) assert actual_messages.state_messages[0].state.sourceStats.recordCount == 3.0 @@ -389,9 +406,7 @@ def test_legacy_incremental_sync(self, http_mocker): @HttpMocker() def test_legacy_no_records_retains_incoming_state(self, http_mocker): start_datetime = _NOW - timedelta(days=14) - config = { - "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") - } + config = {"start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ")} last_record_date_1 = (_NOW - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ") http_mocker.get( @@ -412,9 +427,7 @@ def test_legacy_no_records_retains_incoming_state(self, http_mocker): @HttpMocker() def test_legacy_no_slices_retains_incoming_state(self, http_mocker): start_datetime = _NOW - timedelta(days=14) - config = { - "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") - } + config = {"start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ")} last_record_date_1 = _NOW.strftime("%Y-%m-%dT%H:%M:%SZ") @@ -434,17 +447,15 @@ class MultipleStreamTest(TestCase): @HttpMocker() def test_incremental_and_full_refresh_streams(self, http_mocker): start_datetime = _NOW - timedelta(days=14) - config = { - "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") - } + config = {"start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ")} expected_first_substream_per_stream_state = [ - {'partition': {'divide_category': 'dukes'}, 'cursor': {'__ab_full_refresh_sync_complete': True}}, + {"partition": 
{"divide_category": "dukes"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, ] expected_second_substream_per_stream_state = [ - {'partition': {'divide_category': 'dukes'}, 'cursor': {'__ab_full_refresh_sync_complete': True}}, - {'partition': {'divide_category': 'mentats'}, 'cursor': {'__ab_full_refresh_sync_complete': True}}, + {"partition": {"divide_category": "dukes"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, + {"partition": {"divide_category": "mentats"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, ] # Mocks for users full refresh stream @@ -457,13 +468,20 @@ def test_incremental_and_full_refresh_streams(self, http_mocker): last_record_date_0 = (start_datetime + timedelta(days=4)).strftime("%Y-%m-%dT%H:%M:%SZ") http_mocker.get( _create_planets_request().with_start_date(start_datetime).with_end_date(start_datetime + timedelta(days=7)).build(), - _create_response().with_record(record=_create_record("planets").with_cursor(last_record_date_0)).with_record(record=_create_record("planets").with_cursor(last_record_date_0)).with_record(record=_create_record("planets").with_cursor(last_record_date_0)).build(), + _create_response() + .with_record(record=_create_record("planets").with_cursor(last_record_date_0)) + .with_record(record=_create_record("planets").with_cursor(last_record_date_0)) + .with_record(record=_create_record("planets").with_cursor(last_record_date_0)) + .build(), ) last_record_date_1 = (_NOW - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ") http_mocker.get( _create_planets_request().with_start_date(start_datetime + timedelta(days=7)).with_end_date(_NOW).build(), - _create_response().with_record(record=_create_record("planets").with_cursor(last_record_date_1)).with_record(record=_create_record("planets").with_cursor(last_record_date_1)).build(), + _create_response() + .with_record(record=_create_record("planets").with_cursor(last_record_date_1)) + 
.with_record(record=_create_record("planets").with_cursor(last_record_date_1)) + .build(), ) # Mocks for dividers full refresh stream @@ -478,7 +496,13 @@ def test_incremental_and_full_refresh_streams(self, http_mocker): ) source = SourceFixture() - actual_messages = read(source, config=config, catalog=_create_catalog([("users", SyncMode.full_refresh), ("planets", SyncMode.incremental), ("dividers", SyncMode.full_refresh)])) + actual_messages = read( + source, + config=config, + catalog=_create_catalog( + [("users", SyncMode.full_refresh), ("planets", SyncMode.incremental), ("dividers", SyncMode.full_refresh)] + ), + ) assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("users")) assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("planets")) @@ -486,24 +510,27 @@ def test_incremental_and_full_refresh_streams(self, http_mocker): assert len(actual_messages.records) == 11 assert len(actual_messages.state_messages) == 5 - validate_message_order([ - Type.RECORD, - Type.RECORD, - Type.STATE, - Type.RECORD, - Type.RECORD, - Type.RECORD, - Type.STATE, - Type.RECORD, - Type.RECORD, - Type.STATE, - Type.RECORD, - Type.RECORD, - Type.STATE, - Type.RECORD, - Type.RECORD, - Type.STATE - ], actual_messages.records_and_state_messages) + validate_message_order( + [ + Type.RECORD, + Type.RECORD, + Type.STATE, + Type.RECORD, + Type.RECORD, + Type.RECORD, + Type.STATE, + Type.RECORD, + Type.RECORD, + Type.STATE, + Type.RECORD, + Type.RECORD, + Type.STATE, + Type.RECORD, + Type.RECORD, + Type.STATE, + ], + actual_messages.records_and_state_messages, + ) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "users" assert actual_messages.state_messages[0].state.stream.stream_state == AirbyteStateBlob(__ab_full_refresh_sync_complete=True) assert actual_messages.state_messages[0].state.sourceStats.recordCount == 2.0 @@ -514,8 +541,12 @@ def test_incremental_and_full_refresh_streams(self, http_mocker): 
assert actual_messages.state_messages[2].state.stream.stream_state == AirbyteStateBlob(created_at=last_record_date_1) assert actual_messages.state_messages[2].state.sourceStats.recordCount == 2.0 assert actual_messages.state_messages[3].state.stream.stream_descriptor.name == "dividers" - assert actual_messages.state_messages[3].state.stream.stream_state == AirbyteStateBlob(states=expected_first_substream_per_stream_state) + assert actual_messages.state_messages[3].state.stream.stream_state == AirbyteStateBlob( + states=expected_first_substream_per_stream_state + ) assert actual_messages.state_messages[3].state.sourceStats.recordCount == 2.0 assert actual_messages.state_messages[4].state.stream.stream_descriptor.name == "dividers" - assert actual_messages.state_messages[4].state.stream.stream_state == AirbyteStateBlob(states=expected_second_substream_per_stream_state) + assert actual_messages.state_messages[4].state.stream.stream_state == AirbyteStateBlob( + states=expected_second_substream_per_stream_state + ) assert actual_messages.state_messages[4].state.sourceStats.recordCount == 2.0 diff --git a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_resumable_full_refresh.py b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_resumable_full_refresh.py index bc5fe899f343..f5a9e8578ab9 100644 --- a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_resumable_full_refresh.py +++ b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_resumable_full_refresh.py @@ -7,7 +7,7 @@ from unittest import TestCase import freezegun -from airbyte_cdk.models import AirbyteStateBlob, ConfiguredAirbyteCatalog, SyncMode, Type +from airbyte_cdk.models import AirbyteStateBlob, AirbyteStreamStatus, ConfiguredAirbyteCatalog, FailureType, SyncMode, Type from airbyte_cdk.test.catalog_builder import ConfiguredAirbyteStreamBuilder from airbyte_cdk.test.entrypoint_wrapper import read from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest @@ -20,7 
+20,6 @@ create_response_builder, ) from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import AirbyteStreamStatus, FailureType from unit_tests.sources.mock_server_tests.mock_source_fixture import SourceFixture from unit_tests.sources.mock_server_tests.test_helpers import emits_successful_sync_status_messages, validate_message_order @@ -64,16 +63,7 @@ def _create_justice_songs_request() -> RequestBuilder: return RequestBuilder.justice_songs_endpoint() -RESPONSE_TEMPLATE = { - "object": "list", - "has_more": False, - "data": [ - { - "id": "123", - "created_at": "2024-01-01T07:04:28.000Z" - } - ] -} +RESPONSE_TEMPLATE = {"object": "list", "has_more": False, "data": [{"id": "123", "created_at": "2024-01-01T07:04:28.000Z"}]} JUSTICE_SONGS_TEMPLATE = { @@ -91,8 +81,8 @@ def _create_justice_songs_request() -> RequestBuilder: "created_at": "2024-02-01T07:04:28.000Z", "name": "dukes", "album": "", - } - ] + }, + ], } @@ -105,7 +95,7 @@ def _create_response(pagination_has_more: bool = False) -> HttpResponseBuilder: return create_response_builder( response_template=RESPONSE_TEMPLATE, records_path=FieldPath("data"), - pagination_strategy=FieldUpdatePaginationStrategy(FieldPath("has_more"), pagination_has_more) + pagination_strategy=FieldUpdatePaginationStrategy(FieldPath("has_more"), pagination_has_more), ) @@ -126,12 +116,20 @@ def test_resumable_full_refresh_sync(self, http_mocker): http_mocker.get( _create_justice_songs_request().build(), - _create_response(pagination_has_more=True).with_pagination().with_record(record=_create_record("justice_songs")).with_record(record=_create_record("justice_songs")).build(), + _create_response(pagination_has_more=True) + .with_pagination() + .with_record(record=_create_record("justice_songs")) + .with_record(record=_create_record("justice_songs")) + .build(), ) http_mocker.get( _create_justice_songs_request().with_page(1).build(), - 
_create_response(pagination_has_more=True).with_pagination().with_record(record=_create_record("justice_songs")).with_record(record=_create_record("justice_songs")).build(), + _create_response(pagination_has_more=True) + .with_pagination() + .with_record(record=_create_record("justice_songs")) + .with_record(record=_create_record("justice_songs")) + .build(), ) http_mocker.get( @@ -145,7 +143,10 @@ def test_resumable_full_refresh_sync(self, http_mocker): assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("justice_songs")) assert len(actual_messages.records) == 5 assert len(actual_messages.state_messages) == 4 - validate_message_order([Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.STATE, Type.STATE], actual_messages.records_and_state_messages) + validate_message_order( + [Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.STATE, Type.STATE], + actual_messages.records_and_state_messages, + ) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "justice_songs" assert actual_messages.state_messages[0].state.stream.stream_state == AirbyteStateBlob(page=1) assert actual_messages.state_messages[0].state.sourceStats.recordCount == 2.0 @@ -167,17 +168,31 @@ def test_resumable_full_refresh_second_attempt(self, http_mocker): http_mocker.get( _create_justice_songs_request().with_page(100).build(), - _create_response(pagination_has_more=True).with_pagination().with_record(record=_create_record("justice_songs")).with_record(record=_create_record("justice_songs")).with_record(record=_create_record("justice_songs")).build(), + _create_response(pagination_has_more=True) + .with_pagination() + .with_record(record=_create_record("justice_songs")) + .with_record(record=_create_record("justice_songs")) + .with_record(record=_create_record("justice_songs")) + .build(), ) http_mocker.get( 
_create_justice_songs_request().with_page(101).build(), - _create_response(pagination_has_more=True).with_pagination().with_record(record=_create_record("justice_songs")).with_record(record=_create_record("justice_songs")).with_record(record=_create_record("justice_songs")).build(), + _create_response(pagination_has_more=True) + .with_pagination() + .with_record(record=_create_record("justice_songs")) + .with_record(record=_create_record("justice_songs")) + .with_record(record=_create_record("justice_songs")) + .build(), ) http_mocker.get( _create_justice_songs_request().with_page(102).build(), - _create_response(pagination_has_more=False).with_pagination().with_record(record=_create_record("justice_songs")).with_record(record=_create_record("justice_songs")).build(), + _create_response(pagination_has_more=False) + .with_pagination() + .with_record(record=_create_record("justice_songs")) + .with_record(record=_create_record("justice_songs")) + .build(), ) source = SourceFixture() @@ -186,7 +201,23 @@ def test_resumable_full_refresh_second_attempt(self, http_mocker): assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("justice_songs")) assert len(actual_messages.records) == 8 assert len(actual_messages.state_messages) == 4 - validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE, Type.STATE], actual_messages.records_and_state_messages) + validate_message_order( + [ + Type.RECORD, + Type.RECORD, + Type.RECORD, + Type.STATE, + Type.RECORD, + Type.RECORD, + Type.RECORD, + Type.STATE, + Type.RECORD, + Type.RECORD, + Type.STATE, + Type.STATE, + ], + actual_messages.records_and_state_messages, + ) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "justice_songs" assert actual_messages.state_messages[0].state.stream.stream_state == AirbyteStateBlob(page=101) assert 
actual_messages.state_messages[0].state.sourceStats.recordCount == 3.0 @@ -206,25 +237,37 @@ def test_resumable_full_refresh_failure(self, http_mocker): http_mocker.get( _create_justice_songs_request().build(), - _create_response(pagination_has_more=True).with_pagination().with_record(record=_create_record("justice_songs")).with_record(record=_create_record("justice_songs")).build(), + _create_response(pagination_has_more=True) + .with_pagination() + .with_record(record=_create_record("justice_songs")) + .with_record(record=_create_record("justice_songs")) + .build(), ) http_mocker.get( _create_justice_songs_request().with_page(1).build(), - _create_response(pagination_has_more=True).with_pagination().with_record(record=_create_record("justice_songs")).with_record(record=_create_record("justice_songs")).build(), + _create_response(pagination_has_more=True) + .with_pagination() + .with_record(record=_create_record("justice_songs")) + .with_record(record=_create_record("justice_songs")) + .build(), ) http_mocker.get(_create_justice_songs_request().with_page(2).build(), _create_response().with_status_code(status_code=400).build()) source = SourceFixture() - actual_messages = read(source, config=config, catalog=_create_catalog([("justice_songs", SyncMode.full_refresh, {})]), expecting_exception=True) + actual_messages = read( + source, config=config, catalog=_create_catalog([("justice_songs", SyncMode.full_refresh, {})]), expecting_exception=True + ) status_messages = actual_messages.get_stream_statuses("justice_songs") assert status_messages[-1] == AirbyteStreamStatus.INCOMPLETE assert len(actual_messages.records) == 4 assert len(actual_messages.state_messages) == 2 - validate_message_order([Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) + validate_message_order( + [Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages + ) assert 
actual_messages.state_messages[0].state.stream.stream_descriptor.name == "justice_songs" assert actual_messages.state_messages[0].state.stream.stream_state == AirbyteStateBlob(page=1) assert actual_messages.state_messages[1].state.stream.stream_descriptor.name == "justice_songs" diff --git a/airbyte-cdk/python/unit_tests/sources/streams/checkpoint/test_checkpoint_reader.py b/airbyte-cdk/python/unit_tests/sources/streams/checkpoint/test_checkpoint_reader.py index 2ccfaf33e8b6..01ddd363b0d3 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/checkpoint/test_checkpoint_reader.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/checkpoint/test_checkpoint_reader.py @@ -310,7 +310,12 @@ def test_legacy_cursor_based_checkpoint_reader_resumable_full_refresh(): {"parent_id": 400, "next_page_token": 2, "partition": {"parent_id": 400}, "cursor_slice": {"next_page_token": 2}}, {"parent_id": 400, "next_page_token": 3, "partition": {"parent_id": 400}, "cursor_slice": {"next_page_token": 3}}, {"parent_id": 400, "next_page_token": 4, "partition": {"parent_id": 400}, "cursor_slice": {"next_page_token": 4}}, - {"parent_id": 400, "__ab_full_refresh_sync_complete": True, "partition": {"parent_id": 400}, "cursor_slice": {"__ab_full_refresh_sync_complete": True}}, + { + "parent_id": 400, + "__ab_full_refresh_sync_complete": True, + "partition": {"parent_id": 400}, + "cursor_slice": {"__ab_full_refresh_sync_complete": True}, + }, ] mocked_state = [ diff --git a/airbyte-cdk/python/unit_tests/sources/streams/checkpoint/test_substream_resumable_full_refresh_cursor.py b/airbyte-cdk/python/unit_tests/sources/streams/checkpoint/test_substream_resumable_full_refresh_cursor.py index eb762ee08f33..4944518535f9 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/checkpoint/test_substream_resumable_full_refresh_cursor.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/checkpoint/test_substream_resumable_full_refresh_cursor.py @@ -14,22 +14,8 @@ def 
test_substream_resumable_full_refresh_cursor(): expected_ending_state = { "states": [ - { - "partition": { - "musician_id": "kousei_arima" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - }, - { - "partition": { - "musician_id": "kaori_miyazono" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - } + {"partition": {"musician_id": "kousei_arima"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, + {"partition": {"musician_id": "kaori_miyazono"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, ] } @@ -58,65 +44,18 @@ def test_substream_resumable_full_refresh_cursor_with_state(): """ initial_state = { "states": [ - { - "partition": { - "musician_id": "kousei_arima" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - }, - { - "partition": { - "musician_id": "kaori_miyazono" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - }, - { - "partition": { - "musician_id": "takeshi_aiza" - }, - "cursor": {} - } + {"partition": {"musician_id": "kousei_arima"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, + {"partition": {"musician_id": "kaori_miyazono"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, + {"partition": {"musician_id": "takeshi_aiza"}, "cursor": {}}, ] } expected_ending_state = { "states": [ - { - "partition": { - "musician_id": "kousei_arima" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - }, - { - "partition": { - "musician_id": "kaori_miyazono" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - }, - { - "partition": { - "musician_id": "takeshi_aiza" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - }, - { - "partition": { - "musician_id": "emi_igawa" - }, - "cursor": { - "__ab_full_refresh_sync_complete": True - } - } + {"partition": {"musician_id": "kousei_arima"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, + {"partition": {"musician_id": "kaori_miyazono"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, 
+ {"partition": {"musician_id": "takeshi_aiza"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, + {"partition": {"musician_id": "emi_igawa"}, "cursor": {"__ab_full_refresh_sync_complete": True}}, ] } @@ -146,9 +85,7 @@ def test_substream_resumable_full_refresh_cursor_with_state(): def test_set_initial_state_invalid_incoming_state(): - bad_state = { - "next_page_token": 2 - } + bad_state = {"next_page_token": 2} cursor = SubstreamResumableFullRefreshCursor() with pytest.raises(AirbyteTracedException): diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py index e6c91686209b..090950aa14c7 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py @@ -6,7 +6,14 @@ import logging from typing import Any, List, Mapping, Optional, Tuple, Union -from airbyte_cdk.models import AirbyteStateMessage, ConfiguredAirbyteCatalog, ConnectorSpecification, DestinationSyncMode, SyncMode +from airbyte_cdk.models import ( + AirbyteStateMessage, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + ConnectorSpecification, + DestinationSyncMode, + SyncMode, +) from airbyte_cdk.sources.concurrent_source.concurrent_source import ConcurrentSource from airbyte_cdk.sources.concurrent_source.concurrent_source_adapter import ConcurrentSourceAdapter from airbyte_cdk.sources.concurrent_source.thread_pool_manager import ThreadPoolManager @@ -17,7 +24,6 @@ from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor, CursorField, FinalStateCursor from airbyte_cdk.sources.streams.concurrent.state_converters.datetime_stream_state_converter import EpochValueConcurrentStreamStateConverter -from airbyte_protocol.models import 
ConfiguredAirbyteStream from unit_tests.sources.file_based.scenarios.scenario_builder import SourceBuilder from unit_tests.sources.streams.concurrent.scenarios.thread_based_concurrent_stream_source_builder import NeverLogSliceLogger @@ -46,7 +52,7 @@ def __init__( self._threadpool = threadpool_manager self._cursor_field = cursor_field self._cursor_boundaries = cursor_boundaries - self._state = [AirbyteStateMessage.parse_obj(s) for s in input_state] if input_state else None + self._state = [AirbyteStateMessage(s) for s in input_state] if input_state else None def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, Optional[Any]]: return True, None @@ -74,10 +80,12 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: self._cursor_field, self._cursor_boundaries, None, - EpochValueConcurrentStreamStateConverter.get_end_provider() + EpochValueConcurrentStreamStateConverter.get_end_provider(), ) if self._cursor_field - else FinalStateCursor(stream_name=stream.name, stream_namespace=stream.namespace, message_repository=self.message_repository), + else FinalStateCursor( + stream_name=stream.name, stream_namespace=stream.namespace, message_repository=self.message_repository + ), ) for stream, state in zip(self._streams, stream_states) ] @@ -129,6 +137,8 @@ def set_input_state(self, state: List[Mapping[str, Any]]) -> "StreamFacadeSource self._input_state = state return self - def build(self, configured_catalog: Optional[Mapping[str, Any]], config: Optional[Mapping[str, Any]], state: Optional[TState]) -> StreamFacadeSource: + def build( + self, configured_catalog: Optional[Mapping[str, Any]], config: Optional[Mapping[str, Any]], state: Optional[TState] + ) -> StreamFacadeSource: threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=self._max_workers, thread_name_prefix="workerpool") return StreamFacadeSource(self._streams, threadpool, self._cursor_field, self._cursor_boundaries, state) diff --git 
a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py index 43c198916a67..51d83084041e 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py @@ -5,7 +5,7 @@ import logging from typing import Any, Iterable, List, Mapping, Optional, Tuple, Union -from airbyte_cdk.models import ConfiguredAirbyteCatalog, ConnectorSpecification, DestinationSyncMode, SyncMode +from airbyte_cdk.models import ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, ConnectorSpecification, DestinationSyncMode, SyncMode from airbyte_cdk.sources.concurrent_source.concurrent_source import ConcurrentSource from airbyte_cdk.sources.concurrent_source.concurrent_source_adapter import ConcurrentSourceAdapter from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository @@ -19,7 +19,6 @@ from airbyte_cdk.sources.streams.concurrent.partitions.record import Record from airbyte_cdk.sources.streams.core import StreamData from airbyte_cdk.sources.utils.slice_logger import SliceLogger -from airbyte_protocol.models import ConfiguredAirbyteStream from unit_tests.sources.file_based.scenarios.scenario_builder import SourceBuilder @@ -49,7 +48,16 @@ def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> return True, None def streams(self, config: Mapping[str, Any]) -> List[Stream]: - return [StreamFacade(s, LegacyStream(), FinalStateCursor(stream_name=s.name, stream_namespace=s.namespace, message_repository=self.message_repository), NeverLogSliceLogger(), s._logger) for s in self._streams] + return [ + StreamFacade( + s, + LegacyStream(), + FinalStateCursor(stream_name=s.name, 
stream_namespace=s.namespace, message_repository=self.message_repository), + NeverLogSliceLogger(), + s._logger, + ) + for s in self._streams + ] def spec(self, *args: Any, **kwargs: Any) -> ConnectorSpecification: return ConnectorSpecification(connectionSpecification={}) @@ -58,7 +66,13 @@ def read_catalog(self, catalog_path: str) -> ConfiguredAirbyteCatalog: return ConfiguredAirbyteCatalog( streams=[ ConfiguredAirbyteStream( - stream=StreamFacade(s, LegacyStream(), FinalStateCursor(stream_name=s.name, stream_namespace=s.namespace, message_repository=InMemoryMessageRepository()), NeverLogSliceLogger(), s._logger).as_airbyte_stream(), + stream=StreamFacade( + s, + LegacyStream(), + FinalStateCursor(stream_name=s.name, stream_namespace=s.namespace, message_repository=InMemoryMessageRepository()), + NeverLogSliceLogger(), + s._logger, + ).as_airbyte_stream(), sync_mode=SyncMode.full_refresh, destination_sync_mode=DestinationSyncMode.overwrite, ) diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_concurrent_read_processor.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_concurrent_read_processor.py index 19a4cdb62627..31688999372d 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_concurrent_read_processor.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_concurrent_read_processor.py @@ -566,7 +566,9 @@ def test_on_exception_return_trace_message_and_on_stream_complete_return_stream_ handler.is_done() @freezegun.freeze_time("2020-01-01T00:00:00") - def test_given_underlying_exception_is_traced_exception_on_exception_return_trace_message_and_on_stream_complete_return_stream_status(self): + def test_given_underlying_exception_is_traced_exception_on_exception_return_trace_message_and_on_stream_complete_return_stream_status( + self, + ): stream_instances_to_read_from = [self._stream, self._another_stream] handler = ConcurrentReadProcessor( diff --git 
a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_cursor.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_cursor.py index b8fa8b2f79e0..3f511c7b51da 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_cursor.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_cursor.py @@ -102,10 +102,7 @@ def test_given_state_not_sequential_when_close_partition_then_emit_state(self) - self._state_manager.update_state_for_stream.assert_called_once_with( _A_STREAM_NAME, _A_STREAM_NAMESPACE, - { - "slices": [{"end": 0, "start": 0}, {"end": 30, "start": 12}], - "state_type": "date-range" - }, + {"slices": [{"end": 0, "start": 0}, {"end": 30, "start": 12}], "state_type": "date-range"}, ) def test_given_boundary_fields_when_close_partition_then_emit_updated_state(self) -> None: @@ -197,7 +194,7 @@ def test_given_one_slice_when_generate_slices_then_create_slice_from_slice_upper "state_type": ConcurrencyCompatibleStateType.date_range.value, "slices": [ {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 20}, - ] + ], }, self._message_repository, self._state_manager, @@ -225,7 +222,7 @@ def test_given_start_after_slices_when_generate_slices_then_generate_from_start( "state_type": ConcurrencyCompatibleStateType.date_range.value, "slices": [ {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 20}, - ] + ], }, self._message_repository, self._state_manager, @@ -254,7 +251,7 @@ def test_given_state_with_gap_and_start_after_slices_when_generate_slices_then_g "slices": [ {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 10}, {EpochValueConcurrentStreamStateConverter.START_KEY: 15, EpochValueConcurrentStreamStateConverter.END_KEY: 20}, - ] + ], }, self._message_repository, self._state_manager, @@ -283,7 +280,7 @@ def 
test_given_small_slice_range_when_generate_slices_then_create_many_slices(se "state_type": ConcurrencyCompatibleStateType.date_range.value, "slices": [ {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 20}, - ] + ], }, self._message_repository, self._state_manager, @@ -316,7 +313,7 @@ def test_given_difference_between_slices_match_slice_range_when_generate_slices_ "slices": [ {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 30}, {EpochValueConcurrentStreamStateConverter.START_KEY: 40, EpochValueConcurrentStreamStateConverter.END_KEY: 50}, - ] + ], }, self._message_repository, self._state_manager, @@ -346,7 +343,7 @@ def test_given_non_continuous_state_when_generate_slices_then_create_slices_betw {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 10}, {EpochValueConcurrentStreamStateConverter.START_KEY: 20, EpochValueConcurrentStreamStateConverter.END_KEY: 25}, {EpochValueConcurrentStreamStateConverter.START_KEY: 30, EpochValueConcurrentStreamStateConverter.END_KEY: 40}, - ] + ], }, self._message_repository, self._state_manager, @@ -378,7 +375,7 @@ def test_given_lookback_window_when_generate_slices_then_apply_lookback_on_most_ "slices": [ {EpochValueConcurrentStreamStateConverter.START_KEY: 0, EpochValueConcurrentStreamStateConverter.END_KEY: 20}, {EpochValueConcurrentStreamStateConverter.START_KEY: 30, EpochValueConcurrentStreamStateConverter.END_KEY: 40}, - ] + ], }, self._message_repository, self._state_manager, @@ -407,7 +404,7 @@ def test_given_start_is_before_first_slice_lower_boundary_when_generate_slices_t "state_type": ConcurrencyCompatibleStateType.date_range.value, "slices": [ {EpochValueConcurrentStreamStateConverter.START_KEY: 10, EpochValueConcurrentStreamStateConverter.END_KEY: 20}, - ] + ], }, self._message_repository, self._state_manager, diff --git 
a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_partition_enqueuer.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_partition_enqueuer.py index d11154e71297..da67ff82588d 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_partition_enqueuer.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_partition_enqueuer.py @@ -68,7 +68,10 @@ def test_given_exception_when_generate_partitions_then_return_exception_and_sent self._partition_generator.generate_partitions(stream) queue_content = self._consume_queue() - assert queue_content == _SOME_PARTITIONS + [StreamThreadException(exception, _A_STREAM_NAME), PartitionGenerationCompletedSentinel(stream)] + assert queue_content == _SOME_PARTITIONS + [ + StreamThreadException(exception, _A_STREAM_NAME), + PartitionGenerationCompletedSentinel(stream), + ] def _partitions_before_raising(self, partitions: List[Partition], exception: Exception) -> Callable[[], Iterable[Partition]]: def inner_function() -> Iterable[Partition]: diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_default_backoff_strategy.py b/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_default_backoff_strategy.py index de795a409b1a..67e7e3503c6c 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_default_backoff_strategy.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_default_backoff_strategy.py @@ -15,8 +15,9 @@ def test_given_no_arguments_default_backoff_strategy_returns_default_values(): class CustomBackoffStrategy(BackoffStrategy): - - def backoff_time(self, response_or_exception: Optional[Union[requests.Response, requests.RequestException]], attempt_count: int) -> Optional[float]: + def backoff_time( + self, response_or_exception: Optional[Union[requests.Response, requests.RequestException]], attempt_count: int + ) -> Optional[float]: return 
response_or_exception.headers["Retry-After"] diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_http_status_error_handler.py b/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_http_status_error_handler.py index e56d97a4fa18..6da3e15b2a69 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_http_status_error_handler.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_http_status_error_handler.py @@ -26,10 +26,12 @@ def test_given_ok_response_http_status_error_handler_returns_success_action(mock "error, expected_action, expected_failure_type, expected_error_message", [ (403, ResponseAction.FAIL, FailureType.config_error, "Forbidden. You don't have permission to access this resource."), - (404, ResponseAction.FAIL, FailureType.system_error, "Not found. The requested resource was not found on the server.") - ] + (404, ResponseAction.FAIL, FailureType.system_error, "Not found. The requested resource was not found on the server."), + ], ) -def test_given_error_code_in_response_http_status_error_handler_returns_expected_actions(error, expected_action, expected_failure_type, expected_error_message): +def test_given_error_code_in_response_http_status_error_handler_returns_expected_actions( + error, expected_action, expected_failure_type, expected_error_message +): response = requests.Response() response.status_code = error error_resolution = HttpStatusErrorHandler(logger).interpret_response(response) @@ -98,14 +100,10 @@ def test_given_injected_error_mapping_returns_expected_action(): assert default_error_resolution.error_message == f"Unexpected HTTP Status Code in error handler: {mock_response.status_code}" mapped_error_resolution = ErrorResolution( - response_action=ResponseAction.IGNORE, - failure_type=FailureType.transient_error, - error_message="Injected mapping" - ) - - error_mapping = { - 509: mapped_error_resolution - } + 
response_action=ResponseAction.IGNORE, failure_type=FailureType.transient_error, error_message="Injected mapping" + ) + + error_mapping = {509: mapped_error_resolution} actual_error_resolution = HttpStatusErrorHandler(logger, error_mapping).interpret_response(mock_response) diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_json_error_message_parser.py b/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_json_error_message_parser.py index 81f838170341..2eff4bc3f05e 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_json_error_message_parser.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_json_error_message_parser.py @@ -8,23 +8,21 @@ @pytest.mark.parametrize( - "response_body,expected_error_message", - [ - (b'{"message": "json error message"}', "json error message"), - (b'[{"message": "list error message"}]', "list error message"), - (b'[{"message": "list error message 1"}, {"message": "list error message 2"}]', "list error message 1, list error message 2"), - (b'{"error": "messages error message"}', "messages error message"), - (b'[{"errors": "list error message 1"}, {"errors": "list error message 2"}]', "list error message 1, list error message 2"), - (b'{"failures": "failures error message"}', "failures error message"), - (b'{"failure": "failure error message"}', "failure error message"), - (b'{"detail": "detail error message"}', "detail error message"), - (b'{"err": "err error message"}', "err error message"), - (b'{"error_message": "error_message error message"}', "error_message error message"), - (b'{"msg": "msg error message"}', "msg error message"), - (b'{"reason": "reason error message"}', "reason error message"), - (b'{"status_message": "status_message error message"}', "status_message error message"), - ] - + "response_body,expected_error_message", + [ + (b'{"message": "json error message"}', "json error message"), + (b'[{"message": "list 
error message"}]', "list error message"), + (b'[{"message": "list error message 1"}, {"message": "list error message 2"}]', "list error message 1, list error message 2"), + (b'{"error": "messages error message"}', "messages error message"), + (b'[{"errors": "list error message 1"}, {"errors": "list error message 2"}]', "list error message 1, list error message 2"), + (b'{"failures": "failures error message"}', "failures error message"), + (b'{"failure": "failure error message"}', "failure error message"), + (b'{"detail": "detail error message"}', "detail error message"), + (b'{"err": "err error message"}', "err error message"), + (b'{"error_message": "error_message error message"}', "error_message error message"), + (b'{"msg": "msg error message"}', "msg error message"), + (b'{"reason": "reason error message"}', "reason error message"), + (b'{"status_message": "status_message error message"}', "status_message error message"),], ) def test_given_error_message_in_response_body_parse_response_error_message_returns_error_message(response_body, expected_error_message): response = requests.Response() @@ -35,6 +33,6 @@ def test_given_error_message_in_response_body_parse_response_error_message_retur def test_given_invalid_json_body_parse_response_error_message_returns_none(): response = requests.Response() - response._content = b'invalid json body' + response._content = b"invalid json body" error_message = JsonErrorMessageParser().parse_response_error_message(response) assert error_message is None diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_response_models.py b/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_response_models.py index 62cde8d86690..a19d3c8d5fe0 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_response_models.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/error_handlers/test_response_models.py @@ -4,16 +4,15 @@ import requests import requests_mock 
+from airbyte_cdk.models import FailureType from airbyte_cdk.sources.streams.http.error_handlers.response_models import ResponseAction, create_fallback_error_resolution from airbyte_cdk.utils.airbyte_secrets_utils import update_secrets -from airbyte_protocol.models import FailureType _A_SECRET = "a-secret" _A_URL = "https://a-url.com" class DefaultErrorResolutionTest(TestCase): - def setUp(self) -> None: update_secrets([_A_SECRET]) @@ -26,7 +25,10 @@ def test_given_none_when_create_fallback_error_resolution_then_return_error_reso assert error_resolution.failure_type == FailureType.system_error assert error_resolution.response_action == ResponseAction.RETRY - assert error_resolution.error_message == "Error handler did not receive a valid response or exception. This is unexpected please contact Airbyte Support" + assert ( + error_resolution.error_message + == "Error handler did not receive a valid response or exception. This is unexpected please contact Airbyte Support" + ) def test_given_exception_when_create_fallback_error_resolution_then_return_error_resolution(self) -> None: exception = ValueError("This is an exception") diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/test_availability_strategy.py b/airbyte-cdk/python/unit_tests/sources/streams/http/test_availability_strategy.py index 1300ad8e94df..42975d8ed5a9 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/test_availability_strategy.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/test_availability_strategy.py @@ -83,7 +83,7 @@ def read_records(self, *args, **kvargs): http_stream = MockListHttpStream() response = requests.Response() response.status_code = status_code - response.raw = io.BytesIO(json.dumps(json_contents).encode('utf-8')) + response.raw = io.BytesIO(json.dumps(json_contents).encode("utf-8")) mocker.patch.object(requests.Session, "send", return_value=response) actual_is_available, reason = HttpAvailabilityStrategy().check_availability(http_stream, logger) 
@@ -104,7 +104,9 @@ def test_http_availability_raises_unhandled_error(mocker): req.status_code = 404 mocker.patch.object(requests.Session, "send", return_value=req) - assert (False, 'Not found. The requested resource was not found on the server.') == HttpAvailabilityStrategy().check_availability(http_stream, logger) + assert (False, "Not found. The requested resource was not found on the server.") == HttpAvailabilityStrategy().check_availability( + http_stream, logger + ) def test_send_handles_retries_when_checking_availability(mocker, caplog): @@ -120,7 +122,7 @@ def test_send_handles_retries_when_checking_availability(mocker, caplog): mock_send = mocker.patch.object(requests.Session, "send", side_effect=[req_1, req_2, req_3]) with caplog.at_level(logging.INFO): - stream_is_available, _ = HttpAvailabilityStrategy().check_availability(stream=http_stream,logger=logger) + stream_is_available, _ = HttpAvailabilityStrategy().check_availability(stream=http_stream, logger=logger) assert stream_is_available assert mock_send.call_count == 3 diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py b/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py index 27c7c7414e36..8737289a780f 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/test_http.py @@ -490,15 +490,13 @@ def should_retry(self, *args, **kwargs): [ (300, True, True, ResponseAction.RETRY), (200, False, True, ResponseAction.SUCCESS), - (503, False,True, ResponseAction.FAIL), - (503,False,False, ResponseAction.IGNORE) - ] + (503, False, True, ResponseAction.FAIL), + (503, False, False, ResponseAction.IGNORE), + ], ) -def test_http_stream_adapter_http_status_error_handler_should_retry_false_raise_on_http_errors(mocker, - response_status_code: int, - should_retry: bool, - raise_on_http_errors: bool, - expected_response_action: ResponseAction): +def 
test_http_stream_adapter_http_status_error_handler_should_retry_false_raise_on_http_errors( + mocker, response_status_code: int, should_retry: bool, raise_on_http_errors: bool, expected_response_action: ResponseAction +): stream = AutoFailTrueHttpStream() mocker.patch.object(stream, "should_retry", return_value=should_retry) mocker.patch.object(stream, "raise_on_http_errors", raise_on_http_errors) @@ -664,9 +662,19 @@ def test_duplicate_request_params_are_deduped(deduplicate_query_params, path, pa if expected_url is None: with pytest.raises(ValueError): - stream._http_client._create_prepared_request(http_method=stream.http_method, url=stream._join_url(stream.url_base, path), params=params, dedupe_query_params=deduplicate_query_params) + stream._http_client._create_prepared_request( + http_method=stream.http_method, + url=stream._join_url(stream.url_base, path), + params=params, + dedupe_query_params=deduplicate_query_params, + ) else: - prepared_request = stream._http_client._create_prepared_request(http_method=stream.http_method, url=stream._join_url(stream.url_base, path), params=params, dedupe_query_params=deduplicate_query_params) + prepared_request = stream._http_client._create_prepared_request( + http_method=stream.http_method, + url=stream._join_url(stream.url_base, path), + params=params, + dedupe_query_params=deduplicate_query_params, + ) assert prepared_request.url == expected_url @@ -689,8 +697,13 @@ def __init__(self, records: List[Mapping[str, Any]]): def url_base(self) -> str: return "https://airbyte.io/api/v1" - def path(self, *, stream_state: Optional[Mapping[str, Any]] = None, stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None) -> str: + def path( + self, + *, + stream_state: Optional[Mapping[str, Any]] = None, + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> str: return "/stub" def next_page_token(self, response: requests.Response) 
-> Optional[Mapping[str, Any]]: @@ -709,12 +722,12 @@ def _read_single_page( self.state = {"__ab_full_refresh_sync_complete": True} def parse_response( - self, - response: requests.Response, - *, - stream_state: Mapping[str, Any], - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None + self, + response: requests.Response, + *, + stream_state: Mapping[str, Any], + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, ) -> Iterable[Mapping[str, Any]]: return [] @@ -736,8 +749,13 @@ def __init__(self, record_pages: List[List[Mapping[str, Any]]]): def url_base(self) -> str: return "https://airbyte.io/api/v1" - def path(self, *, stream_state: Optional[Mapping[str, Any]] = None, stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None) -> str: + def path( + self, + *, + stream_state: Optional[Mapping[str, Any]] = None, + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> str: return "/stub" def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: @@ -759,12 +777,12 @@ def read_records( self.state = {"__ab_full_refresh_sync_complete": True} def parse_response( - self, - response: requests.Response, - *, - stream_state: Mapping[str, Any], - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None + self, + response: requests.Response, + *, + stream_state: Mapping[str, Any], + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, ) -> Iterable[Mapping[str, Any]]: return [] @@ -779,8 +797,13 @@ class StubHttpSubstream(HttpSubStream): def url_base(self) -> str: return "https://airbyte.io/api/v1" - def path(self, *, stream_state: Optional[Mapping[str, Any]] = None, stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: 
Optional[Mapping[str, Any]] = None) -> str: + def path( + self, + *, + stream_state: Optional[Mapping[str, Any]] = None, + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> str: return "/stub" def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: @@ -800,12 +823,12 @@ def _read_pages( ] def parse_response( - self, - response: requests.Response, - *, - stream_state: Mapping[str, Any], - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None + self, + response: requests.Response, + *, + stream_state: Mapping[str, Any], + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, ) -> Iterable[Mapping[str, Any]]: return [] @@ -841,7 +864,7 @@ def test_substream_with_resumable_full_refresh_parent(): [ {"id": "page_3_abc"}, {"id": "page_3_def"}, - ] + ], ] expected_slices = [ @@ -987,10 +1010,7 @@ def test_resumable_full_refresh_read_from_state(mocker): mocker.patch.object(stream, method, wraps=getattr(stream, method)) checkpoint_reader = stream._get_checkpoint_reader( - cursor_field=[], - logger=logging.getLogger("airbyte"), - sync_mode=SyncMode.full_refresh, - stream_state={"page": 3} + cursor_field=[], logger=logging.getLogger("airbyte"), sync_mode=SyncMode.full_refresh, stream_state={"page": 3} ) next_stream_slice = checkpoint_reader.next() records = [] @@ -1036,10 +1056,7 @@ def test_resumable_full_refresh_legacy_stream_slice(mocker): mocker.patch.object(stream, method, wraps=getattr(stream, method)) checkpoint_reader = stream._get_checkpoint_reader( - cursor_field=[], - logger=logging.getLogger("airbyte"), - sync_mode=SyncMode.full_refresh, - stream_state={"page": 2} + cursor_field=[], logger=logging.getLogger("airbyte"), sync_mode=SyncMode.full_refresh, stream_state={"page": 2} ) next_stream_slice = checkpoint_reader.next() records = [] @@ -1082,8 +1099,13 @@ def 
__init__(self, parent: HttpStream, partition_id_to_child_records: Mapping[st def url_base(self) -> str: return "https://airbyte.io/api/v1" - def path(self, *, stream_state: Optional[Mapping[str, Any]] = None, stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None) -> str: + def path( + self, + *, + stream_state: Optional[Mapping[str, Any]] = None, + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> str: return f"/parents/{stream_slice.get('parent_id')}/children" def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: @@ -1113,12 +1135,12 @@ def _fetch_next_page( return requests.PreparedRequest(), requests.Response() def parse_response( - self, - response: requests.Response, - *, - stream_state: Mapping[str, Any], - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None + self, + response: requests.Response, + *, + stream_state: Mapping[str, Any], + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, ) -> Iterable[Mapping[str, Any]]: partition_id = stream_slice.get("parent").get("parent_id") if partition_id in self._partition_id_to_child_records: @@ -1141,14 +1163,21 @@ def test_substream_resumable_full_refresh_read_from_start(mocker): {"parent_id": "100", "name": "christopher_nolan"}, {"parent_id": "101", "name": "celine_song"}, {"parent_id": "102", "name": "david_fincher"}, - ] parent_stream = StubParentHttpStream(records=parent_records) parents_to_children_records = { - "100": [{"id": "a200", "parent_id": "100", "film": "interstellar"}, {"id": "a201", "parent_id": "100", "film": "oppenheimer"}, {"id": "a202", "parent_id": "100", "film": "inception"}], + "100": [ + {"id": "a200", "parent_id": "100", "film": "interstellar"}, + {"id": "a201", "parent_id": "100", "film": "oppenheimer"}, + {"id": "a202", "parent_id": "100", 
"film": "inception"}, + ], "101": [{"id": "b200", "parent_id": "101", "film": "past_lives"}, {"id": "b201", "parent_id": "101", "film": "materialists"}], - "102": [{"id": "c200", "parent_id": "102", "film": "the_social_network"}, {"id": "c201", "parent_id": "102", "film": "gone_girl"}, {"id": "c202", "parent_id": "102", "film": "the_curious_case_of_benjamin_button"}], + "102": [ + {"id": "c200", "parent_id": "102", "film": "the_social_network"}, + {"id": "c201", "parent_id": "102", "film": "gone_girl"}, + {"id": "c202", "parent_id": "102", "film": "the_curious_case_of_benjamin_button"}, + ], } stream = StubSubstreamResumableFullRefreshStream(parent=parent_stream, partition_id_to_child_records=parents_to_children_records) @@ -1168,61 +1197,31 @@ def test_substream_resumable_full_refresh_read_from_start(mocker): { "states": [ { - "cursor": { - "__ab_full_refresh_sync_complete": True - }, - "partition": { - "parent": {"name": "christopher_nolan", "parent_id": "100"} - } + "cursor": {"__ab_full_refresh_sync_complete": True}, + "partition": {"parent": {"name": "christopher_nolan", "parent_id": "100"}}, } ] }, { "states": [ { - "cursor": { - "__ab_full_refresh_sync_complete": True - }, - "partition": { - "parent": {"name": "christopher_nolan", "parent_id": "100"} - } + "cursor": {"__ab_full_refresh_sync_complete": True}, + "partition": {"parent": {"name": "christopher_nolan", "parent_id": "100"}}, }, - { - "cursor": { - "__ab_full_refresh_sync_complete": True - }, - "partition": { - "parent": {"name": "celine_song", "parent_id": "101"} - } - } + {"cursor": {"__ab_full_refresh_sync_complete": True}, "partition": {"parent": {"name": "celine_song", "parent_id": "101"}}}, ] }, { "states": [ { - "cursor": { - "__ab_full_refresh_sync_complete": True - }, - "partition": { - "parent": {"name": "christopher_nolan", "parent_id": "100"} - } + "cursor": {"__ab_full_refresh_sync_complete": True}, + "partition": {"parent": {"name": "christopher_nolan", "parent_id": "100"}}, }, + 
{"cursor": {"__ab_full_refresh_sync_complete": True}, "partition": {"parent": {"name": "celine_song", "parent_id": "101"}}}, { - "cursor": { - "__ab_full_refresh_sync_complete": True - }, - "partition": { - "parent": {"name": "celine_song", "parent_id": "101"} - } + "cursor": {"__ab_full_refresh_sync_complete": True}, + "partition": {"parent": {"name": "david_fincher", "parent_id": "102"}}, }, - { - "cursor": { - "__ab_full_refresh_sync_complete": True - }, - "partition": { - "parent": {"name": "david_fincher", "parent_id": "102"} - } - } ] }, ] @@ -1239,46 +1238,14 @@ def test_substream_resumable_full_refresh_read_from_start(mocker): assert getattr(stream, "_read_pages").call_count == 3 expected = [ - { - "film": "interstellar", - "id": "a200", - "parent_id": "100" - }, - { - "film": "oppenheimer", - "id": "a201", - "parent_id": "100" - }, - { - "film": "inception", - "id": "a202", - "parent_id": "100" - }, - { - "film": "past_lives", - "id": "b200", - "parent_id": "101" - }, - { - "film": "materialists", - "id": "b201", - "parent_id": "101" - }, - { - "film": "the_social_network", - "id": "c200", - "parent_id": "102" - }, - { - "film": "gone_girl", - "id": "c201", - "parent_id": "102" - }, - { - "film": "the_curious_case_of_benjamin_button", - "id": "c202", - "parent_id": "102" - } + {"film": "interstellar", "id": "a200", "parent_id": "100"}, + {"film": "oppenheimer", "id": "a201", "parent_id": "100"}, + {"film": "inception", "id": "a202", "parent_id": "100"}, + {"film": "past_lives", "id": "b200", "parent_id": "101"}, + {"film": "materialists", "id": "b201", "parent_id": "101"}, + {"film": "the_social_network", "id": "c200", "parent_id": "102"}, + {"film": "gone_girl", "id": "c201", "parent_id": "102"}, + {"film": "the_curious_case_of_benjamin_button", "id": "c202", "parent_id": "102"}, ] assert records == expected @@ -1294,13 +1261,15 @@ def test_substream_resumable_full_refresh_read_from_state(mocker): parent_records = [ {"parent_id": "100", "name": 
"christopher_nolan"}, {"parent_id": "101", "name": "celine_song"}, - ] parent_stream = StubParentHttpStream(records=parent_records) parents_to_children_records = { - "100": [{"id": "a200", "parent_id": "100", "film": "interstellar"}, {"id": "a201", "parent_id": "100", "film": "oppenheimer"}, - {"id": "a202", "parent_id": "100", "film": "inception"}], + "100": [ + {"id": "a200", "parent_id": "100", "film": "interstellar"}, + {"id": "a201", "parent_id": "100", "film": "oppenheimer"}, + {"id": "a202", "parent_id": "100", "film": "inception"}, + ], "101": [{"id": "b200", "parent_id": "101", "film": "past_lives"}, {"id": "b201", "parent_id": "101", "film": "materialists"}], } stream = StubSubstreamResumableFullRefreshStream(parent=parent_stream, partition_id_to_child_records=parents_to_children_records) @@ -1318,15 +1287,11 @@ def test_substream_resumable_full_refresh_read_from_state(mocker): stream_state={ "states": [ { - "cursor": { - "__ab_full_refresh_sync_complete": True - }, - "partition": { - "parent": {"name": "christopher_nolan", "parent_id": "100"} - } + "cursor": {"__ab_full_refresh_sync_complete": True}, + "partition": {"parent": {"name": "christopher_nolan", "parent_id": "100"}}, }, ] - } + }, ) next_stream_slice = checkpoint_reader.next() records = [] @@ -1335,21 +1300,10 @@ def test_substream_resumable_full_refresh_read_from_state(mocker): { "states": [ { - "cursor": { - "__ab_full_refresh_sync_complete": True - }, - "partition": { - "parent": {"name": "christopher_nolan", "parent_id": "100"} - } + "cursor": {"__ab_full_refresh_sync_complete": True}, + "partition": {"parent": {"name": "christopher_nolan", "parent_id": "100"}}, }, - { - "cursor": { - "__ab_full_refresh_sync_complete": True - }, - "partition": { - "parent": {"name": "celine_song", "parent_id": "101"} - } - } + {"cursor": {"__ab_full_refresh_sync_complete": True}, "partition": {"parent": {"name": "celine_song", "parent_id": "101"}}}, ] }, ] @@ -1366,16 +1320,8 @@ def 
test_substream_resumable_full_refresh_read_from_state(mocker): assert getattr(stream, "_read_pages").call_count == 1 expected = [ - { - "film": "past_lives", - "id": "b200", - "parent_id": "101" - }, - { - "film": "materialists", - "id": "b201", - "parent_id": "101" - }, + {"film": "past_lives", "id": "b200", "parent_id": "101"}, + {"film": "materialists", "id": "b201", "parent_id": "101"}, ] assert records == expected @@ -1398,8 +1344,13 @@ def cursor_field(self) -> Union[str, List[str]]: pytest.param([], False, ResumableFullRefreshCursor(), id="test_stream_supports_resumable_full_refresh_cursor"), pytest.param(["updated_at"], False, None, id="test_incremental_stream_does_not_use_cursor"), pytest.param(["updated_at"], True, None, id="test_incremental_substream_does_not_use_cursor"), - pytest.param([], True, SubstreamResumableFullRefreshCursor(), id="test_full_refresh_substream_automatically_applies_substream_resumable_full_refresh_cursor"), - ] + pytest.param( + [], + True, + SubstreamResumableFullRefreshCursor(), + id="test_full_refresh_substream_automatically_applies_substream_resumable_full_refresh_cursor", + ), + ], ) def test_get_cursor(cursor_field, is_substream, expected_cursor): stream = StubWithCursorFields(set_cursor_field=cursor_field, has_multiple_slices=is_substream) diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/test_http_client.py b/airbyte-cdk/python/unit_tests/sources/streams/http/test_http_client.py index efb4a9f1f4bd..f7b0a11f69f4 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/test_http_client.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/test_http_client.py @@ -48,19 +48,23 @@ def test_request_session_returns_valid_session(use_cache, expected_session): True, "https://test_base_url.com/v1/endpoint?param1=value1", {}, - "https://test_base_url.com/v1/endpoint?param1=value1", id="test_params_only_in_path" + "https://test_base_url.com/v1/endpoint?param1=value1", + id="test_params_only_in_path", ), 
pytest.param( True, "https://test_base_url.com/v1/endpoint", {"param1": "value1"}, - "https://test_base_url.com/v1/endpoint?param1=value1", id="test_params_only_in_path" + "https://test_base_url.com/v1/endpoint?param1=value1", + id="test_params_only_in_path", ), pytest.param( True, "https://test_base_url.com/v1/endpoint", None, - "https://test_base_url.com/v1/endpoint", id="test_params_is_none_and_no_params_in_path"), + "https://test_base_url.com/v1/endpoint", + id="test_params_is_none_and_no_params_in_path", + ), pytest.param( True, "https://test_base_url.com/v1/endpoint?param1=value1", @@ -119,7 +123,9 @@ def test_duplicate_request_params_are_deduped(deduplicate_query_params, url, par with pytest.raises(ValueError): http_client._create_prepared_request(http_method="get", url=url, dedupe_query_params=deduplicate_query_params, params=params) else: - prepared_request = http_client._create_prepared_request(http_method="get", url=url, dedupe_query_params=deduplicate_query_params, params=params) + prepared_request = http_client._create_prepared_request( + http_method="get", url=url, dedupe_query_params=deduplicate_query_params, params=params + ) assert prepared_request.url == expected_url @@ -127,7 +133,9 @@ def test_create_prepared_response_given_given_both_json_and_data_raises_request_ http_client = test_http_client() with pytest.raises(RequestBodyException): - http_client._create_prepared_request(http_method="get", url="https://test_base_url.com/v1/endpoint", json={"test": "json"}, data={"test": "data"}) + http_client._create_prepared_request( + http_method="get", url="https://test_base_url.com/v1/endpoint", json={"test": "json"}, data={"test": "data"} + ) @pytest.mark.parametrize( @@ -139,7 +147,9 @@ def test_create_prepared_response_given_given_both_json_and_data_raises_request_ ) def test_create_prepared_response_given_either_json_or_data_returns_valid_request(json, data): http_client = test_http_client() - prepared_request = 
http_client._create_prepared_request(http_method="get", url="https://test_base_url.com/v1/endpoint", json=json, data=data) + prepared_request = http_client._create_prepared_request( + http_method="get", url="https://test_base_url.com/v1/endpoint", json=json, data=data + ) assert prepared_request assert isinstance(prepared_request, requests.PreparedRequest) @@ -155,7 +165,9 @@ def test_valid_basic_send_request(mocker): mocked_response.status_code = 200 mocked_response.headers = {} mocker.patch.object(requests.Session, "send", return_value=mocked_response) - returned_request, returned_response = http_client.send_request(http_method="get", url="https://test_base_url.com/v1/endpoint", request_kwargs={}) + returned_request, returned_response = http_client.send_request( + http_method="get", url="https://test_base_url.com/v1/endpoint", request_kwargs={} + ) assert isinstance(returned_request, requests.PreparedRequest) assert returned_response == mocked_response @@ -166,8 +178,10 @@ def test_send_raises_airbyte_traced_exception_with_fail_response_action(): http_client = HttpClient( name="test", logger=MagicMock(), - error_handler=HttpStatusErrorHandler(logger=MagicMock(), error_mapping={400: ErrorResolution(ResponseAction.FAIL, FailureType.system_error, "test error message")}), - session=mocked_session + error_handler=HttpStatusErrorHandler( + logger=MagicMock(), error_mapping={400: ErrorResolution(ResponseAction.FAIL, FailureType.system_error, "test error message")} + ), + session=mocked_session, ) prepared_request = requests.PreparedRequest() mocked_response = requests.Response() @@ -190,8 +204,10 @@ def test_send_ignores_with_ignore_reponse_action_and_returns_response(): http_client = HttpClient( name="test", logger=mocked_logger, - error_handler=HttpStatusErrorHandler(logger=MagicMock(), error_mapping={300: ErrorResolution(ResponseAction.IGNORE, FailureType.system_error, "test ignore message")}), - session=mocked_session + error_handler=HttpStatusErrorHandler( + 
logger=MagicMock(), error_mapping={300: ErrorResolution(ResponseAction.IGNORE, FailureType.system_error, "test ignore message")} + ), + session=mocked_session, ) prepared_request = http_client._create_prepared_request(http_method="get", url="https://test_base_url.com/v1/endpoint") @@ -204,7 +220,6 @@ def test_send_ignores_with_ignore_reponse_action_and_returns_response(): class CustomBackoffStrategy(BackoffStrategy): - def __init__(self, backoff_time_value: float) -> None: self._backoff_time_value = backoff_time_value @@ -212,19 +227,15 @@ def backoff_time(self, *args, **kwargs) -> float: return self._backoff_time_value -@pytest.mark.parametrize( - "backoff_time_value, exception_type", - [ - (0.1, UserDefinedBackoffException), - (None, DefaultBackoffException) - ] -) +@pytest.mark.parametrize("backoff_time_value, exception_type", [(0.1, UserDefinedBackoffException), (None, DefaultBackoffException)]) def test_raises_backoff_exception_with_retry_response_action(mocker, backoff_time_value, exception_type): http_client = HttpClient( name="test", logger=MagicMock(), - error_handler=HttpStatusErrorHandler(logger=MagicMock(), error_mapping={408: ErrorResolution(ResponseAction.FAIL, FailureType.system_error, "test retry message")}), - backoff_strategy=CustomBackoffStrategy(backoff_time_value=backoff_time_value) + error_handler=HttpStatusErrorHandler( + logger=MagicMock(), error_mapping={408: ErrorResolution(ResponseAction.FAIL, FailureType.system_error, "test retry message")} + ), + backoff_strategy=CustomBackoffStrategy(backoff_time_value=backoff_time_value), ) prepared_request = http_client._create_prepared_request(http_method="get", url="https://test_base_url.com/v1/endpoint") mocked_response = MagicMock(spec=requests.Response) @@ -233,25 +244,25 @@ def test_raises_backoff_exception_with_retry_response_action(mocker, backoff_tim http_client._logger.info = MagicMock() mocker.patch.object(requests.Session, "send", return_value=mocked_response) - 
mocker.patch.object(http_client._error_handler, "interpret_response", return_value=ErrorResolution(ResponseAction.RETRY, FailureType.system_error, "test retry message")) + mocker.patch.object( + http_client._error_handler, + "interpret_response", + return_value=ErrorResolution(ResponseAction.RETRY, FailureType.system_error, "test retry message"), + ) with pytest.raises(exception_type): http_client._send(prepared_request, {}) -@pytest.mark.parametrize( - "backoff_time_value, exception_type", - [ - (0.1, UserDefinedBackoffException), - (None, DefaultBackoffException) - ] -) +@pytest.mark.parametrize("backoff_time_value, exception_type", [(0.1, UserDefinedBackoffException), (None, DefaultBackoffException)]) def test_raises_backoff_exception_with_response_with_unmapped_error(mocker, backoff_time_value, exception_type): http_client = HttpClient( name="test", logger=MagicMock(), - error_handler=HttpStatusErrorHandler(logger=MagicMock(), error_mapping={408: ErrorResolution(ResponseAction.FAIL, FailureType.system_error, "test retry message")}), - backoff_strategy=CustomBackoffStrategy(backoff_time_value=backoff_time_value) + error_handler=HttpStatusErrorHandler( + logger=MagicMock(), error_mapping={408: ErrorResolution(ResponseAction.FAIL, FailureType.system_error, "test retry message")} + ), + backoff_strategy=CustomBackoffStrategy(backoff_time_value=backoff_time_value), ) prepared_request = requests.PreparedRequest() mocked_response = MagicMock(spec=requests.Response) @@ -289,8 +300,10 @@ def update_response(*args, **kwargs): http_client = HttpClient( name="test", logger=MagicMock(), - error_handler=HttpStatusErrorHandler(logger=MagicMock(), error_mapping={408: ErrorResolution(ResponseAction.RETRY, FailureType.system_error, "test retry message")}), - session=mocked_session + error_handler=HttpStatusErrorHandler( + logger=MagicMock(), error_mapping={408: ErrorResolution(ResponseAction.RETRY, FailureType.system_error, "test retry message")} + ), + session=mocked_session, ) 
prepared_request = requests.PreparedRequest() @@ -302,15 +315,15 @@ def update_response(*args, **kwargs): def test_session_request_exception_raises_backoff_exception(): - error_handler = HttpStatusErrorHandler(logger=MagicMock(), error_mapping={requests.exceptions.RequestException: ErrorResolution(ResponseAction.RETRY, FailureType.system_error, "test retry message")}) - mocked_session = MagicMock(spec=requests.Session) - mocked_session.send.side_effect = requests.RequestException - http_client = HttpClient( - name="test", + error_handler = HttpStatusErrorHandler( logger=MagicMock(), - error_handler=error_handler, - session=mocked_session + error_mapping={ + requests.exceptions.RequestException: ErrorResolution(ResponseAction.RETRY, FailureType.system_error, "test retry message") + }, ) + mocked_session = MagicMock(spec=requests.Session) + mocked_session.send.side_effect = requests.RequestException + http_client = HttpClient(name="test", logger=MagicMock(), error_handler=error_handler, session=mocked_session) prepared_request = requests.PreparedRequest() with pytest.raises(DefaultBackoffException): @@ -347,12 +360,7 @@ def test_send_handles_response_action_given_session_send_raises_request_exceptio mocked_session = MagicMock(spec=requests.Session) mocked_session.send.side_effect = requests.RequestException - http_client = HttpClient( - name="test", - logger=MagicMock(), - error_handler=custom_error_handler, - session=mocked_session - ) + http_client = HttpClient(name="test", logger=MagicMock(), error_handler=custom_error_handler, session=mocked_session) prepared_request = requests.PreparedRequest() with pytest.raises(AirbyteTracedException) as e: @@ -383,8 +391,10 @@ def update_response(*args, **kwargs): http_client = HttpClient( name="test", logger=MagicMock(), - error_handler=HttpStatusErrorHandler(logger=MagicMock(), error_mapping={408: ErrorResolution(ResponseAction.RETRY, FailureType.system_error, "test retry message")}), - session=mocked_session + 
error_handler=HttpStatusErrorHandler( + logger=MagicMock(), error_mapping={408: ErrorResolution(ResponseAction.RETRY, FailureType.system_error, "test retry message")} + ), + session=mocked_session, ) prepared_request = requests.PreparedRequest() @@ -400,7 +410,13 @@ class BackoffStrategy: def backoff_time(self, *args, **kwargs): return 0.001 - http_client = HttpClient(name="test", logger=MagicMock(), error_handler=HttpStatusErrorHandler(logger=MagicMock()), backoff_strategy=BackoffStrategy(), disable_retries=True) + http_client = HttpClient( + name="test", + logger=MagicMock(), + error_handler=HttpStatusErrorHandler(logger=MagicMock()), + backoff_strategy=BackoffStrategy(), + disable_retries=True, + ) mocked_response = MagicMock(spec=requests.Response) mocked_response.status_code = 429 @@ -421,7 +437,9 @@ class BackoffStrategy: def backoff_time(self, *args, **kwargs): return 0.001 - http_client = HttpClient(name="test", logger=MagicMock(), error_handler=HttpStatusErrorHandler(logger=MagicMock()), backoff_strategy=BackoffStrategy()) + http_client = HttpClient( + name="test", logger=MagicMock(), error_handler=HttpStatusErrorHandler(logger=MagicMock()), backoff_strategy=BackoffStrategy() + ) mocked_response = MagicMock(spec=requests.Response) mocked_response.status_code = 429 @@ -444,7 +462,12 @@ def backoff_time(self, *args, **kwargs): retries = 3 - http_client = HttpClient(name="test", logger=MagicMock(), error_handler=HttpStatusErrorHandler(logger=MagicMock(), max_retries=retries), backoff_strategy=BackoffStrategy()) + http_client = HttpClient( + name="test", + logger=MagicMock(), + error_handler=HttpStatusErrorHandler(logger=MagicMock(), max_retries=retries), + backoff_strategy=BackoffStrategy(), + ) mocked_response = MagicMock(spec=requests.Response) mocked_response.status_code = 429 @@ -461,7 +484,12 @@ def backoff_time(self, *args, **kwargs): @pytest.mark.usefixtures("mock_sleep") def test_backoff_strategy_max_time(): - error_handler = 
HttpStatusErrorHandler(logger=MagicMock(), error_mapping={requests.RequestException: ErrorResolution(ResponseAction.RETRY, FailureType.system_error, "test retry message")}, max_retries=10, max_time=timedelta(seconds=2)) + error_handler = HttpStatusErrorHandler( + logger=MagicMock(), + error_mapping={requests.RequestException: ErrorResolution(ResponseAction.RETRY, FailureType.system_error, "test retry message")}, + max_retries=10, + max_time=timedelta(seconds=2), + ) class BackoffStrategy: def backoff_time(self, *args, **kwargs): @@ -488,7 +516,9 @@ class BackoffStrategy: def backoff_time(self, *args, **kwargs): return 0.001 - http_client = HttpClient(name="test", logger=MagicMock(), error_handler=HttpStatusErrorHandler(logger=MagicMock()), backoff_strategy=BackoffStrategy()) + http_client = HttpClient( + name="test", logger=MagicMock(), error_handler=HttpStatusErrorHandler(logger=MagicMock()), backoff_strategy=BackoffStrategy() + ) mocked_response = MagicMock(spec=requests.Response) mocked_response.status_code = 429 @@ -505,7 +535,9 @@ def backoff_time(self, *args, **kwargs): assert len(trace_messages) == mocked_send.call_count -@pytest.mark.parametrize("exit_on_rate_limit, expected_call_count, expected_error",[[True, 6, DefaultBackoffException] ,[False, 38, OverflowError]]) +@pytest.mark.parametrize( + "exit_on_rate_limit, expected_call_count, expected_error", [[True, 6, DefaultBackoffException], [False, 38, OverflowError]] +) @pytest.mark.usefixtures("mock_sleep") def test_backoff_strategy_endless(exit_on_rate_limit, expected_call_count, expected_error): http_client = HttpClient(name="test", logger=MagicMock(), error_handler=HttpStatusErrorHandler(logger=MagicMock())) @@ -519,5 +551,7 @@ def test_backoff_strategy_endless(exit_on_rate_limit, expected_call_count, expec with patch.object(requests.Session, "send", return_value=mocked_response) as mocked_send: with pytest.raises(expected_error): - http_client.send_request(http_method="get", 
url="https://test_base_url.com/v1/endpoint", request_kwargs={}, exit_on_rate_limit=exit_on_rate_limit) + http_client.send_request( + http_method="get", url="https://test_base_url.com/v1/endpoint", request_kwargs={}, exit_on_rate_limit=exit_on_rate_limit + ) assert mocked_send.call_count == expected_call_count diff --git a/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py b/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py index b40f93ed0327..9f6f943e0840 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py @@ -470,7 +470,9 @@ def test_configured_json_schema(): }, } - configured_stream, internal_config, logger, slice_logger, message_repository, state_manager = setup_stream_dependencies(current_json_schema) + configured_stream, internal_config, logger, slice_logger, message_repository, state_manager = setup_stream_dependencies( + current_json_schema + ) records = [ {"id": 1, "partition": 1}, {"id": 2, "partition": 1}, @@ -506,7 +508,9 @@ def test_configured_json_schema_with_invalid_properties(): del stream_schema["properties"][old_user_insights] del stream_schema["properties"][old_feature_info] - configured_stream, internal_config, logger, slice_logger, message_repository, state_manager = setup_stream_dependencies(configured_json_schema) + configured_stream, internal_config, logger, slice_logger, message_repository, state_manager = setup_stream_dependencies( + configured_json_schema + ) records = [ {"id": 1, "partition": 1}, {"id": 2, "partition": 1}, @@ -521,7 +525,9 @@ def test_configured_json_schema_with_invalid_properties(): assert old_user_insights not in configured_json_schema_properties assert old_feature_info not in configured_json_schema_properties for stream_schema_property in stream_schema["properties"]: - assert stream_schema_property in configured_json_schema_properties, f"Stream schema property: {stream_schema_property} 
missing in configured schema" + assert ( + stream_schema_property in configured_json_schema_properties + ), f"Stream schema property: {stream_schema_property} missing in configured schema" assert stream_schema["properties"][stream_schema_property] == configured_json_schema_properties[stream_schema_property] diff --git a/airbyte-cdk/python/unit_tests/sources/streams/test_streams_core.py b/airbyte-cdk/python/unit_tests/sources/streams/test_streams_core.py index 019705d1cd75..9f356b5c80bb 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/test_streams_core.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/test_streams_core.py @@ -46,6 +46,7 @@ class StreamStubIncremental(Stream, CheckpointMixin): """ Stub full incremental class to assist with testing. """ + _state = {} def read_records( @@ -74,6 +75,7 @@ class StreamStubResumableFullRefresh(Stream, CheckpointMixin): """ Stub full incremental class to assist with testing. """ + _state = {} def read_records( @@ -100,6 +102,7 @@ class StreamStubLegacyStateInterface(Stream): """ Stub full incremental class to assist with testing. 
""" + _state = {} def read_records( @@ -154,17 +157,22 @@ def url_base(self) -> str: def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: pass - def path(self, *, stream_state: Optional[Mapping[str, Any]] = None, stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None) -> str: + def path( + self, + *, + stream_state: Optional[Mapping[str, Any]] = None, + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> str: return "/stub" def parse_response( - self, - response: requests.Response, - *, - stream_state: Mapping[str, Any], - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None + self, + response: requests.Response, + *, + stream_state: Mapping[str, Any], + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, ) -> Iterable[Mapping[str, Any]]: return [] @@ -203,17 +211,22 @@ def stream_slices( def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: pass - def path(self, *, stream_state: Optional[Mapping[str, Any]] = None, stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None) -> str: + def path( + self, + *, + stream_state: Optional[Mapping[str, Any]] = None, + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> str: return "/stub" def parse_response( - self, - response: requests.Response, - *, - stream_state: Mapping[str, Any], - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None + self, + response: requests.Response, + *, + stream_state: Mapping[str, Any], + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, ) -> Iterable[Mapping[str, Any]]: return [] @@ -234,17 +247,22 @@ def 
read_records( def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: return None - def path(self, *, stream_state: Optional[Mapping[str, Any]] = None, stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None) -> str: + def path( + self, + *, + stream_state: Optional[Mapping[str, Any]] = None, + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> str: return "/parent" def parse_response( - self, - response: requests.Response, - *, - stream_state: Mapping[str, Any], - stream_slice: Optional[Mapping[str, Any]] = None, - next_page_token: Optional[Mapping[str, Any]] = None + self, + response: requests.Response, + *, + stream_state: Mapping[str, Any], + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, ) -> Iterable[Mapping[str, Any]]: return [] @@ -374,11 +392,25 @@ def test_get_json_schema_is_cached(mocked_method): [ pytest.param(StreamStubIncremental(), {}, IncrementalCheckpointReader, id="test_incremental_checkpoint_reader"), pytest.param(StreamStubFullRefresh(), {}, FullRefreshCheckpointReader, id="test_full_refresh_checkpoint_reader"), - pytest.param(StreamStubResumableFullRefresh(), {}, ResumableFullRefreshCheckpointReader, id="test_resumable_full_refresh_checkpoint_reader"), - pytest.param(StreamStubLegacyStateInterface(), {}, IncrementalCheckpointReader, id="test_incremental_checkpoint_reader_with_legacy_state"), - pytest.param(CursorBasedStreamStubFullRefresh(), {"next_page_token": 10}, CursorBasedCheckpointReader, id="test_checkpoint_reader_using_rfr_cursor"), - pytest.param(LegacyCursorBasedStreamStubFullRefresh(), {}, LegacyCursorBasedCheckpointReader, id="test_full_refresh_checkpoint_reader_for_legacy_slice_format"), - ] + pytest.param( + StreamStubResumableFullRefresh(), {}, ResumableFullRefreshCheckpointReader, id="test_resumable_full_refresh_checkpoint_reader" + 
), + pytest.param( + StreamStubLegacyStateInterface(), {}, IncrementalCheckpointReader, id="test_incremental_checkpoint_reader_with_legacy_state" + ), + pytest.param( + CursorBasedStreamStubFullRefresh(), + {"next_page_token": 10}, + CursorBasedCheckpointReader, + id="test_checkpoint_reader_using_rfr_cursor", + ), + pytest.param( + LegacyCursorBasedStreamStubFullRefresh(), + {}, + LegacyCursorBasedCheckpointReader, + id="test_full_refresh_checkpoint_reader_for_legacy_slice_format", + ), + ], ) def test_get_checkpoint_reader(stream: Stream, stream_state, expected_checkpoint_reader_type): checkpoint_reader = stream._get_checkpoint_reader( diff --git a/airbyte-cdk/python/unit_tests/sources/streams/utils/test_stream_helper.py b/airbyte-cdk/python/unit_tests/sources/streams/utils/test_stream_helper.py index 8cf1996853fd..da76a78714d7 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/utils/test_stream_helper.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/utils/test_stream_helper.py @@ -11,8 +11,7 @@ def __init__(self, records, exit_on_rate_limit=True): self.records = records self._exit_on_rate_limit = exit_on_rate_limit type(self).exit_on_rate_limit = property( - lambda self: self._get_exit_on_rate_limit(), - lambda self, value: self._set_exit_on_rate_limit(value) + lambda self: self._get_exit_on_rate_limit(), lambda self, value: self._set_exit_on_rate_limit(value) ) def _get_exit_on_rate_limit(self): @@ -31,7 +30,7 @@ def read_records(self, sync_mode, stream_slice): ([{"id": 1}], None, True, {"id": 1}, False), # Single record, with setter ([{"id": 1}, {"id": 2}], None, True, {"id": 1}, False), # Multiple records, with setter ([], None, True, None, True), # No records, with setter - ] + ], ) def test_get_first_record_for_slice(records, stream_slice, exit_on_rate_limit, expected_result, raises_exception): stream = MockStream(records, exit_on_rate_limit) diff --git a/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py 
b/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py index af6a8b0a5f03..9de46b9e116f 100644 --- a/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py +++ b/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py @@ -198,7 +198,6 @@ def __init__(self, inputs_and_mocked_outputs: List[Tuple[Mapping[str, Any], Iter class MockStreamWithState(MockStreamWithCursor): - def __init__(self, inputs_and_mocked_outputs: List[Tuple[Mapping[str, Any], Iterable[Mapping[str, Any]]]], name: str, state=None): super().__init__(inputs_and_mocked_outputs, name) self._state = state @@ -422,7 +421,7 @@ def _as_state(stream_name: str = "", per_stream_state: Dict[str, Any] = None): state=AirbyteStateMessage( type=AirbyteStateType.STREAM, stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name=stream_name), stream_state=AirbyteStateBlob.parse_obj(per_stream_state) + stream_descriptor=StreamDescriptor(name=stream_name), stream_state=AirbyteStateBlob(per_stream_state) ), ), ) @@ -606,9 +605,7 @@ def test_with_state_attribute(self, mocker): input_state = [ AirbyteStateMessage( type=AirbyteStateType.STREAM, - stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="s1"), stream_state=AirbyteStateBlob.parse_obj(old_state) - ), + stream=AirbyteStreamState(stream_descriptor=StreamDescriptor(name="s1"), stream_state=AirbyteStateBlob(old_state)), ), ] new_state_from_connector = {"cursor": "new_value"} @@ -860,13 +857,7 @@ def test_with_slices(self, mocker): assert messages == expected - @pytest.mark.parametrize( - "slices", - [ - pytest.param([], id="test_slices_as_list"), - pytest.param(iter([]), id="test_slices_as_iterator") - ] - ) + @pytest.mark.parametrize("slices", [pytest.param([], id="test_slices_as_list"), pytest.param(iter([]), id="test_slices_as_iterator")]) def test_no_slices(self, mocker, slices): """ Tests that an incremental read returns at least one state messages even if no records were read: @@ -876,15 +867,11 @@ def 
test_no_slices(self, mocker, slices): input_state = [ AirbyteStateMessage( type=AirbyteStateType.STREAM, - stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="s1"), stream_state=AirbyteStateBlob.parse_obj(state) - ), + stream=AirbyteStreamState(stream_descriptor=StreamDescriptor(name="s1"), stream_state=AirbyteStateBlob(state)), ), AirbyteStateMessage( type=AirbyteStateType.STREAM, - stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="s2"), stream_state=AirbyteStateBlob.parse_obj(state) - ), + stream=AirbyteStreamState(stream_descriptor=StreamDescriptor(name="s2"), stream_state=AirbyteStateBlob(state)), ), ] @@ -1185,15 +1172,12 @@ def test_without_state_attribute_for_stream_with_desc_records(self, mocker): AirbyteStateMessage( type=AirbyteStateType.STREAM, stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name=stream_name), stream_state=AirbyteStateBlob.parse_obj(initial_state) + stream_descriptor=StreamDescriptor(name=stream_name), stream_state=AirbyteStateBlob(initial_state) ), ), ] stream_with_cursor = MockStreamWithCursor( - [ - ( - {"sync_mode": SyncMode.incremental, "stream_slice": {}, "stream_state": initial_state}, stream_output) - ], + [({"sync_mode": SyncMode.incremental, "stream_slice": {}, "stream_state": initial_state}, stream_output)], name=stream_name, ) @@ -1201,6 +1185,7 @@ def mock_get_updated_state(current_stream, current_stream_state, latest_record): state_cursor_value = current_stream_state.get(current_stream.cursor_field, 0) latest_record_value = latest_record.get(current_stream.cursor_field) return {current_stream.cursor_field: max(latest_record_value, state_cursor_value)} + mocker.patch.object(MockStreamWithCursor, "get_updated_state", mock_get_updated_state) mocker.patch.object(MockStreamWithCursor, "get_json_schema", return_value={}) src = MockSource(streams=[stream_with_cursor]) @@ -1306,7 +1291,7 @@ def test_resumable_full_refresh_with_incoming_state(self, mocker): 
type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="s1"), - stream_state=AirbyteStateBlob.parse_obj({"page": 10}), + stream_state=AirbyteStateBlob({"page": 10}), ), ) ] @@ -1433,16 +1418,16 @@ def test_resumable_full_refresh_skip_prior_successful_streams(self, mocker): type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="s1"), - stream_state=AirbyteStateBlob.parse_obj({"__ab_full_refresh_sync_complete": True}), + stream_state=AirbyteStateBlob({"__ab_full_refresh_sync_complete": True}), ), ), AirbyteStateMessage( type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="s2"), - stream_state=AirbyteStateBlob.parse_obj({"page": 10}), + stream_state=AirbyteStateBlob({"page": 10}), ), - ) + ), ] src = MockSource(streams=[s1, s2]) @@ -1713,8 +1698,10 @@ def test_read_nonexistent_stream_emit_incomplete_stream_status(mocker, remove_st expected = _fix_emitted_at([as_stream_status("this_stream_doesnt_exist_in_the_source", AirbyteStreamStatus.INCOMPLETE)]) - expected_error_message = "The stream 'this_stream_doesnt_exist_in_the_source' in your connection configuration was not found in the " \ - "source. Refresh the schema in your replication settings and remove this stream from future sync attempts." + expected_error_message = ( + "The stream 'this_stream_doesnt_exist_in_the_source' in your connection configuration was not found in the " + "source. Refresh the schema in your replication settings and remove this stream from future sync attempts." 
+ ) with pytest.raises(AirbyteTracedException) as exc_info: messages = [remove_stack_trace(message) for message in src.read(logger, {}, catalog)] diff --git a/airbyte-cdk/python/unit_tests/sources/test_connector_state_manager.py b/airbyte-cdk/python/unit_tests/sources/test_connector_state_manager.py index bcef13b9783c..1a5526b105d5 100644 --- a/airbyte-cdk/python/unit_tests/sources/test_connector_state_manager.py +++ b/airbyte-cdk/python/unit_tests/sources/test_connector_state_manager.py @@ -6,7 +6,15 @@ from typing import List import pytest -from airbyte_cdk.models import AirbyteMessage, AirbyteStateBlob, AirbyteStateMessage, AirbyteStateType, AirbyteStreamState, StreamDescriptor +from airbyte_cdk.models import ( + AirbyteMessage, + AirbyteStateBlob, + AirbyteStateMessage, + AirbyteStateMessageSerializer, + AirbyteStateType, + AirbyteStreamState, + StreamDescriptor, +) from airbyte_cdk.models import Type as MessageType from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager, HashableStreamDescriptor @@ -17,24 +25,24 @@ pytest.param( [ { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": {"name": "actors", "namespace": "public"}, "stream_state": {"id": "mando_michael"}}, }, { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": {"name": "actresses", "namespace": "public"}, "stream_state": {"id": "seehorn_rhea"}}, }, ], { - HashableStreamDescriptor(name="actors", namespace="public"): AirbyteStateBlob.parse_obj({"id": "mando_michael"}), - HashableStreamDescriptor(name="actresses", namespace="public"): AirbyteStateBlob.parse_obj({"id": "seehorn_rhea"}), + HashableStreamDescriptor(name="actors", namespace="public"): AirbyteStateBlob({"id": "mando_michael"}), + HashableStreamDescriptor(name="actresses", namespace="public"): AirbyteStateBlob({"id": "seehorn_rhea"}), }, does_not_raise(), id="test_incoming_per_stream_state", ), pytest.param([], {}, does_not_raise(), 
id="test_incoming_empty_stream_state"), pytest.param( - [{"type": AirbyteStateType.STREAM, "stream": {"stream_descriptor": {"name": "actresses", "namespace": "public"}}}], + [{"type": "STREAM", "stream": {"stream_descriptor": {"name": "actresses", "namespace": "public"}}}], {HashableStreamDescriptor(name="actresses", namespace="public"): None}, does_not_raise(), id="test_stream_states_that_have_none_state_blob", @@ -42,25 +50,25 @@ pytest.param( [ { - "type": AirbyteStateType.GLOBAL, + "type": "GLOBAL", "global": { "shared_state": {"television": "better_call_saul"}, "stream_states": [ { - "stream_descriptor": StreamDescriptor(name="actors", namespace="public"), - "stream_state": AirbyteStateBlob.parse_obj({"id": "mando_michael"}), + "stream_descriptor": {"name": "actors", "namespace": "public"}, + "stream_state": {"id": "mando_michael"}, }, { - "stream_descriptor": StreamDescriptor(name="actresses", namespace="public"), - "stream_state": AirbyteStateBlob.parse_obj({"id": "seehorn_rhea"}), + "stream_descriptor": {"name": "actresses", "namespace": "public"}, + "stream_state": {"id": "seehorn_rhea"}, }, ], }, }, ], { - HashableStreamDescriptor(name="actors", namespace="public"): AirbyteStateBlob.parse_obj({"id": "mando_michael"}), - HashableStreamDescriptor(name="actresses", namespace="public"): AirbyteStateBlob.parse_obj({"id": "seehorn_rhea"}), + HashableStreamDescriptor(name="actors", namespace="public"): AirbyteStateBlob({"id": "mando_michael"}), + HashableStreamDescriptor(name="actresses", namespace="public"): AirbyteStateBlob({"id": "seehorn_rhea"}), }, pytest.raises(ValueError), id="test_incoming_global_state_with_shared_state_throws_error", @@ -68,7 +76,7 @@ pytest.param( [ { - "type": AirbyteStateType.GLOBAL, + "type": "GLOBAL", "global": { "stream_states": [ {"stream_descriptor": {"name": "actors", "namespace": "public"}, "stream_state": {"id": "mando_michael"}}, @@ -77,7 +85,7 @@ }, ], { - HashableStreamDescriptor(name="actors", namespace="public"): 
AirbyteStateBlob.parse_obj({"id": "mando_michael"}), + HashableStreamDescriptor(name="actors", namespace="public"): AirbyteStateBlob({"id": "mando_michael"}), }, does_not_raise(), id="test_incoming_global_state_without_shared", @@ -85,20 +93,20 @@ pytest.param( [ { - "type": AirbyteStateType.GLOBAL, + "type": "GLOBAL", "global": { "shared_state": None, "stream_states": [ { - "stream_descriptor": StreamDescriptor(name="actors", namespace="public"), - "stream_state": AirbyteStateBlob.parse_obj({"id": "mando_michael"}), + "stream_descriptor": {"name": "actors", "namespace": "public"}, + "stream_state": {"id": "mando_michael"}, }, ], }, }, ], { - HashableStreamDescriptor(name="actors", namespace="public"): AirbyteStateBlob.parse_obj({"id": "mando_michael"}), + HashableStreamDescriptor(name="actors", namespace="public"): AirbyteStateBlob({"id": "mando_michael"}), }, does_not_raise(), id="test_incoming_global_state_with_none_shared", @@ -106,7 +114,7 @@ pytest.param( [ { - "type": AirbyteStateType.GLOBAL, + "type": "GLOBAL", "global": { "stream_states": [ {"stream_descriptor": {"name": "actresses", "namespace": "public"}}, @@ -122,7 +130,7 @@ ) def test_initialize_state_manager(input_stream_state, expected_stream_state, expected_error): if isinstance(input_stream_state, List): - input_stream_state = [AirbyteStateMessage.parse_obj(state_obj) for state_obj in list(input_stream_state)] + input_stream_state = [AirbyteStateMessageSerializer.load(state_obj) for state_obj in list(input_stream_state)] with expected_error: state_manager = ConnectorStateManager(input_stream_state) @@ -136,11 +144,11 @@ def test_initialize_state_manager(input_stream_state, expected_stream_state, exp pytest.param( [ { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": {"name": "users", "namespace": "public"}, "stream_state": {"created_at": 12345}}, }, { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": {"name": "accounts", 
"namespace": "public"}, "stream_state": {"id": "abc"}}, }, ], @@ -152,10 +160,10 @@ def test_initialize_state_manager(input_stream_state, expected_stream_state, exp pytest.param( [ { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": {"name": "users"}, "stream_state": {"created_at": 12345}}, }, - {"type": AirbyteStateType.STREAM, "stream": {"stream_descriptor": {"name": "accounts"}, "stream_state": {"id": "abc"}}}, + {"type": "STREAM", "stream": {"stream_descriptor": {"name": "accounts"}, "stream_state": {"id": "abc"}}}, ], "users", None, @@ -164,8 +172,8 @@ def test_initialize_state_manager(input_stream_state, expected_stream_state, exp ), pytest.param( [ - {"type": AirbyteStateType.STREAM, "stream": {"stream_descriptor": {"name": "users"}}}, - {"type": AirbyteStateType.STREAM, "stream": {"stream_descriptor": {"name": "accounts"}, "stream_state": {"id": "abc"}}}, + {"type": "STREAM", "stream": {"stream_descriptor": {"name": "users"}}}, + {"type": "STREAM", "stream": {"stream_descriptor": {"name": "accounts"}, "stream_state": {"id": "abc"}}}, ], "users", None, @@ -175,11 +183,11 @@ def test_initialize_state_manager(input_stream_state, expected_stream_state, exp pytest.param( [ { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": {"name": "users", "namespace": "public"}, "stream_state": {"created_at": 12345}}, }, { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": {"name": "accounts", "namespace": "public"}, "stream_state": {"id": "abc"}}, }, ], @@ -191,11 +199,11 @@ def test_initialize_state_manager(input_stream_state, expected_stream_state, exp pytest.param( [ { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": {"name": "users", "namespace": "public"}, "stream_state": {"created_at": 12345}}, }, { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": {"name": "accounts", "namespace": 
"public"}, "stream_state": {"id": "abc"}}, }, ], @@ -208,7 +216,7 @@ def test_initialize_state_manager(input_stream_state, expected_stream_state, exp pytest.param( [ { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": {"name": "users", "namespace": "public"}, "stream_state": None}, }, ], @@ -220,7 +228,7 @@ def test_initialize_state_manager(input_stream_state, expected_stream_state, exp ], ) def test_get_stream_state(input_state, stream_name, namespace, expected_state): - state_messages = [AirbyteStateMessage.parse_obj(state_obj) for state_obj in list(input_state)] + state_messages = [AirbyteStateMessageSerializer.load(state_obj) for state_obj in list(input_state)] state_manager = ConnectorStateManager(state_messages) actual_state = state_manager.get_stream_state(stream_name, namespace) @@ -234,7 +242,7 @@ def test_get_state_returns_deep_copy(): type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="episodes", namespace="public"), - stream_state=AirbyteStateBlob.parse_obj({"id": [109]}), + stream_state=AirbyteStateBlob({"id": [109]}), ), ) ] @@ -252,11 +260,11 @@ def test_get_state_returns_deep_copy(): pytest.param( [ { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": {"name": "actors", "namespace": "public"}, "stream_state": {"id": "mckean_michael"}}, }, { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": {"name": "actresses", "namespace": "public"}, "stream_state": {"id": "seehorn_rhea"}}, }, ], @@ -275,7 +283,7 @@ def test_get_state_returns_deep_copy(): pytest.param( [ { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": {"name": "actresses", "namespace": "public"}, "stream_state": {"id": "seehorn_rhea"}}, } ], @@ -287,7 +295,7 @@ def test_get_state_returns_deep_copy(): pytest.param( [ { - "type": AirbyteStateType.STREAM, + "type": "STREAM", "stream": {"stream_descriptor": 
{"name": "actresses", "namespace": "public"}, "stream_state": {"id": "seehorn_rhea"}}, } ], @@ -299,14 +307,14 @@ def test_get_state_returns_deep_copy(): ], ) def test_update_state_for_stream(start_state, update_name, update_namespace, update_value): - state_messages = [AirbyteStateMessage.parse_obj(state_obj) for state_obj in list(start_state)] + state_messages = [AirbyteStateMessage(state_obj) for state_obj in list(start_state)] state_manager = ConnectorStateManager(state_messages) state_manager.update_state_for_stream(update_name, update_namespace, update_value) - assert state_manager.per_stream_states[ - HashableStreamDescriptor(name=update_name, namespace=update_namespace) - ] == AirbyteStateBlob.parse_obj(update_value) + assert state_manager.per_stream_states[HashableStreamDescriptor(name=update_name, namespace=update_namespace)] == AirbyteStateBlob( + update_value + ) @pytest.mark.parametrize( @@ -318,14 +326,14 @@ def test_update_state_for_stream(start_state, update_name, update_namespace, upd type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="episodes", namespace="public"), - stream_state=AirbyteStateBlob.parse_obj({"created_at": "2022_05_22"}), + stream_state=AirbyteStateBlob({"created_at": "2022_05_22"}), ), ), AirbyteStateMessage( type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="seasons", namespace="public"), - stream_state=AirbyteStateBlob.parse_obj({"id": 1}), + stream_state=AirbyteStateBlob({"id": 1}), ), ), ], @@ -337,7 +345,7 @@ def test_update_state_for_stream(start_state, update_name, update_namespace, upd type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="episodes", namespace="public"), - stream_state=AirbyteStateBlob.parse_obj({"created_at": "2022_05_22"}), + stream_state=AirbyteStateBlob({"created_at": "2022_05_22"}), ), ), ), @@ -373,7 +381,7 @@ def test_update_state_for_stream(start_state, 
update_name, update_namespace, upd type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="episodes", namespace="public"), - stream_state=AirbyteStateBlob.parse_obj({"id": 507}), + stream_state=AirbyteStateBlob({"id": 507}), ), ) ], @@ -396,7 +404,7 @@ def test_update_state_for_stream(start_state, update_name, update_namespace, upd type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="episodes", namespace="public"), - stream_state=AirbyteStateBlob.parse_obj({"id": 507}), + stream_state=AirbyteStateBlob({"id": 507}), ), ) ], diff --git a/airbyte-cdk/python/unit_tests/sources/test_source.py b/airbyte-cdk/python/unit_tests/sources/test_source.py index c7b8e884653b..d548a51b1ebb 100644 --- a/airbyte-cdk/python/unit_tests/sources/test_source.py +++ b/airbyte-cdk/python/unit_tests/sources/test_source.py @@ -13,9 +13,11 @@ AirbyteGlobalState, AirbyteStateBlob, AirbyteStateMessage, + AirbyteStateMessageSerializer, AirbyteStateType, AirbyteStreamState, ConfiguredAirbyteCatalog, + ConfiguredAirbyteCatalogSerializer, StreamDescriptor, SyncMode, Type, @@ -24,7 +26,8 @@ from airbyte_cdk.sources.streams.core import Stream from airbyte_cdk.sources.streams.http.http import HttpStream from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer -from pydantic import ValidationError +from orjson import orjson +from serpyco_rs import SchemaValidationError class MockSource(Source): @@ -74,7 +77,7 @@ def catalog(): }, ] } - return ConfiguredAirbyteCatalog.model_validate(configured_catalog) + return ConfiguredAirbyteCatalogSerializer.load(configured_catalog) @pytest.fixture @@ -154,7 +157,7 @@ def streams(self, config): type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="movies", namespace="public"), - stream_state=AirbyteStateBlob.parse_obj({"created_at": "2009-07-19"}), + stream_state=AirbyteStateBlob({"created_at": "2009-07-19"}), ), 
) ], @@ -190,21 +193,21 @@ def streams(self, config): type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="movies", namespace="public"), - stream_state=AirbyteStateBlob.parse_obj({"created_at": "2009-07-19"}), + stream_state=AirbyteStateBlob({"created_at": "2009-07-19"}), ), ), AirbyteStateMessage( type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="directors", namespace="public"), - stream_state=AirbyteStateBlob.parse_obj({"id": "villeneuve_denis"}), + stream_state=AirbyteStateBlob({"id": "villeneuve_denis"}), ), ), AirbyteStateMessage( type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="actors", namespace="public"), - stream_state=AirbyteStateBlob.parse_obj({"created_at": "1995-12-27"}), + stream_state=AirbyteStateBlob({"created_at": "1995-12-27"}), ), ), ], @@ -224,19 +227,17 @@ def streams(self, config): } ], [ - AirbyteStateMessage.parse_obj( - { - "type": AirbyteStateType.GLOBAL, - "global": AirbyteGlobalState( - shared_state=AirbyteStateBlob.parse_obj({"shared_key": "shared_val"}), - stream_states=[ - AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="movies", namespace="public"), - stream_state=AirbyteStateBlob.parse_obj({"created_at": "2009-07-19"}), - ) - ], - ), - } + AirbyteStateMessage( + type=AirbyteStateType.GLOBAL, + global_=AirbyteGlobalState( + shared_state=AirbyteStateBlob({"shared_key": "shared_val"}), + stream_states=[ + AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="movies", namespace="public"), + stream_state=AirbyteStateBlob({"created_at": "2009-07-19"}), + ) + ], + ), ), ], does_not_raise(), @@ -255,19 +256,19 @@ def streams(self, config): } ], None, - pytest.raises(ValidationError), + pytest.raises(SchemaValidationError), id="test_invalid_stream_state_invalid_type", ), pytest.param( [{"type": "STREAM", "stream": {"stream_state": {"created_at": "2009-07-19"}}}], None, - 
pytest.raises(ValidationError), + pytest.raises(SchemaValidationError), id="test_invalid_stream_state_missing_descriptor", ), pytest.param( [{"type": "GLOBAL", "global": {"shared_state": {"shared_key": "shared_val"}}}], None, - pytest.raises(ValidationError), + pytest.raises(SchemaValidationError), id="test_invalid_global_state_missing_streams", ), pytest.param( @@ -284,7 +285,7 @@ def streams(self, config): } ], None, - pytest.raises(ValidationError), + pytest.raises(SchemaValidationError), id="test_invalid_global_state_streams_not_list", ), ], @@ -295,7 +296,8 @@ def test_read_state(source, incoming_state, expected_state, expected_error): state_file.flush() with expected_error: actual = source.read_state(state_file.name) - assert actual == expected_state + if expected_state and actual: + assert AirbyteStateMessageSerializer.dump(actual[0]) == AirbyteStateMessageSerializer.dump(expected_state[0]) def test_read_invalid_state(source): @@ -330,9 +332,9 @@ def test_read_catalog(source): } ] } - expected = ConfiguredAirbyteCatalog.parse_obj(configured_catalog) + expected = ConfiguredAirbyteCatalogSerializer.load(configured_catalog) with tempfile.NamedTemporaryFile("w") as catalog_file: - catalog_file.write(expected.json(exclude_unset=True)) + catalog_file.write(orjson.dumps(ConfiguredAirbyteCatalogSerializer.dump(expected)).decode()) catalog_file.flush() actual = source.read_catalog(catalog_file.name) assert actual == expected diff --git a/airbyte-cdk/python/unit_tests/sources/test_source_read.py b/airbyte-cdk/python/unit_tests/sources/test_source_read.py index 00471ae86f82..05c71d1eae39 100644 --- a/airbyte-cdk/python/unit_tests/sources/test_source_read.py +++ b/airbyte-cdk/python/unit_tests/sources/test_source_read.py @@ -309,11 +309,19 @@ def test_concurrent_source_yields_the_same_messages_as_abstract_source_when_an_e def _assert_status_messages(messages_from_abstract_source, messages_from_concurrent_source): - status_from_concurrent_source = [message for message in 
messages_from_concurrent_source if message.type == MessageType.TRACE and message.trace.type == TraceType.STREAM_STATUS] + status_from_concurrent_source = [ + message + for message in messages_from_concurrent_source + if message.type == MessageType.TRACE and message.trace.type == TraceType.STREAM_STATUS + ] assert status_from_concurrent_source _verify_messages( - [message for message in messages_from_abstract_source if message.type == MessageType.TRACE and message.trace.type == TraceType.STREAM_STATUS], + [ + message + for message in messages_from_abstract_source + if message.type == MessageType.TRACE and message.trace.type == TraceType.STREAM_STATUS + ], status_from_concurrent_source, ) @@ -329,8 +337,14 @@ def _assert_record_messages(messages_from_abstract_source, messages_from_concurr def _assert_errors(messages_from_abstract_source, messages_from_concurrent_source): - errors_from_concurrent_source = [message for message in messages_from_concurrent_source if message.type == MessageType.TRACE and message.trace.type == TraceType.ERROR] - errors_from_abstract_source = [message for message in messages_from_abstract_source if message.type == MessageType.TRACE and message.trace.type == TraceType.ERROR] + errors_from_concurrent_source = [ + message + for message in messages_from_concurrent_source + if message.type == MessageType.TRACE and message.trace.type == TraceType.ERROR + ] + errors_from_abstract_source = [ + message for message in messages_from_abstract_source if message.type == MessageType.TRACE and message.trace.type == TraceType.ERROR + ] assert errors_from_concurrent_source # exceptions might differ from both framework hence we only assert the count @@ -352,7 +366,13 @@ def _init_sources(stream_slice_to_partitions, state, logger): def _init_source(stream_slice_to_partitions, state, logger, source): streams = [ - StreamFacade.create_from_stream(_MockStream(stream_slices, f"stream{i}"), source, logger, state, FinalStateCursor(stream_name=f"stream{i}", 
stream_namespace=None, message_repository=InMemoryMessageRepository())) + StreamFacade.create_from_stream( + _MockStream(stream_slices, f"stream{i}"), + source, + logger, + state, + FinalStateCursor(stream_name=f"stream{i}", stream_namespace=None, message_repository=InMemoryMessageRepository()), + ) for i, stream_slices in enumerate(stream_slice_to_partitions) ] source.set_streams(streams) diff --git a/airbyte-cdk/python/unit_tests/sources/utils/test_catalog_helpers.py b/airbyte-cdk/python/unit_tests/sources/utils/test_catalog_helpers.py deleted file mode 100644 index 8f4862332ea8..000000000000 --- a/airbyte-cdk/python/unit_tests/sources/utils/test_catalog_helpers.py +++ /dev/null @@ -1,30 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from airbyte_cdk.models import AirbyteCatalog, AirbyteStream, SyncMode -from airbyte_cdk.sources.utils.catalog_helpers import CatalogHelper - - -def test_coerce_catalog_as_full_refresh(): - incremental = AirbyteStream( - name="1", - json_schema={"k": "v"}, - supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh], - source_defined_cursor=True, - default_cursor_field=["cursor"], - ) - full_refresh = AirbyteStream( - name="2", json_schema={"k": "v"}, supported_sync_modes=[SyncMode.full_refresh], source_defined_cursor=False - ) - input = AirbyteCatalog(streams=[incremental, full_refresh]) - - expected = AirbyteCatalog( - streams=[ - AirbyteStream(name="1", json_schema={"k": "v"}, supported_sync_modes=[SyncMode.full_refresh], source_defined_cursor=False), - full_refresh, - ] - ) - - assert CatalogHelper.coerce_catalog_as_full_refresh(input) == expected diff --git a/airbyte-cdk/python/unit_tests/sources/utils/test_schema_helpers.py b/airbyte-cdk/python/unit_tests/sources/utils/test_schema_helpers.py index 0b76f5eef5c2..76b7a9b1c772 100644 --- a/airbyte-cdk/python/unit_tests/sources/utils/test_schema_helpers.py +++ b/airbyte-cdk/python/unit_tests/sources/utils/test_schema_helpers.py @@ -14,7 +14,7 
@@ import jsonref import pytest -from airbyte_cdk.models.airbyte_protocol import ConnectorSpecification, FailureType +from airbyte_cdk.models import ConnectorSpecification, ConnectorSpecificationSerializer, FailureType from airbyte_cdk.sources.utils.schema_helpers import InternalConfig, ResourceSchemaLoader, check_config_against_spec_or_exit from airbyte_cdk.utils.traced_exception import AirbyteTracedException from pytest import fixture @@ -42,7 +42,7 @@ def create_schema(name: str, content: Mapping): @fixture -def spec_object(): +def spec_object() -> ConnectorSpecification: spec = { "connectionSpecification": { "$schema": "http://json-schema.org/draft-07/schema#", @@ -54,7 +54,7 @@ def spec_object(): }, }, } - yield ConnectorSpecification.parse_obj(spec) + yield ConnectorSpecificationSerializer.load(spec) def test_check_config_against_spec_or_exit_does_not_print_schema(capsys, spec_object): diff --git a/airbyte-cdk/python/unit_tests/sources/utils/test_schema_models.py b/airbyte-cdk/python/unit_tests/sources/utils/test_schema_models.py deleted file mode 100644 index 1ef6b23349e7..000000000000 --- a/airbyte-cdk/python/unit_tests/sources/utils/test_schema_models.py +++ /dev/null @@ -1,65 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -from typing import List, Optional - -from airbyte_cdk.sources.utils.schema_models import AllOptional, BaseSchemaModel - - -class InnerClass(BaseSchemaModel): - field1: Optional[str] - field2: int - - -class SchemaWithFewNullables(BaseSchemaModel): - name: Optional[str] - optional_item: Optional[InnerClass] - items: List[InnerClass] - - -class SchemaWithAllOptional(BaseSchemaModel, metaclass=AllOptional): - object_id: int - item: InnerClass - - -class TestSchemaWithFewNullables: - EXPECTED_SCHEMA = { - "type": "object", - "properties": { - "name": {"type": ["null", "string"]}, - "optional_item": { - "oneOf": [ - {"type": "null"}, - {"type": "object", "properties": {"field1": {"type": ["null", "string"]}, "field2": {"type": "integer"}}}, - ] - }, - "items": { - "type": "array", - "items": {"type": "object", "properties": {"field1": {"type": ["null", "string"]}, "field2": {"type": "integer"}}}, - }, - }, - } - - def test_schema_postprocessing(self): - schema = SchemaWithFewNullables.schema() - assert schema == self.EXPECTED_SCHEMA - - -class TestSchemaWithAllOptional: - EXPECTED_SCHEMA = { - "type": "object", - "properties": { - "object_id": {"type": ["null", "integer"]}, - "item": { - "oneOf": [ - {"type": "null"}, - {"type": "object", "properties": {"field1": {"type": ["null", "string"]}, "field2": {"type": "integer"}}}, - ] - }, - }, - } - - def test_schema_postprocessing(self): - schema = SchemaWithAllOptional.schema() - assert schema == self.EXPECTED_SCHEMA diff --git a/airbyte-cdk/python/unit_tests/test/mock_http/test_response_builder.py b/airbyte-cdk/python/unit_tests/test/mock_http/test_response_builder.py index 328db535ca36..c8ccdc41b9bf 100644 --- a/airbyte-cdk/python/unit_tests/test/mock_http/test_response_builder.py +++ b/airbyte-cdk/python/unit_tests/test/mock_http/test_response_builder.py @@ -46,9 +46,7 @@ def _any_record_builder() -> RecordBuilder: def _response_builder( - response_template: Dict[str, Any], - records_path: Union[FieldPath, 
NestedPath], - pagination_strategy: Optional[PaginationStrategy] = None + response_template: Dict[str, Any], records_path: Union[FieldPath, NestedPath], pagination_strategy: Optional[PaginationStrategy] = None ) -> HttpResponseBuilder: return create_response_builder(deepcopy(response_template), records_path, pagination_strategy=pagination_strategy) @@ -64,7 +62,9 @@ def test_given_with_id_when_build_then_set_id(self) -> None: assert record[_ID_FIELD] == "another id" def test_given_nested_id_when_build_then_set_id(self) -> None: - builder = _record_builder({_RECORDS_FIELD: [{"nested": {_ID_FIELD: "id"}}]}, FieldPath(_RECORDS_FIELD), NestedPath(["nested", _ID_FIELD])) + builder = _record_builder( + {_RECORDS_FIELD: [{"nested": {_ID_FIELD: "id"}}]}, FieldPath(_RECORDS_FIELD), NestedPath(["nested", _ID_FIELD]) + ) record = builder.with_id("another id").build() assert record["nested"][_ID_FIELD] == "another id" @@ -79,9 +79,7 @@ def test_given_no_id_in_template_for_path_when_build_then_raise_error(self) -> N def test_given_with_cursor_when_build_then_set_id(self) -> None: builder = _record_builder( - {_RECORDS_FIELD: [{_CURSOR_FIELD: "a cursor"}]}, - FieldPath(_RECORDS_FIELD), - record_cursor_path=FieldPath(_CURSOR_FIELD) + {_RECORDS_FIELD: [{_CURSOR_FIELD: "a cursor"}]}, FieldPath(_RECORDS_FIELD), record_cursor_path=FieldPath(_CURSOR_FIELD) ) record = builder.with_cursor("another cursor").build() assert record[_CURSOR_FIELD] == "another cursor" @@ -90,7 +88,7 @@ def test_given_nested_cursor_when_build_then_set_cursor(self) -> None: builder = _record_builder( {_RECORDS_FIELD: [{"nested": {_CURSOR_FIELD: "a cursor"}}]}, FieldPath(_RECORDS_FIELD), - record_cursor_path=NestedPath(["nested", _CURSOR_FIELD]) + record_cursor_path=NestedPath(["nested", _CURSOR_FIELD]), ) record = builder.with_cursor("another cursor").build() assert record["nested"][_CURSOR_FIELD] == "another cursor" @@ -115,7 +113,7 @@ def 
test_given_no_cursor_in_template_for_path_when_build_then_raise_error(self) _record_builder( {_RECORDS_FIELD: [{"record without cursor": "should fail"}]}, FieldPath(_RECORDS_FIELD), - record_cursor_path=FieldPath(_ID_FIELD) + record_cursor_path=FieldPath(_ID_FIELD), ) @@ -150,7 +148,7 @@ def test_given_pagination_with_strategy_when_build_then_apply_strategy(self) -> builder = _response_builder( {"has_more_pages": False} | _SOME_RECORDS, FieldPath(_RECORDS_FIELD), - pagination_strategy=FieldUpdatePaginationStrategy(FieldPath("has_more_pages"), "yes more page") + pagination_strategy=FieldUpdatePaginationStrategy(FieldPath("has_more_pages"), "yes more page"), ) response = builder.with_pagination().build() diff --git a/airbyte-cdk/python/unit_tests/test/test_entrypoint_wrapper.py b/airbyte-cdk/python/unit_tests/test/test_entrypoint_wrapper.py index 8e0bbe9fc93c..11dfc5877572 100644 --- a/airbyte-cdk/python/unit_tests/test/test_entrypoint_wrapper.py +++ b/airbyte-cdk/python/unit_tests/test/test_entrypoint_wrapper.py @@ -7,32 +7,40 @@ from unittest import TestCase from unittest.mock import Mock, patch -from airbyte_cdk.sources.abstract_source import AbstractSource -from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, discover, read -from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import ( +from airbyte_cdk.models import ( AirbyteAnalyticsTraceMessage, AirbyteCatalog, AirbyteErrorTraceMessage, AirbyteLogMessage, AirbyteMessage, + AirbyteMessageSerializer, AirbyteRecordMessage, AirbyteStateBlob, AirbyteStateMessage, AirbyteStreamState, + AirbyteStreamStateSerializer, AirbyteStreamStatus, AirbyteStreamStatusTraceMessage, AirbyteTraceMessage, - ConfiguredAirbyteCatalog, + ConfiguredAirbyteCatalogSerializer, Level, StreamDescriptor, TraceType, Type, ) +from airbyte_cdk.sources.abstract_source import AbstractSource +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, discover, read +from 
airbyte_cdk.test.state_builder import StateBuilder +from orjson import orjson def _a_state_message(stream_name: str, stream_state: Mapping[str, Any]) -> AirbyteMessage: - return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(stream=AirbyteStreamState(stream_descriptor=StreamDescriptor(name=stream_name), stream_state=AirbyteStateBlob(**stream_state)))) + return AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + stream=AirbyteStreamState(stream_descriptor=StreamDescriptor(name=stream_name), stream_state=AirbyteStateBlob(**stream_state)) + ), + ) def _a_status_message(stream_name: str, status: AirbyteStreamStatus) -> AirbyteMessage: @@ -77,7 +85,7 @@ def _a_status_message(stream_name: str, status: AirbyteStreamStatus) -> AirbyteM _A_STREAM_NAME = "a stream name" _A_CONFIG = {"config_key": "config_value"} -_A_CATALOG = ConfiguredAirbyteCatalog.parse_obj( +_A_CATALOG = ConfiguredAirbyteCatalogSerializer.load( { "streams": [ { @@ -97,7 +105,7 @@ def _a_status_message(stream_name: str, status: AirbyteStreamStatus) -> AirbyteM def _to_entrypoint_output(messages: List[AirbyteMessage]) -> Iterator[str]: - return (message.json(exclude_unset=True) for message in messages) + return (orjson.dumps(AirbyteMessageSerializer.dump(message)).decode() for message in messages) def _a_mocked_source() -> AbstractSource: @@ -112,7 +120,11 @@ def _validate_tmp_json_file(expected, file_path) -> None: def _validate_tmp_catalog(expected, file_path) -> None: - assert ConfiguredAirbyteCatalog.parse_file(file_path) == expected + assert ConfiguredAirbyteCatalogSerializer.load( + orjson.loads( + open(file_path).read() + ) + ) == expected def _create_tmp_file_validation(entrypoint, expected_config, expected_catalog: Optional[Any] = None, expected_state: Optional[Any] = None): @@ -176,19 +188,19 @@ def _do_some_logging(self): def test_given_record_when_discover_then_output_has_record(self, entrypoint): entrypoint.return_value.run.return_value = 
_to_entrypoint_output([_A_CATALOG_MESSAGE]) output = discover(self._a_source, _A_CONFIG) - assert output.catalog == _A_CATALOG_MESSAGE + assert AirbyteMessageSerializer.dump(output.catalog) == AirbyteMessageSerializer.dump(_A_CATALOG_MESSAGE) @patch("airbyte_cdk.test.entrypoint_wrapper.AirbyteEntrypoint") def test_given_log_when_discover_then_output_has_log(self, entrypoint): entrypoint.return_value.run.return_value = _to_entrypoint_output([_A_LOG]) output = discover(self._a_source, _A_CONFIG) - assert output.logs == [_A_LOG] + assert AirbyteMessageSerializer.dump(output.logs[0]) == AirbyteMessageSerializer.dump(_A_LOG) @patch("airbyte_cdk.test.entrypoint_wrapper.AirbyteEntrypoint") def test_given_trace_message_when_discover_then_output_has_trace_messages(self, entrypoint): entrypoint.return_value.run.return_value = _to_entrypoint_output([_AN_ANALYTIC_MESSAGE]) output = discover(self._a_source, _A_CONFIG) - assert output.analytics_messages == [_AN_ANALYTIC_MESSAGE] + assert AirbyteMessageSerializer.dump(output.analytics_messages[0]) == AirbyteMessageSerializer.dump(_AN_ANALYTIC_MESSAGE) @patch("airbyte_cdk.test.entrypoint_wrapper.print", create=True) @patch("airbyte_cdk.test.entrypoint_wrapper.AirbyteEntrypoint") @@ -254,41 +266,45 @@ def _do_some_logging(self): def test_given_record_when_read_then_output_has_record(self, entrypoint): entrypoint.return_value.run.return_value = _to_entrypoint_output([_A_RECORD]) output = read(self._a_source, _A_CONFIG, _A_CATALOG, _A_STATE) - assert output.records == [_A_RECORD] + assert AirbyteMessageSerializer.dump(output.records[0]) == AirbyteMessageSerializer.dump(_A_RECORD) @patch("airbyte_cdk.test.entrypoint_wrapper.AirbyteEntrypoint") def test_given_state_message_when_read_then_output_has_state_message(self, entrypoint): entrypoint.return_value.run.return_value = _to_entrypoint_output([_A_STATE_MESSAGE]) output = read(self._a_source, _A_CONFIG, _A_CATALOG, _A_STATE) - assert output.state_messages == [_A_STATE_MESSAGE] + 
assert AirbyteMessageSerializer.dump(output.state_messages[0]) == AirbyteMessageSerializer.dump(_A_STATE_MESSAGE) @patch("airbyte_cdk.test.entrypoint_wrapper.AirbyteEntrypoint") def test_given_state_message_and_records_when_read_then_output_has_records_and_state_message(self, entrypoint): entrypoint.return_value.run.return_value = _to_entrypoint_output([_A_RECORD, _A_STATE_MESSAGE]) output = read(self._a_source, _A_CONFIG, _A_CATALOG, _A_STATE) - assert output.records_and_state_messages == [_A_RECORD, _A_STATE_MESSAGE] + assert [AirbyteMessageSerializer.dump(message) for message in output.records_and_state_messages] == [ + AirbyteMessageSerializer.dump(message) for message in (_A_RECORD, _A_STATE_MESSAGE) + ] @patch("airbyte_cdk.test.entrypoint_wrapper.AirbyteEntrypoint") def test_given_many_state_messages_and_records_when_read_then_output_has_records_and_state_message(self, entrypoint): state_value = {"state_key": "last state value"} - last_emitted_state = AirbyteStreamState(stream_descriptor=StreamDescriptor(name="stream_name"), stream_state=AirbyteStateBlob(**state_value)) + last_emitted_state = AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="stream_name"), stream_state=AirbyteStateBlob(**state_value) + ) entrypoint.return_value.run.return_value = _to_entrypoint_output([_A_STATE_MESSAGE, _a_state_message("stream_name", state_value)]) output = read(self._a_source, _A_CONFIG, _A_CATALOG, _A_STATE) - assert output.most_recent_state == last_emitted_state + assert AirbyteStreamStateSerializer.dump(output.most_recent_state) == AirbyteStreamStateSerializer.dump(last_emitted_state) @patch("airbyte_cdk.test.entrypoint_wrapper.AirbyteEntrypoint") def test_given_log_when_read_then_output_has_log(self, entrypoint): entrypoint.return_value.run.return_value = _to_entrypoint_output([_A_LOG]) output = read(self._a_source, _A_CONFIG, _A_CATALOG, _A_STATE) - assert output.logs == [_A_LOG] + assert AirbyteMessageSerializer.dump(output.logs[0]) == 
AirbyteMessageSerializer.dump(_A_LOG) @patch("airbyte_cdk.test.entrypoint_wrapper.AirbyteEntrypoint") def test_given_trace_message_when_read_then_output_has_trace_messages(self, entrypoint): entrypoint.return_value.run.return_value = _to_entrypoint_output([_AN_ANALYTIC_MESSAGE]) output = read(self._a_source, _A_CONFIG, _A_CATALOG, _A_STATE) - assert output.analytics_messages == [_AN_ANALYTIC_MESSAGE] + assert AirbyteMessageSerializer.dump(output.analytics_messages[0]) == AirbyteMessageSerializer.dump(_AN_ANALYTIC_MESSAGE) @patch("airbyte_cdk.test.entrypoint_wrapper.AirbyteEntrypoint") def test_given_stream_statuses_when_read_then_return_statuses(self, entrypoint): diff --git a/airbyte-cdk/python/unit_tests/test_connector.py b/airbyte-cdk/python/unit_tests/test_connector.py index 444397b4b0d6..ea7de2e40695 100644 --- a/airbyte-cdk/python/unit_tests/test_connector.py +++ b/airbyte-cdk/python/unit_tests/test_connector.py @@ -15,7 +15,6 @@ import yaml from airbyte_cdk import Connector from airbyte_cdk.models import AirbyteConnectionStatus -from pydantic import AnyUrl logger = logging.getLogger("airbyte") @@ -113,7 +112,7 @@ def use_yaml_spec(self): def test_spec_from_json_file(self, integration, use_json_spec): connector_spec = integration.spec(logger) - assert connector_spec.documentationUrl == AnyUrl("https://airbyte.com/#json") + assert connector_spec.documentationUrl == "https://airbyte.com/#json" assert connector_spec.connectionSpecification == self.CONNECTION_SPECIFICATION def test_spec_from_improperly_formatted_json_file(self, integration, use_invalid_json_spec): @@ -122,7 +121,7 @@ def test_spec_from_improperly_formatted_json_file(self, integration, use_invalid def test_spec_from_yaml_file(self, integration, use_yaml_spec): connector_spec = integration.spec(logger) - assert connector_spec.documentationUrl == AnyUrl("https://airbyte.com/#yaml") + assert connector_spec.documentationUrl == "https://airbyte.com/#yaml" assert connector_spec.connectionSpecification 
== self.CONNECTION_SPECIFICATION def test_multiple_spec_files_raises_exception(self, integration, use_yaml_spec, use_json_spec): diff --git a/airbyte-cdk/python/unit_tests/test_entrypoint.py b/airbyte-cdk/python/unit_tests/test_entrypoint.py index 1c5f8427bbb0..571042e202d1 100644 --- a/airbyte-cdk/python/unit_tests/test_entrypoint.py +++ b/airbyte-cdk/python/unit_tests/test_entrypoint.py @@ -20,9 +20,11 @@ AirbyteControlConnectorConfigMessage, AirbyteControlMessage, AirbyteMessage, + AirbyteMessageSerializer, AirbyteRecordMessage, AirbyteStateBlob, AirbyteStateMessage, + AirbyteStateStats, AirbyteStateType, AirbyteStream, AirbyteStreamState, @@ -37,10 +39,10 @@ TraceType, Type, ) -from airbyte_cdk.models.airbyte_protocol import AirbyteStateStats from airbyte_cdk.sources import Source from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor from airbyte_cdk.utils import AirbyteTracedException +from orjson import orjson class MockSource(Source): @@ -106,14 +108,14 @@ def test_airbyte_entrypoint_init(mocker): ("check", {"config": "config_path"}, {"command": "check", "config": "config_path", "debug": False}), ("discover", {"config": "config_path", "debug": ""}, {"command": "discover", "config": "config_path", "debug": True}), ( - "read", - {"config": "config_path", "catalog": "catalog_path", "state": "None"}, - {"command": "read", "config": "config_path", "catalog": "catalog_path", "state": "None", "debug": False}, + "read", + {"config": "config_path", "catalog": "catalog_path", "state": "None"}, + {"command": "read", "config": "config_path", "catalog": "catalog_path", "state": "None", "debug": False}, ), ( - "read", - {"config": "config_path", "catalog": "catalog_path", "state": "state_path", "debug": ""}, - {"command": "read", "config": "config_path", "catalog": "catalog_path", "state": "state_path", "debug": True}, + "read", + {"config": "config_path", "catalog": "catalog_path", "state": "state_path", "debug": ""}, + {"command": "read", 
"config": "config_path", "catalog": "catalog_path", "state": "state_path", "debug": True}, ), ], ) @@ -152,7 +154,7 @@ def _wrap_message(submessage: Union[AirbyteConnectionStatus, ConnectorSpecificat else: raise Exception(f"Unknown message type: {submessage}") - return message.json(exclude_unset=True) + return orjson.dumps(AirbyteMessageSerializer.dump(message)).decode() def test_run_spec(entrypoint: AirbyteEntrypoint, mocker): @@ -162,7 +164,7 @@ def test_run_spec(entrypoint: AirbyteEntrypoint, mocker): messages = list(entrypoint.run(parsed_args)) - assert [MESSAGE_FROM_REPOSITORY.json(exclude_unset=True), _wrap_message(expected)] == messages + assert [orjson.dumps(AirbyteMessageSerializer.dump(MESSAGE_FROM_REPOSITORY)).decode(), _wrap_message(expected)] == messages @pytest.fixture @@ -181,9 +183,9 @@ def config_mock(mocker, request): ({"username": "fake"}, {"type": "object", "properties": {"user": {"type": "string"}}}, True), ({"username": "fake"}, {"type": "object", "properties": {"user": {"type": "string", "airbyte_secret": True}}}, True), ( - {"username": "fake", "_limit": 22}, - {"type": "object", "properties": {"username": {"type": "string"}}, "additionalProperties": False}, - True, + {"username": "fake", "_limit": 22}, + {"type": "object", "properties": {"username": {"type": "string"}}, "additionalProperties": False}, + True, ), ], indirect=["config_mock"], @@ -196,14 +198,14 @@ def test_config_validate(entrypoint: AirbyteEntrypoint, mocker, config_mock, sch messages = list(entrypoint.run(parsed_args)) if config_valid: - assert [MESSAGE_FROM_REPOSITORY.json(exclude_unset=True), _wrap_message(check_value)] == messages + assert [orjson.dumps(AirbyteMessageSerializer.dump(MESSAGE_FROM_REPOSITORY)).decode(), _wrap_message(check_value)] == messages else: assert len(messages) == 2 - assert messages[0] == MESSAGE_FROM_REPOSITORY.json(exclude_unset=True) - connection_status_message = AirbyteMessage.parse_raw(messages[1]) - assert connection_status_message.type == 
Type.CONNECTION_STATUS - assert connection_status_message.connectionStatus.status == Status.FAILED - assert connection_status_message.connectionStatus.message.startswith("Config validation error:") + assert messages[0] == orjson.dumps(AirbyteMessageSerializer.dump(MESSAGE_FROM_REPOSITORY)).decode() + connection_status_message = AirbyteMessage(**orjson.loads(messages[1])) + assert connection_status_message.type == Type.CONNECTION_STATUS.value + assert connection_status_message.connectionStatus.get("status") == Status.FAILED.value + assert connection_status_message.connectionStatus.get("message").startswith("Config validation error:") def test_run_check(entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_mock): @@ -213,7 +215,7 @@ def test_run_check(entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_mock messages = list(entrypoint.run(parsed_args)) - assert [MESSAGE_FROM_REPOSITORY.json(exclude_unset=True), _wrap_message(check_value)] == messages + assert [orjson.dumps(AirbyteMessageSerializer.dump(MESSAGE_FROM_REPOSITORY)).decode(), _wrap_message(check_value)] == messages assert spec_mock.called @@ -223,7 +225,7 @@ def test_run_check_with_exception(entrypoint: AirbyteEntrypoint, mocker, spec_mo with pytest.raises(ValueError): messages = list(entrypoint.run(parsed_args)) - assert [MESSAGE_FROM_REPOSITORY.json(exclude_unset=True)] == messages + assert [orjson.dumps(AirbyteMessageSerializer.dump(MESSAGE_FROM_REPOSITORY)).decode()] == messages def test_run_discover(entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_mock): @@ -233,7 +235,7 @@ def test_run_discover(entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_m messages = list(entrypoint.run(parsed_args)) - assert [MESSAGE_FROM_REPOSITORY.json(exclude_unset=True), _wrap_message(expected)] == messages + assert [orjson.dumps(AirbyteMessageSerializer.dump(MESSAGE_FROM_REPOSITORY)).decode(), _wrap_message(expected)] == messages assert spec_mock.called @@ -243,7 +245,7 @@ def 
test_run_discover_with_exception(entrypoint: AirbyteEntrypoint, mocker, spec with pytest.raises(ValueError): messages = list(entrypoint.run(parsed_args)) - assert [MESSAGE_FROM_REPOSITORY.json(exclude_unset=True)] == messages + assert [orjson.dumps(AirbyteMessageSerializer.dump(MESSAGE_FROM_REPOSITORY)).decode()] == messages def test_run_read(entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_mock): @@ -255,18 +257,18 @@ def test_run_read(entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_mock) messages = list(entrypoint.run(parsed_args)) - assert [MESSAGE_FROM_REPOSITORY.json(exclude_unset=True), _wrap_message(expected)] == messages + assert [orjson.dumps(AirbyteMessageSerializer.dump(MESSAGE_FROM_REPOSITORY)).decode(), _wrap_message(expected)] == messages assert spec_mock.called def test_given_message_emitted_during_config_when_read_then_emit_message_before_next_steps( - entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_mock + entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_mock ): parsed_args = Namespace(command="read", config="config_path", state="statepath", catalog="catalogpath") mocker.patch.object(MockSource, "read_catalog", side_effect=ValueError) messages = entrypoint.run(parsed_args) - assert next(messages) == MESSAGE_FROM_REPOSITORY.json(exclude_unset=True) + assert next(messages) == orjson.dumps(AirbyteMessageSerializer.dump(MESSAGE_FROM_REPOSITORY)).decode() with pytest.raises(ValueError): next(messages) @@ -279,7 +281,7 @@ def test_run_read_with_exception(entrypoint: AirbyteEntrypoint, mocker, spec_moc with pytest.raises(ValueError): messages = list(entrypoint.run(parsed_args)) - assert [MESSAGE_FROM_REPOSITORY.json(exclude_unset=True)] == messages + assert [orjson.dumps(AirbyteMessageSerializer.dump(MESSAGE_FROM_REPOSITORY)).decode()] == messages def test_invalid_command(entrypoint: AirbyteEntrypoint, config_mock): @@ -334,12 +336,26 @@ def test_filter_internal_requests(deployment_mode, url, expected_error): 
id="test_handle_record_message", ), pytest.param( - AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="customers"), stream_state=AirbyteStateBlob(updated_at="2024-02-02")))), + AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="customers"), stream_state=AirbyteStateBlob(updated_at="2024-02-02") + ), + ), + ), {HashableStreamDescriptor(name="customers"): 100.0}, - AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="customers"), stream_state=AirbyteStateBlob(updated_at="2024-02-02")), - sourceStats=AirbyteStateStats(recordCount=100.0))), + AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="customers"), stream_state=AirbyteStateBlob(updated_at="2024-02-02") + ), + sourceStats=AirbyteStateStats(recordCount=100.0), + ), + ), {HashableStreamDescriptor(name="customers"): 0.0}, id="test_handle_state_message", ), @@ -351,15 +367,27 @@ def test_filter_internal_requests(deployment_mode, url, expected_error): id="test_handle_first_record_message", ), pytest.param( - AirbyteMessage(type=Type.TRACE, trace=AirbyteTraceMessage(type=TraceType.STREAM_STATUS, - stream_status=AirbyteStreamStatusTraceMessage( - stream_descriptor=StreamDescriptor(name="customers"), - status=AirbyteStreamStatus.COMPLETE), emitted_at=1)), + AirbyteMessage( + type=Type.TRACE, + trace=AirbyteTraceMessage( + type=TraceType.STREAM_STATUS, + stream_status=AirbyteStreamStatusTraceMessage( + stream_descriptor=StreamDescriptor(name="customers"), status=AirbyteStreamStatus.COMPLETE + ), + emitted_at=1, + ), + ), {HashableStreamDescriptor(name="customers"): 5.0}, - 
AirbyteMessage(type=Type.TRACE, trace=AirbyteTraceMessage(type=TraceType.STREAM_STATUS, - stream_status=AirbyteStreamStatusTraceMessage( - stream_descriptor=StreamDescriptor(name="customers"), - status=AirbyteStreamStatus.COMPLETE), emitted_at=1)), + AirbyteMessage( + type=Type.TRACE, + trace=AirbyteTraceMessage( + type=TraceType.STREAM_STATUS, + stream_status=AirbyteStreamStatusTraceMessage( + stream_descriptor=StreamDescriptor(name="customers"), status=AirbyteStreamStatus.COMPLETE + ), + emitted_at=1, + ), + ), {HashableStreamDescriptor(name="customers"): 5.0}, id="test_handle_other_message_type", ), @@ -371,48 +399,96 @@ def test_filter_internal_requests(deployment_mode, url, expected_error): id="test_handle_record_message_for_other_stream", ), pytest.param( - AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="others"), stream_state=AirbyteStateBlob(updated_at="2024-02-02")))), + AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="others"), stream_state=AirbyteStateBlob(updated_at="2024-02-02") + ), + ), + ), {HashableStreamDescriptor(name="customers"): 100.0, HashableStreamDescriptor(name="others"): 27.0}, - AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="others"), stream_state=AirbyteStateBlob(updated_at="2024-02-02")), - sourceStats=AirbyteStateStats(recordCount=27.0))), + AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="others"), stream_state=AirbyteStateBlob(updated_at="2024-02-02") + ), + sourceStats=AirbyteStateStats(recordCount=27.0), + ), + ), {HashableStreamDescriptor(name="customers"): 100.0, 
HashableStreamDescriptor(name="others"): 0.0}, id="test_handle_state_message_for_other_stream", ), pytest.param( - AirbyteMessage(type=Type.RECORD, - record=AirbyteRecordMessage(stream="customers", namespace="public", data={"id": "12345"}, emitted_at=1)), + AirbyteMessage( + type=Type.RECORD, record=AirbyteRecordMessage(stream="customers", namespace="public", data={"id": "12345"}, emitted_at=1) + ), {HashableStreamDescriptor(name="customers", namespace="public"): 100.0}, - AirbyteMessage(type=Type.RECORD, - record=AirbyteRecordMessage(stream="customers", namespace="public", data={"id": "12345"}, emitted_at=1)), + AirbyteMessage( + type=Type.RECORD, record=AirbyteRecordMessage(stream="customers", namespace="public", data={"id": "12345"}, emitted_at=1) + ), {HashableStreamDescriptor(name="customers", namespace="public"): 101.0}, id="test_handle_record_message_with_descriptor", ), pytest.param( - AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="customers", namespace="public"), - stream_state=AirbyteStateBlob(updated_at="2024-02-02")))), + AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="customers", namespace="public"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02"), + ), + ), + ), {HashableStreamDescriptor(name="customers", namespace="public"): 100.0}, - AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="customers", namespace="public"), - stream_state=AirbyteStateBlob(updated_at="2024-02-02")), sourceStats=AirbyteStateStats(recordCount=100.0))), + AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="customers", 
namespace="public"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02"), + ), + sourceStats=AirbyteStateStats(recordCount=100.0), + ), + ), {HashableStreamDescriptor(name="customers", namespace="public"): 0.0}, id="test_handle_state_message_with_descriptor", ), pytest.param( - AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="others", namespace="public"), - stream_state=AirbyteStateBlob(updated_at="2024-02-02")))), + AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="others", namespace="public"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02"), + ), + ), + ), {HashableStreamDescriptor(name="customers", namespace="public"): 100.0}, - AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="others", namespace="public"), - stream_state=AirbyteStateBlob(updated_at="2024-02-02")), sourceStats=AirbyteStateStats(recordCount=0.0))), - {HashableStreamDescriptor(name="customers", namespace="public"): 100.0, - HashableStreamDescriptor(name="others", namespace="public"): 0.0}, + AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="others", namespace="public"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02"), + ), + sourceStats=AirbyteStateStats(recordCount=0.0), + ), + ), + { + HashableStreamDescriptor(name="customers", namespace="public"): 100.0, + HashableStreamDescriptor(name="others", namespace="public"): 0.0, + }, id="test_handle_state_message_no_records", ), - ] + ], ) def test_handle_record_counts(incoming_message, stream_message_count, expected_message, expected_records_by_stream): entrypoint = 
AirbyteEntrypoint(source=MockSource()) diff --git a/airbyte-cdk/python/unit_tests/test_exception_handler.py b/airbyte-cdk/python/unit_tests/test_exception_handler.py index 42819942ade1..f135c19fd5a9 100644 --- a/airbyte-cdk/python/unit_tests/test_exception_handler.py +++ b/airbyte-cdk/python/unit_tests/test_exception_handler.py @@ -9,7 +9,17 @@ import pytest from airbyte_cdk.exception_handler import assemble_uncaught_exception -from airbyte_cdk.models import AirbyteErrorTraceMessage, AirbyteLogMessage, AirbyteMessage, AirbyteTraceMessage +from airbyte_cdk.models import ( + AirbyteErrorTraceMessage, + AirbyteLogMessage, + AirbyteMessage, + AirbyteMessageSerializer, + AirbyteTraceMessage, + FailureType, + Level, + TraceType, +) +from airbyte_cdk.models import Type as MessageType from airbyte_cdk.sources.streams.concurrent.exceptions import ExceptionWithDisplayMessage from airbyte_cdk.utils.traced_exception import AirbyteTracedException @@ -43,16 +53,16 @@ def test_uncaught_exception_handler(): ) expected_log_message = AirbyteMessage( - type="LOG", log=AirbyteLogMessage(level="FATAL", message=f"{exception_message}\n{exception_trace}") + type=MessageType.LOG, log=AirbyteLogMessage(level=Level.FATAL, message=f"{exception_message}\n{exception_trace}") ) expected_trace_message = AirbyteMessage( - type="TRACE", + type=MessageType.TRACE, trace=AirbyteTraceMessage( - type="ERROR", + type=TraceType.ERROR, emitted_at=0.0, error=AirbyteErrorTraceMessage( - failure_type="system_error", + failure_type=FailureType.system_error, message="Something went wrong in the connector. 
See the logs for more details.", internal_message=exception_message, stack_trace=f"{exception_trace}\n", @@ -70,10 +80,10 @@ def test_uncaught_exception_handler(): log_output, trace_output = stdout_lines - out_log_message = AirbyteMessage.parse_obj(json.loads(log_output)) + out_log_message = AirbyteMessageSerializer.load(json.loads(log_output)) assert out_log_message == expected_log_message, "Log message should be emitted in expected form" - out_trace_message = AirbyteMessage.parse_obj(json.loads(trace_output)) + out_trace_message = AirbyteMessageSerializer.load(json.loads(trace_output)) assert out_trace_message.trace.emitted_at > 0 out_trace_message.trace.emitted_at = 0.0 # set a specific emitted_at value for testing assert out_trace_message == expected_trace_message, "Trace message should be emitted in expected form" diff --git a/airbyte-cdk/python/unit_tests/utils/test_datetime_format_inferrer.py b/airbyte-cdk/python/unit_tests/utils/test_datetime_format_inferrer.py index 766007467184..5e76b9cfa193 100644 --- a/airbyte-cdk/python/unit_tests/utils/test_datetime_format_inferrer.py +++ b/airbyte-cdk/python/unit_tests/utils/test_datetime_format_inferrer.py @@ -5,7 +5,7 @@ from typing import Dict, List import pytest -from airbyte_cdk.models.airbyte_protocol import AirbyteRecordMessage +from airbyte_cdk.models import AirbyteRecordMessage from airbyte_cdk.utils.datetime_format_inferrer import DatetimeFormatInferrer NOW = 1234567 diff --git a/airbyte-cdk/python/unit_tests/utils/test_message_utils.py b/airbyte-cdk/python/unit_tests/utils/test_message_utils.py index 496360ea46f3..84fabf1a8fa8 100644 --- a/airbyte-cdk/python/unit_tests/utils/test_message_utils.py +++ b/airbyte-cdk/python/unit_tests/utils/test_message_utils.py @@ -1,9 +1,7 @@ # Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
import pytest -from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor -from airbyte_cdk.utils.message_utils import get_stream_descriptor -from airbyte_protocol.models import ( +from airbyte_cdk.models import ( AirbyteControlConnectorConfigMessage, AirbyteControlMessage, AirbyteMessage, @@ -17,6 +15,8 @@ StreamDescriptor, Type, ) +from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor +from airbyte_cdk.utils.message_utils import get_stream_descriptor def test_get_record_message_stream_descriptor(): @@ -36,9 +36,7 @@ def test_get_record_message_stream_descriptor(): def test_get_record_message_stream_descriptor_no_namespace(): message = AirbyteMessage( type=Type.RECORD, - record=AirbyteRecordMessage( - stream="test_stream", data={"id": "12345"}, emitted_at=1 - ), + record=AirbyteRecordMessage(stream="test_stream", data={"id": "12345"}, emitted_at=1), ) expected_descriptor = HashableStreamDescriptor(name="test_stream", namespace=None) assert get_stream_descriptor(message) == expected_descriptor @@ -50,9 +48,7 @@ def test_get_state_message_stream_descriptor(): state=AirbyteStateMessage( type=AirbyteStateType.STREAM, stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor( - name="test_stream", namespace="test_namespace" - ), + stream_descriptor=StreamDescriptor(name="test_stream", namespace="test_namespace"), stream_state=AirbyteStateBlob(updated_at="2024-02-02"), ), sourceStats=AirbyteStateStats(recordCount=27.0), diff --git a/airbyte-cdk/python/unit_tests/utils/test_schema_inferrer.py b/airbyte-cdk/python/unit_tests/utils/test_schema_inferrer.py index 51a055401a29..98d227c40ec6 100644 --- a/airbyte-cdk/python/unit_tests/utils/test_schema_inferrer.py +++ b/airbyte-cdk/python/unit_tests/utils/test_schema_inferrer.py @@ -5,7 +5,7 @@ from typing import List, Mapping import pytest -from airbyte_cdk.models.airbyte_protocol import AirbyteRecordMessage +from airbyte_cdk.models import AirbyteRecordMessage from 
airbyte_cdk.utils.schema_inferrer import SchemaInferrer, SchemaValidationException NOW = 1234567 @@ -133,7 +133,10 @@ { "my_stream": { "field_A": {"type": ["string", "null"]}, - "nested": {"type": ["array", "null"], "items": {"type": ["object", "null"], "properties": {"field_C": {"type": ["string", "null"]}}}}, + "nested": { + "type": ["array", "null"], + "items": {"type": ["object", "null"], "properties": {"field_C": {"type": ["string", "null"]}}}, + }, } }, id="test_array_nested_null", @@ -146,7 +149,10 @@ { "my_stream": { "field_A": {"type": ["string", "null"]}, - "nested": {"type": ["array", "null"], "items": {"type": ["object", "null"], "properties": {"field_C": {"type": ["string", "null"]}}}}, + "nested": { + "type": ["array", "null"], + "items": {"type": ["object", "null"], "properties": {"field_C": {"type": ["string", "null"]}}}, + }, } }, id="test_array_top_level_null", @@ -166,80 +172,42 @@ "data": { "root_property_object": { "property_array": [ - { - "title": "Nested_1", - "type": "multi-value", - "value": ["XL"] - }, + {"title": "Nested_1", "type": "multi-value", "value": ["XL"]}, { "title": "Nested_2", "type": "location", - "value": { - "nested_key_1": "GB", - "nested_key_2": "United Kingdom" - } - } + "value": {"nested_key_1": "GB", "nested_key_2": "United Kingdom"}, + }, ], } - } + }, }, ], { "data_with_nested_arrays": { "root_property_object": { - "type": [ - "object", - "null" - ], + "type": ["object", "null"], "properties": { "property_array": { - "type": [ - "array", - "null" - ], + "type": ["array", "null"], "items": { - "type": [ - "object", - "null" - ], + "type": ["object", "null"], "properties": { - "title": { - "type": [ - "string", - "null" - ] - }, - "type": { - "type": [ - "string", - "null" - ] - }, + "title": {"type": ["string", "null"]}, + "type": {"type": ["string", "null"]}, "value": { "anyOf": [ - { - "type": "array", - "items": { - "type": "string" - } - }, + {"type": "array", "items": {"type": "string"}}, { "type": "object", - 
"properties": { - "nested_key_1": { - "type": "string" - }, - "nested_key_2": { - "type": "string" - } - } - } + "properties": {"nested_key_1": {"type": "string"}, "nested_key_2": {"type": "string"}}, + }, ] - } - } - } + }, + }, + }, } - } + }, } } }, @@ -277,7 +245,7 @@ def _create_inferrer_with_required_field(is_pk: bool, field: List[List[str]]) -> [ pytest.param(_IS_PK, id="required_field_is_pk"), pytest.param(_IS_CURSOR_FIELD, id="required_field_is_cursor_field"), - ] + ], ) def test_field_is_on_root(is_pk: bool): inferrer = _create_inferrer_with_required_field(is_pk, [["property"]]) @@ -293,7 +261,7 @@ def test_field_is_on_root(is_pk: bool): [ pytest.param(_IS_PK, id="required_field_is_pk"), pytest.param(_IS_CURSOR_FIELD, id="required_field_is_cursor_field"), - ] + ], ) def test_field_is_nested(is_pk: bool): inferrer = _create_inferrer_with_required_field(is_pk, [["property", "nested_property"]]) @@ -310,11 +278,13 @@ def test_field_is_nested(is_pk: bool): [ pytest.param(_IS_PK, id="required_field_is_pk"), pytest.param(_IS_CURSOR_FIELD, id="required_field_is_cursor_field"), - ] + ], ) def test_field_is_composite(is_pk: bool): inferrer = _create_inferrer_with_required_field(is_pk, [["property 1"], ["property 2"]]) - inferrer.accumulate(AirbyteRecordMessage(stream=_STREAM_NAME, data={"property 1": _ANY_VALUE, "property 2": _ANY_VALUE}, emitted_at=NOW)) + inferrer.accumulate( + AirbyteRecordMessage(stream=_STREAM_NAME, data={"property 1": _ANY_VALUE, "property 2": _ANY_VALUE}, emitted_at=NOW) + ) assert inferrer.get_stream_schema(_STREAM_NAME)["required"] == ["property 1", "property 2"] @@ -323,12 +293,14 @@ def test_field_is_composite(is_pk: bool): [ pytest.param(_IS_PK, id="required_field_is_pk"), pytest.param(_IS_CURSOR_FIELD, id="required_field_is_cursor_field"), - ] + ], ) def test_field_is_composite_and_nested(is_pk: bool): inferrer = _create_inferrer_with_required_field(is_pk, [["property 1", "nested"], ["property 2"]]) - 
inferrer.accumulate(AirbyteRecordMessage(stream=_STREAM_NAME, data={"property 1": {"nested": _ANY_VALUE}, "property 2": _ANY_VALUE}, emitted_at=NOW)) + inferrer.accumulate( + AirbyteRecordMessage(stream=_STREAM_NAME, data={"property 1": {"nested": _ANY_VALUE}, "property 2": _ANY_VALUE}, emitted_at=NOW) + ) assert inferrer.get_stream_schema(_STREAM_NAME)["required"] == ["property 1", "property 2"] assert inferrer.get_stream_schema(_STREAM_NAME)["properties"]["property 1"]["type"] == "object" diff --git a/airbyte-cdk/python/unit_tests/utils/test_traced_exception.py b/airbyte-cdk/python/unit_tests/utils/test_traced_exception.py index e0d3b9a50353..ea559a319467 100644 --- a/airbyte-cdk/python/unit_tests/utils/test_traced_exception.py +++ b/airbyte-cdk/python/unit_tests/utils/test_traced_exception.py @@ -2,20 +2,21 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -import json import pytest -from airbyte_cdk.models.airbyte_protocol import ( +from airbyte_cdk.models import ( AirbyteErrorTraceMessage, AirbyteMessage, + AirbyteMessageSerializer, AirbyteTraceMessage, FailureType, Status, + StreamDescriptor, TraceType, ) -from airbyte_cdk.models.airbyte_protocol import Type as MessageType +from airbyte_cdk.models import Type as MessageType from airbyte_cdk.utils.traced_exception import AirbyteTracedException -from airbyte_protocol.models import StreamDescriptor +from orjson import orjson _AN_EXCEPTION = ValueError("An exception") _A_STREAM_DESCRIPTOR = StreamDescriptor(name="a_stream") @@ -90,12 +91,12 @@ def test_emit_message(capsys): ) expected_message = AirbyteMessage( - type="TRACE", + type=MessageType.TRACE, trace=AirbyteTraceMessage( - type="ERROR", + type=TraceType.ERROR, emitted_at=0.0, error=AirbyteErrorTraceMessage( - failure_type="system_error", + failure_type=FailureType.system_error, message="user-friendly message", internal_message="internal message", stack_trace="RuntimeError: oh no\n", @@ -106,9 +107,8 @@ def test_emit_message(capsys): 
traced_exc.emit_message() stdout = capsys.readouterr().out - printed_message = AirbyteMessage.parse_obj(json.loads(stdout)) + printed_message = AirbyteMessageSerializer.load(orjson.loads(stdout)) printed_message.trace.emitted_at = 0.0 - assert printed_message == expected_message From b1df9b8e48c2dfe9113654593634c0544ac5c61b Mon Sep 17 00:00:00 2001 From: artem1205 Date: Mon, 2 Sep 2024 15:51:47 +0000 Subject: [PATCH 02/51] =?UTF-8?q?=F0=9F=A4=96=20major=20bump=20Python=20CD?= =?UTF-8?q?K=20to=20version=205.0.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index d2ef5a9f8cf7..475af6171c39 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 5.0.0 +Replace pydantic BaseModel with dataclass + ## 4.6.2 use orjson instead of json to speed up JSON parsing diff --git a/airbyte-cdk/python/pyproject.toml b/airbyte-cdk/python/pyproject.toml index 4b07bed0626f..8cf6ef4b3007 100644 --- a/airbyte-cdk/python/pyproject.toml +++ b/airbyte-cdk/python/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "airbyte-cdk" -version = "4.6.2" +version = "5.0.0" description = "A framework for writing Airbyte Connectors." 
authors = ["Airbyte "] license = "MIT" From 6eb0d36ae431d84dd32b45d6ab5ccf67d5e1c930 Mon Sep 17 00:00:00 2001 From: artem1205 Date: Mon, 2 Sep 2024 15:58:16 +0000 Subject: [PATCH 03/51] =?UTF-8?q?=F0=9F=A4=96=20Cut=20version=205.0.0=20of?= =?UTF-8?q?=20source-declarative-manifest?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source-declarative-manifest/metadata.yaml | 2 +- .../source-declarative-manifest/poetry.lock | 81 ++++++++++++++++--- .../pyproject.toml | 4 +- docs/integrations/sources/low-code.md | 1 + 4 files changed, 72 insertions(+), 16 deletions(-) diff --git a/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml index 223ef9e08551..23b73ffdba61 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml +++ b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml @@ -8,7 +8,7 @@ data: connectorType: source definitionId: 64a2f99c-542f-4af8-9a6f-355f1217b436 # This version should not be updated manually - it is updated by the CDK release workflow. - dockerImageTag: 4.6.2 + dockerImageTag: 5.0.0 dockerRepository: airbyte/source-declarative-manifest # This page is hidden from the docs for now, since the connector is not in any Airbyte registries. documentationUrl: https://docs.airbyte.com/integrations/sources/low-code diff --git a/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock index 9a2a9d659bd2..f340c8c6cbbd 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock +++ b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock @@ -2,17 +2,17 @@ [[package]] name = "airbyte-cdk" -version = "4.6.2" +version = "5.0.0" description = "A framework for writing Airbyte Connectors." 
optional = false python-versions = "<4.0,>=3.10" files = [ - {file = "airbyte_cdk-4.6.2-py3-none-any.whl", hash = "sha256:3a37bd96c4b4f874b15fc18839b1e163eb30d1e4ef80d7dde2854e6a48efe934"}, - {file = "airbyte_cdk-4.6.2.tar.gz", hash = "sha256:c034f11ba6abe73dd7346ce2bc7017ff71ef0db1fd1ae86fb86beaeae35d8baf"}, + {file = "airbyte_cdk-5.0.0-py3-none-any.whl", hash = "sha256:2d6b311702720914d058ea5c73f7a66fbf50f763544a6f77d088122bd272e4bd"}, + {file = "airbyte_cdk-5.0.0.tar.gz", hash = "sha256:72e0f445328bfb37529b485d3f3ef6fddbdb9af653542b44c7ba3792b3636326"}, ] [package.dependencies] -airbyte-protocol-models-pdv2 = ">=0.12.2,<0.13.0" +airbyte-protocol-models-dataclasses = ">=0.13,<0.14" backoff = "*" cachetools = "*" cryptography = ">=42.0.5,<43.0.0" @@ -35,6 +35,7 @@ pytz = "2024.1" PyYAML = ">=6.0.1,<7.0.0" requests = "*" requests_cache = "*" +serpyco-rs = ">=1.10.2,<2.0.0" wcmatch = "8.4" [package.extras] @@ -43,19 +44,16 @@ sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] -name = "airbyte-protocol-models-pdv2" -version = "0.12.2" -description = "Declares the Airbyte Protocol." +name = "airbyte-protocol-models-dataclasses" +version = "0.13.0" +description = "Declares the Airbyte Protocol using Python Dataclasses. 
Dataclasses in Python have less performance overhead compared to Pydantic models, making them a more efficient choice for scenarios where speed and memory usage are critical" optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models_pdv2-0.12.2-py3-none-any.whl", hash = "sha256:8b3f9d0388928547cdf2e9134c0d589e4bcaa6f63bf71a21299f6824bfb7ad0e"}, - {file = "airbyte_protocol_models_pdv2-0.12.2.tar.gz", hash = "sha256:130c9ab289f3f53749ce63ff1abbfb67a44b7e5bd2794865315a2976138b672b"}, + {file = "airbyte_protocol_models_dataclasses-0.13.0-py3-none-any.whl", hash = "sha256:0aedb99ffc4f9aab0ce91bba2c292fa17cd8fd4b42eeba196d6a16c20bbbd7a5"}, + {file = "airbyte_protocol_models_dataclasses-0.13.0.tar.gz", hash = "sha256:72e67850d661e2808406aec5839b3158ebb94d3553b798dbdae1b4a278548d2f"}, ] -[package.dependencies] -pydantic = ">=2.7.2,<3.0.0" - [[package]] name = "annotated-types" version = "0.7.0" @@ -67,6 +65,17 @@ files = [ {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] +[[package]] +name = "attributes-doc" +version = "0.4.0" +description = "PEP 224 implementation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "attributes-doc-0.4.0.tar.gz", hash = "sha256:b1576c94a714e9fc2c65c47cf10d0c8e1a5f7c4f5ae7f69006be108d95cbfbfb"}, + {file = "attributes_doc-0.4.0-py2.py3-none-any.whl", hash = "sha256:4c3007d9e58f3a6cb4b9c614c4d4ce2d92161581f28e594ddd8241cc3a113bdd"}, +] + [[package]] name = "attrs" version = "23.2.0" @@ -1339,6 +1348,52 @@ requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] +[[package]] +name = "serpyco-rs" +version = "1.10.2" +description = "" +optional = false +python-versions = ">=3.9" +files = [ + {file = "serpyco_rs-1.10.2-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e01d824fdebb9bded57ec40b9ac0ca3b312ad617fd5deba61113a3b23bcb915d"}, + {file = 
"serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef9a31f8d62c17b1ccfffb3e91c5aed2d6fd2187c7611ee3ca1b572046150cd"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aab2241b2d87bca5f15d5d34a3948b1c9ad1724cc55d1332e0c5325aff02635f"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:87d8118e9ba6e37aee1b0f7c14b19fe494f1589dc81ae0cc5168812779e1bfab"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d18a77d23aeb49904b2462410e57b4027511158845291bf6251e5857a881d60"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8da7ff487ada75f6b724d6ef9e40cde5cf703a2b89e6a3f466a8db0049e153a"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5212fa00ff8874ecabca0cf5f11eb7c1291b55ec9ee6aa7ee3ae2ec344abcf7f"}, + {file = "serpyco_rs-1.10.2-cp310-none-win_amd64.whl", hash = "sha256:ff83f5296f0ab08e77d09a4888020e636d4385a642fec52eacd2ab480d0ec22c"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8d0e6d6546145ba30d6032381b27261e338f7c1b96b9fb0773a481970a809827"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf4d5c69d1fcd7007b7792cb5ea62a0702822f6f8982349f44b795677ab7414c"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9fc4c1241c0707bfdd93991c0a2cea3f51a17acad343d9b5c296fc0a9f044d78"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:413fe29db4cab826269371a89ff9ccbd897ee7ff0eaaf1090362fdb86d5b8beb"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:54ce4d5ac0ac4d62911998bfba1ac149a61c43f5dbfa23f831f0d87290c1861a"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd9c8a31440a3158c601fdcd523e77cd5fefa2ae5be061a4151c38a7a6060624"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8e323f5420c3e6f99627291a2d47d7fcd7f5c4433aaa6cc35e15d5b22ba19d6"}, + {file = "serpyco_rs-1.10.2-cp311-none-win_amd64.whl", hash = "sha256:743c1e1943f51883cb498c2c16c5f49bab2adb991c842077fcd0fa5a1658da25"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:6379d789daff44e5f535d7e1c0131b30cee86988e9561cc9d98e87021188220d"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805162d7b67fd08b04b1e2ef1deeaedc37c7ee24a200f24778fb98b9fe7f5cdd"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1366df15ae2548a8a063eca84b9a8c2af92ac55df73ce60a7c4f2dfe71e2526b"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:35d0a1a1a69ae074b123f6ad1487dc67717727d9dce4f95a393298743d60aafb"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a79517070e0b021803cafdf11d326e1149eac4a226443040e9fa1492c74337b"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bdd2b8d3b9160ddcab0400ca5e258c16e870ae49c6586ed5405c18e8910c957b"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:045965a32c651797a73c7b7165165ed0d78efc233af4bf24c47acd41d222fae8"}, + {file = "serpyco_rs-1.10.2-cp312-none-win_amd64.whl", hash = "sha256:c6c95f6c9e04af94c33e4e514291df7380c3960a155e9fe264ccaaa46d4d0de8"}, + {file = 
"serpyco_rs-1.10.2-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:f19a82836699d102b288b17ae370dd4d37af60ccd2254f5bfdbd053d168cecee"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c3830bb3f6a342825e27592e86baa46774bfb1f08c82dbf561b5f1380a18b48"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f726392e6380b1e7d642d7633ac27929c8616a59db0a54632f5a9ab80987e071"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9ce029f8f29f4f335d0f3c9e005b71d7e8a934735d9654e3f03ccc54d50c107a"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e1f011370259602b55141ce866bf31dcdc9d8b68105c32f18ee442bc651ee880"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14d9e22016e2860c1f524aa123cfadd4a4eea25af10d1be76cc3d97d9c85c2e2"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:441b8045f91f30120c00a1f617a0ad6f22c1753c6b98899e8476d6e7775a3667"}, + {file = "serpyco_rs-1.10.2-cp39-none-win_amd64.whl", hash = "sha256:a124608cc998e3854fc743dea5dd7d948edbeaa70c1c1777b6dbb4b64ce465b0"}, + {file = "serpyco_rs-1.10.2.tar.gz", hash = "sha256:9cf06956eb14b326e522c9665aa5136f8fd7ece2df8a393c2e84bee8204362d0"}, +] + +[package.dependencies] +attributes-doc = "*" +typing-extensions = "*" + [[package]] name = "setuptools" version = "72.1.0" @@ -1550,4 +1605,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.10,<3.12" -content-hash = "d4d9e3b95d04f7697dceb79e70c3705d9519801d056f0e75664d3f5fbc8ac14f" +content-hash = "a350f1c03f61ff0e4ff194913dde979aaf1cafb2cb7f2ea7219478bcba1b8981" diff --git a/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml 
b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml index 804f2b68f497..93a587a6b0f3 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml +++ b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml @@ -3,7 +3,7 @@ requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.6.2" +version = "5.0.0" name = "source-declarative-manifest" description = "Base source implementation for low-code sources." authors = ["Airbyte "] @@ -17,7 +17,7 @@ include = "source_declarative_manifest" [tool.poetry.dependencies] python = "^3.10,<3.12" -airbyte-cdk = "4.6.2" +airbyte-cdk = "5.0.0" [tool.poetry.scripts] source-declarative-manifest = "source_declarative_manifest.run:run" diff --git a/docs/integrations/sources/low-code.md b/docs/integrations/sources/low-code.md index 001c00b7b990..842ef74cb32d 100644 --- a/docs/integrations/sources/low-code.md +++ b/docs/integrations/sources/low-code.md @@ -9,6 +9,7 @@ The changelog below is automatically updated by the `bump_version` command as pa | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------- | +| 5.0.0 | 2024-09-02 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.0.0 | | 4.6.2 | 2024-08-29 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 4.6.2 | | 4.6.1 | 2024-08-28 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 4.6.1 | | 4.6.0 | 2024-08-28 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 4.6.0 | From fa6aa2b62c1e2018459df3b8bfa9f65e7a95be53 Mon Sep 17 00:00:00 2001 From: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Date: Tue, 3 Sep 2024 12:13:43 +0200 Subject: [PATCH 04/51] fix(source-declarative-manifest): 
fix models (#45088) Signed-off-by: Artem Inzhyyants --- .../source_declarative_manifest/run.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/airbyte-integrations/connectors/source-declarative-manifest/source_declarative_manifest/run.py b/airbyte-integrations/connectors/source-declarative-manifest/source_declarative_manifest/run.py index feb3f64b410f..4ef4c1908425 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/source_declarative_manifest/run.py +++ b/airbyte-integrations/connectors/source-declarative-manifest/source_declarative_manifest/run.py @@ -4,7 +4,6 @@ from __future__ import annotations import json -import os import pkgutil import sys from pathlib import Path @@ -12,7 +11,7 @@ from airbyte_cdk.connector import BaseConnector from airbyte_cdk.entrypoint import AirbyteEntrypoint, launch -from airbyte_cdk.models import AirbyteMessage, ConnectorSpecification, Type +from airbyte_cdk.models import AirbyteMessage, ConnectorSpecificationSerializer, Type from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource @@ -64,7 +63,7 @@ def handle_remote_manifest_command(args: List[str]) -> None: if args[0] == "spec": json_spec = pkgutil.get_data("source_declarative_manifest", "spec.json") spec_obj = json.loads(json_spec) - spec = ConnectorSpecification.parse_obj(spec_obj) + spec = ConnectorSpecificationSerializer.load(spec_obj) message = AirbyteMessage(type=Type.SPEC, spec=spec) print(AirbyteEntrypoint.airbyte_message_to_string(message)) From 844f52b3f153f72250b61ec0ad30e4d72a80f967 Mon Sep 17 00:00:00 2001 From: artem1205 Date: Tue, 3 Sep 2024 10:19:48 +0000 Subject: [PATCH 05/51] =?UTF-8?q?=F0=9F=A4=96=20patch=20bump=20Python=20CD?= =?UTF-8?q?K=20to=20version=205.0.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/CHANGELOG.md | 3 
+++ airbyte-cdk/python/pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 475af6171c39..fe8f4ca4aa44 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 5.0.1 +Fix source-declarative-manifest + ## 5.0.0 Replace pydantic BaseModel with dataclass diff --git a/airbyte-cdk/python/pyproject.toml b/airbyte-cdk/python/pyproject.toml index 8cf6ef4b3007..2c5d56c676fd 100644 --- a/airbyte-cdk/python/pyproject.toml +++ b/airbyte-cdk/python/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "airbyte-cdk" -version = "5.0.0" +version = "5.0.1" description = "A framework for writing Airbyte Connectors." authors = ["Airbyte "] license = "MIT" From b4ccb270d7aa3fb19ad6063e0d0f8a56a446b9a6 Mon Sep 17 00:00:00 2001 From: artem1205 Date: Tue, 3 Sep 2024 10:26:01 +0000 Subject: [PATCH 06/51] =?UTF-8?q?=F0=9F=A4=96=20Cut=20version=205.0.1=20of?= =?UTF-8?q?=20source-declarative-manifest?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connectors/source-declarative-manifest/metadata.yaml | 2 +- .../connectors/source-declarative-manifest/poetry.lock | 8 ++++---- .../connectors/source-declarative-manifest/pyproject.toml | 4 ++-- docs/integrations/sources/low-code.md | 1 + 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml index 23b73ffdba61..727ed9bb937f 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml +++ b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml @@ -8,7 +8,7 @@ data: connectorType: source definitionId: 64a2f99c-542f-4af8-9a6f-355f1217b436 # This version should not be updated manually - it is updated by 
the CDK release workflow. - dockerImageTag: 5.0.0 + dockerImageTag: 5.0.1 dockerRepository: airbyte/source-declarative-manifest # This page is hidden from the docs for now, since the connector is not in any Airbyte registries. documentationUrl: https://docs.airbyte.com/integrations/sources/low-code diff --git a/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock index f340c8c6cbbd..efd0ccc8b9bf 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock +++ b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-cdk" -version = "5.0.0" +version = "5.0.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = "<4.0,>=3.10" files = [ - {file = "airbyte_cdk-5.0.0-py3-none-any.whl", hash = "sha256:2d6b311702720914d058ea5c73f7a66fbf50f763544a6f77d088122bd272e4bd"}, - {file = "airbyte_cdk-5.0.0.tar.gz", hash = "sha256:72e0f445328bfb37529b485d3f3ef6fddbdb9af653542b44c7ba3792b3636326"}, + {file = "airbyte_cdk-5.0.1-py3-none-any.whl", hash = "sha256:9925a07a4f15c2d61775c09cf9359d477f7bd4414cc3986b643cfb2b8325b33d"}, + {file = "airbyte_cdk-5.0.1.tar.gz", hash = "sha256:cc1b6c55dbc4709784552132aeac06ab0ace97dcafdd9ed4db6738619fdab260"}, ] [package.dependencies] @@ -1605,4 +1605,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.10,<3.12" -content-hash = "a350f1c03f61ff0e4ff194913dde979aaf1cafb2cb7f2ea7219478bcba1b8981" +content-hash = "fbf3a4e72f12e002577854e63cb758d4bad25d928595a272cc023c859f62c1be" diff --git a/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml index 93a587a6b0f3..da92760e60ae 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml +++ 
b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml @@ -3,7 +3,7 @@ requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "5.0.0" +version = "5.0.1" name = "source-declarative-manifest" description = "Base source implementation for low-code sources." authors = ["Airbyte "] @@ -17,7 +17,7 @@ include = "source_declarative_manifest" [tool.poetry.dependencies] python = "^3.10,<3.12" -airbyte-cdk = "5.0.0" +airbyte-cdk = "5.0.1" [tool.poetry.scripts] source-declarative-manifest = "source_declarative_manifest.run:run" diff --git a/docs/integrations/sources/low-code.md b/docs/integrations/sources/low-code.md index 842ef74cb32d..39211aba113e 100644 --- a/docs/integrations/sources/low-code.md +++ b/docs/integrations/sources/low-code.md @@ -9,6 +9,7 @@ The changelog below is automatically updated by the `bump_version` command as pa | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------- | +| 5.0.1 | 2024-09-03 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.0.1 | | 5.0.0 | 2024-09-02 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.0.0 | | 4.6.2 | 2024-08-29 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 4.6.2 | | 4.6.1 | 2024-08-28 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 4.6.1 | From f80584275520c5d169aac595c20c275bf764e93d Mon Sep 17 00:00:00 2001 From: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Date: Tue, 3 Sep 2024 13:21:53 +0200 Subject: [PATCH 07/51] feat(source-hardcoded-records): bump CDK version (#45097) Signed-off-by: Artem Inzhyyants --- .../source-hardcoded-records/metadata.yaml | 2 +- .../source-hardcoded-records/poetry.lock | 87 +++++++++++++++---- 
.../source-hardcoded-records/pyproject.toml | 2 +- .../integrations/sources/hardcoded-records.md | 19 ++-- 4 files changed, 83 insertions(+), 27 deletions(-) diff --git a/airbyte-integrations/connectors/source-hardcoded-records/metadata.yaml b/airbyte-integrations/connectors/source-hardcoded-records/metadata.yaml index a3f11f854934..2e9c64784dc6 100644 --- a/airbyte-integrations/connectors/source-hardcoded-records/metadata.yaml +++ b/airbyte-integrations/connectors/source-hardcoded-records/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: api connectorType: source definitionId: 084124ab-22db-4019-b36d-630418541bf7 - dockerImageTag: 0.0.9 + dockerImageTag: 0.0.10 dockerRepository: airbyte/source-hardcoded-records documentationUrl: https://docs.airbyte.com/integrations/sources/hardcoded-records githubIssueLabel: source-hardcoded-records diff --git a/airbyte-integrations/connectors/source-hardcoded-records/poetry.lock b/airbyte-integrations/connectors/source-hardcoded-records/poetry.lock index 05217867f899..a40c537e2405 100644 --- a/airbyte-integrations/connectors/source-hardcoded-records/poetry.lock +++ b/airbyte-integrations/connectors/source-hardcoded-records/poetry.lock @@ -1,18 +1,18 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "4.6.2" +version = "5.0.0" description = "A framework for writing Airbyte Connectors." 
optional = false python-versions = "<4.0,>=3.10" files = [ - {file = "airbyte_cdk-4.6.2-py3-none-any.whl", hash = "sha256:3a37bd96c4b4f874b15fc18839b1e163eb30d1e4ef80d7dde2854e6a48efe934"}, - {file = "airbyte_cdk-4.6.2.tar.gz", hash = "sha256:c034f11ba6abe73dd7346ce2bc7017ff71ef0db1fd1ae86fb86beaeae35d8baf"}, + {file = "airbyte_cdk-5.0.0-py3-none-any.whl", hash = "sha256:2d6b311702720914d058ea5c73f7a66fbf50f763544a6f77d088122bd272e4bd"}, + {file = "airbyte_cdk-5.0.0.tar.gz", hash = "sha256:72e0f445328bfb37529b485d3f3ef6fddbdb9af653542b44c7ba3792b3636326"}, ] [package.dependencies] -airbyte-protocol-models-pdv2 = ">=0.12.2,<0.13.0" +airbyte-protocol-models-dataclasses = ">=0.13,<0.14" backoff = "*" cachetools = "*" cryptography = ">=42.0.5,<43.0.0" @@ -35,6 +35,7 @@ pytz = "2024.1" PyYAML = ">=6.0.1,<7.0.0" requests = "*" requests_cache = "*" +serpyco-rs = ">=1.10.2,<2.0.0" wcmatch = "8.4" [package.extras] @@ -43,19 +44,16 @@ sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] -name = "airbyte-protocol-models-pdv2" -version = "0.12.2" -description = "Declares the Airbyte Protocol." +name = "airbyte-protocol-models-dataclasses" +version = "0.13.0" +description = "Declares the Airbyte Protocol using Python Dataclasses. 
Dataclasses in Python have less performance overhead compared to Pydantic models, making them a more efficient choice for scenarios where speed and memory usage are critical" optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models_pdv2-0.12.2-py3-none-any.whl", hash = "sha256:8b3f9d0388928547cdf2e9134c0d589e4bcaa6f63bf71a21299f6824bfb7ad0e"}, - {file = "airbyte_protocol_models_pdv2-0.12.2.tar.gz", hash = "sha256:130c9ab289f3f53749ce63ff1abbfb67a44b7e5bd2794865315a2976138b672b"}, + {file = "airbyte_protocol_models_dataclasses-0.13.0-py3-none-any.whl", hash = "sha256:0aedb99ffc4f9aab0ce91bba2c292fa17cd8fd4b42eeba196d6a16c20bbbd7a5"}, + {file = "airbyte_protocol_models_dataclasses-0.13.0.tar.gz", hash = "sha256:72e67850d661e2808406aec5839b3158ebb94d3553b798dbdae1b4a278548d2f"}, ] -[package.dependencies] -pydantic = ">=2.7.2,<3.0.0" - [[package]] name = "annotated-types" version = "0.7.0" @@ -99,6 +97,17 @@ files = [ {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, ] +[[package]] +name = "attributes-doc" +version = "0.4.0" +description = "PEP 224 implementation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "attributes-doc-0.4.0.tar.gz", hash = "sha256:b1576c94a714e9fc2c65c47cf10d0c8e1a5f7c4f5ae7f69006be108d95cbfbfb"}, + {file = "attributes_doc-0.4.0-py2.py3-none-any.whl", hash = "sha256:4c3007d9e58f3a6cb4b9c614c4d4ce2d92161581f28e594ddd8241cc3a113bdd"}, +] + [[package]] name = "attrs" version = "24.2.0" @@ -1449,15 +1458,61 @@ redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] yaml = ["pyyaml (>=6.0.1)"] +[[package]] +name = "serpyco-rs" +version = "1.10.2" +description = "" +optional = false +python-versions = ">=3.9" +files = [ + {file = "serpyco_rs-1.10.2-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e01d824fdebb9bded57ec40b9ac0ca3b312ad617fd5deba61113a3b23bcb915d"}, + {file = 
"serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef9a31f8d62c17b1ccfffb3e91c5aed2d6fd2187c7611ee3ca1b572046150cd"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aab2241b2d87bca5f15d5d34a3948b1c9ad1724cc55d1332e0c5325aff02635f"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:87d8118e9ba6e37aee1b0f7c14b19fe494f1589dc81ae0cc5168812779e1bfab"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d18a77d23aeb49904b2462410e57b4027511158845291bf6251e5857a881d60"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8da7ff487ada75f6b724d6ef9e40cde5cf703a2b89e6a3f466a8db0049e153a"}, + {file = "serpyco_rs-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5212fa00ff8874ecabca0cf5f11eb7c1291b55ec9ee6aa7ee3ae2ec344abcf7f"}, + {file = "serpyco_rs-1.10.2-cp310-none-win_amd64.whl", hash = "sha256:ff83f5296f0ab08e77d09a4888020e636d4385a642fec52eacd2ab480d0ec22c"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8d0e6d6546145ba30d6032381b27261e338f7c1b96b9fb0773a481970a809827"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf4d5c69d1fcd7007b7792cb5ea62a0702822f6f8982349f44b795677ab7414c"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9fc4c1241c0707bfdd93991c0a2cea3f51a17acad343d9b5c296fc0a9f044d78"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:413fe29db4cab826269371a89ff9ccbd897ee7ff0eaaf1090362fdb86d5b8beb"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:54ce4d5ac0ac4d62911998bfba1ac149a61c43f5dbfa23f831f0d87290c1861a"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd9c8a31440a3158c601fdcd523e77cd5fefa2ae5be061a4151c38a7a6060624"}, + {file = "serpyco_rs-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8e323f5420c3e6f99627291a2d47d7fcd7f5c4433aaa6cc35e15d5b22ba19d6"}, + {file = "serpyco_rs-1.10.2-cp311-none-win_amd64.whl", hash = "sha256:743c1e1943f51883cb498c2c16c5f49bab2adb991c842077fcd0fa5a1658da25"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:6379d789daff44e5f535d7e1c0131b30cee86988e9561cc9d98e87021188220d"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805162d7b67fd08b04b1e2ef1deeaedc37c7ee24a200f24778fb98b9fe7f5cdd"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1366df15ae2548a8a063eca84b9a8c2af92ac55df73ce60a7c4f2dfe71e2526b"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:35d0a1a1a69ae074b123f6ad1487dc67717727d9dce4f95a393298743d60aafb"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a79517070e0b021803cafdf11d326e1149eac4a226443040e9fa1492c74337b"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bdd2b8d3b9160ddcab0400ca5e258c16e870ae49c6586ed5405c18e8910c957b"}, + {file = "serpyco_rs-1.10.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:045965a32c651797a73c7b7165165ed0d78efc233af4bf24c47acd41d222fae8"}, + {file = "serpyco_rs-1.10.2-cp312-none-win_amd64.whl", hash = "sha256:c6c95f6c9e04af94c33e4e514291df7380c3960a155e9fe264ccaaa46d4d0de8"}, + {file = 
"serpyco_rs-1.10.2-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:f19a82836699d102b288b17ae370dd4d37af60ccd2254f5bfdbd053d168cecee"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c3830bb3f6a342825e27592e86baa46774bfb1f08c82dbf561b5f1380a18b48"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f726392e6380b1e7d642d7633ac27929c8616a59db0a54632f5a9ab80987e071"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9ce029f8f29f4f335d0f3c9e005b71d7e8a934735d9654e3f03ccc54d50c107a"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e1f011370259602b55141ce866bf31dcdc9d8b68105c32f18ee442bc651ee880"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14d9e22016e2860c1f524aa123cfadd4a4eea25af10d1be76cc3d97d9c85c2e2"}, + {file = "serpyco_rs-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:441b8045f91f30120c00a1f617a0ad6f22c1753c6b98899e8476d6e7775a3667"}, + {file = "serpyco_rs-1.10.2-cp39-none-win_amd64.whl", hash = "sha256:a124608cc998e3854fc743dea5dd7d948edbeaa70c1c1777b6dbb4b64ce465b0"}, + {file = "serpyco_rs-1.10.2.tar.gz", hash = "sha256:9cf06956eb14b326e522c9665aa5136f8fd7ece2df8a393c2e84bee8204362d0"}, +] + +[package.dependencies] +attributes-doc = "*" +typing-extensions = "*" + [[package]] name = "setuptools" -version = "74.0.0" +version = "74.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"}, - {file = "setuptools-74.0.0.tar.gz", hash = 
"sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"}, + {file = "setuptools-74.1.0-py3-none-any.whl", hash = "sha256:cee604bd76cc092355a4e43ec17aee5369095974f41f088676724dc6bc2c9ef8"}, + {file = "setuptools-74.1.0.tar.gz", hash = "sha256:bea195a800f510ba3a2bc65645c88b7e016fe36709fefc58a880c4ae8a0138d7"}, ] [package.extras] diff --git a/airbyte-integrations/connectors/source-hardcoded-records/pyproject.toml b/airbyte-integrations/connectors/source-hardcoded-records/pyproject.toml index 91f163ba88b2..a0113159019e 100644 --- a/airbyte-integrations/connectors/source-hardcoded-records/pyproject.toml +++ b/airbyte-integrations/connectors/source-hardcoded-records/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.0.9" +version = "0.0.10" name = "source-hardcoded-records" description = "Source implementation for hardcoded recprds." authors = [ "Airbyte ",] diff --git a/docs/integrations/sources/hardcoded-records.md b/docs/integrations/sources/hardcoded-records.md index a9ed57ca5be5..bd271069d86e 100644 --- a/docs/integrations/sources/hardcoded-records.md +++ b/docs/integrations/sources/hardcoded-records.md @@ -149,14 +149,15 @@ None! 
| Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:-------------------------| -| 0.0.9 | 2024-08-31 | [45003](https://github.com/airbytehq/airbyte/pull/45003) | Update dependencies | -| 0.0.8 | 2024-08-24 | [44630](https://github.com/airbytehq/airbyte/pull/44630) | Update dependencies | -| 0.0.7 | 2024-08-17 | [44331](https://github.com/airbytehq/airbyte/pull/44331) | Update dependencies | -| 0.0.6 | 2024-08-12 | [43823](https://github.com/airbytehq/airbyte/pull/43823) | Update dependencies | -| 0.0.5 | 2024-08-10 | [43645](https://github.com/airbytehq/airbyte/pull/43645) | Update dependencies | -| 0.0.4 | 2024-08-03 | [43244](https://github.com/airbytehq/airbyte/pull/43244) | Update dependencies | -| 0.0.3 | 2024-07-29 | [42850](https://github.com/airbytehq/airbyte/pull/42850) | Update Airbyte CDK to v4 | -| 0.0.2 | 2024-07-27 | [42828](https://github.com/airbytehq/airbyte/pull/42828) | Update dependencies | -| 0.0.1 | 2024-07-23 | [42434](https://github.com/airbytehq/airbyte/pull/42434) | Initial Release | +| 0.0.10 | 2024-09-03 | [45097](https://github.com/airbytehq/airbyte/pull/45097) | Update CDK version to ^5 | +| 0.0.9 | 2024-08-31 | [45003](https://github.com/airbytehq/airbyte/pull/45003) | Update dependencies | +| 0.0.8 | 2024-08-24 | [44630](https://github.com/airbytehq/airbyte/pull/44630) | Update dependencies | +| 0.0.7 | 2024-08-17 | [44331](https://github.com/airbytehq/airbyte/pull/44331) | Update dependencies | +| 0.0.6 | 2024-08-12 | [43823](https://github.com/airbytehq/airbyte/pull/43823) | Update dependencies | +| 0.0.5 | 2024-08-10 | [43645](https://github.com/airbytehq/airbyte/pull/43645) | Update dependencies | +| 0.0.4 | 2024-08-03 | [43244](https://github.com/airbytehq/airbyte/pull/43244) | Update dependencies | +| 0.0.3 | 2024-07-29 | [42850](https://github.com/airbytehq/airbyte/pull/42850) | Update 
Airbyte CDK to v4 | +| 0.0.2 | 2024-07-27 | [42828](https://github.com/airbytehq/airbyte/pull/42828) | Update dependencies | +| 0.0.1 | 2024-07-23 | [42434](https://github.com/airbytehq/airbyte/pull/42434) | Initial Release | From 5d1c2bf93b40a7704c0d924321361c2df09d3033 Mon Sep 17 00:00:00 2001 From: Malik Diarra Date: Tue, 3 Sep 2024 09:15:17 -0700 Subject: [PATCH 08/51] doc: app-name is now called appName (#44882) --- docs/enterprise-setup/implementation-guide.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/enterprise-setup/implementation-guide.md b/docs/enterprise-setup/implementation-guide.md index e36cbb69a4a6..866a957e973d 100644 --- a/docs/enterprise-setup/implementation-guide.md +++ b/docs/enterprise-setup/implementation-guide.md @@ -233,8 +233,8 @@ auth: secretName: airbyte-config-secrets ## Name of your Kubernetes secret. oidc: domain: ## e.g. company.example - app-name: ## e.g. airbyte - display-name: ## e.g. Company SSO - optional, falls back to app-name if not provided + appName: ## e.g. airbyte + display-name: ## e.g. Company SSO - optional, falls back to appName if not provided clientIdSecretKey: client-id clientSecretSecretKey: client-secret ``` @@ -265,8 +265,8 @@ global: secretName: airbyte-config-secrets ## Name of your Kubernetes secret. oidc: domain: ## e.g. company.example - app-name: ## e.g. airbyte - display-name: ## e.g. Company SSO - optional, falls back to app-name if not provided + appName: ## e.g. airbyte + display-name: ## e.g. 
Company SSO - optional, falls back to appName if not provided clientIdSecretKey: client-id clientSecretSecretKey: client-secret ``` From 09fef1c27af1abd05553b581e702bebb44dcf768 Mon Sep 17 00:00:00 2001 From: Maxime Carbonneau-Leclerc <3360483+maxi297@users.noreply.github.com> Date: Tue, 3 Sep 2024 12:47:48 -0400 Subject: [PATCH 09/51] Fix pagination issue while setting is_resumable properly (#45101) --- airbyte-integrations/connectors/source-stripe/metadata.yaml | 2 +- .../connectors/source-stripe/pyproject.toml | 2 +- .../connectors/source-stripe/source_stripe/streams.py | 6 ++++++ docs/integrations/sources/stripe.md | 1 + 4 files changed, 9 insertions(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-stripe/metadata.yaml b/airbyte-integrations/connectors/source-stripe/metadata.yaml index 18346e5c79d8..e65e8e2993d9 100644 --- a/airbyte-integrations/connectors/source-stripe/metadata.yaml +++ b/airbyte-integrations/connectors/source-stripe/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: e094cb9a-26de-4645-8761-65c0c425d1de - dockerImageTag: 5.5.2 + dockerImageTag: 5.5.3 dockerRepository: airbyte/source-stripe documentationUrl: https://docs.airbyte.com/integrations/sources/stripe erdUrl: https://dbdocs.io/airbyteio/source-stripe?view=relationships diff --git a/airbyte-integrations/connectors/source-stripe/pyproject.toml b/airbyte-integrations/connectors/source-stripe/pyproject.toml index b19c3ad6e47b..117551f89db3 100644 --- a/airbyte-integrations/connectors/source-stripe/pyproject.toml +++ b/airbyte-integrations/connectors/source-stripe/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "5.5.2" +version = "5.5.3" name = "source-stripe" description = "Source implementation for Stripe." 
authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-stripe/source_stripe/streams.py b/airbyte-integrations/connectors/source-stripe/source_stripe/streams.py index cae3f9509135..5b13588403f2 100644 --- a/airbyte-integrations/connectors/source-stripe/source_stripe/streams.py +++ b/airbyte-integrations/connectors/source-stripe/source_stripe/streams.py @@ -93,6 +93,7 @@ def extract_records( class StripeStream(HttpStream, ABC): + is_resumable = False url_base = "https://api.stripe.com/v1/" DEFAULT_SLICE_RANGE = 365 transformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) @@ -234,6 +235,7 @@ def get_parent_stream(self, stream_state: Mapping[str, Any]) -> StripeStream: class CreatedCursorIncrementalStripeStream(StripeStream): + is_resumable = True # Stripe returns most recently created objects first, so we don't want to persist state until the entire stream has been read state_checkpoint_interval = math.inf @@ -337,6 +339,7 @@ def path(self, **kwargs): class UpdatedCursorIncrementalStripeStream(StripeStream): + is_resumable = True """ `CreatedCursorIncrementalStripeStream` does not provide a way to read updated data since given date because the API does not allow to do this. It only returns newly created entities since given date. So to have all the updated data as well we need to make use of the Events API, @@ -452,6 +455,7 @@ def get_parent_stream(self, stream_state: Mapping[str, Any]) -> StripeStream: class IncrementalStripeStream(StripeStream): + is_resumable = True """ This class combines both normal incremental sync and event based sync. It uses common endpoints for sliced data syncs in the full refresh sync mode and initial incremental sync. For incremental syncs with a state, event based sync comes into action. 
@@ -720,6 +724,7 @@ def get_parent_stream(self, stream_state: Mapping[str, Any]) -> StripeStream: class UpdatedCursorIncrementalStripeLazySubStream(StripeStream, ABC): + is_resumable = True """ This stream uses StripeLazySubStream under the hood to run full refresh or initial incremental syncs. In case of subsequent incremental syncs, it uses the UpdatedCursorIncrementalStripeStream class. @@ -793,6 +798,7 @@ def read_records( class ParentIncrementalStipeSubStream(StripeSubStream): + is_resumable = True """ This stream differs from others in that it runs parent stream in exactly same sync mode it is run itself to generate stream slices. It also uses regular /v1 API endpoints to sync data no matter what the sync mode is. This means that the event-based API can only diff --git a/docs/integrations/sources/stripe.md b/docs/integrations/sources/stripe.md index c157fc3b1816..ab2d04acc7cb 100644 --- a/docs/integrations/sources/stripe.md +++ b/docs/integrations/sources/stripe.md @@ -236,6 +236,7 @@ Each record is marked with `is_deleted` flag when the appropriate event happens | Version | Date | Pull Request | Subject | |:--------|:-----------| :-------------------------------------------------------- |:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 5.5.3 | 2024-09-03 | [45101](https://github.com/airbytehq/airbyte/pull/45101) | Fix regression following pagination issue fix | | 5.5.2 | 2024-08-28 | [44862](https://github.com/airbytehq/airbyte/pull/44862) | Fix RFR pagination issue | | 5.5.1 | 2024-08-10 | [43105](https://github.com/airbytehq/airbyte/pull/43105) | Update dependencies | | 5.5.0 | 2024-08-08 | [43302](https://github.com/airbytehq/airbyte/pull/43302) | Fix problem with state not updating and upgrade cdk 4 From f85baeb02a8c6ac24906e0aa18fc606b3f3ee1c2 Mon Sep 17 
00:00:00 2001 From: Patrick Nilan Date: Tue, 3 Sep 2024 13:13:23 -0700 Subject: [PATCH 10/51] (source-s3) - Upgrade CDK 3.9.6 (#44908) --- .../connectors/source-s3/metadata.yaml | 2 +- .../connectors/source-s3/poetry.lock | 389 ++++++++---------- .../connectors/source-s3/pyproject.toml | 12 +- docs/integrations/sources/s3.md | 1 + 4 files changed, 181 insertions(+), 223 deletions(-) diff --git a/airbyte-integrations/connectors/source-s3/metadata.yaml b/airbyte-integrations/connectors/source-s3/metadata.yaml index c12e665aaa5e..ae8674372ad8 100644 --- a/airbyte-integrations/connectors/source-s3/metadata.yaml +++ b/airbyte-integrations/connectors/source-s3/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: file connectorType: source definitionId: 69589781-7828-43c5-9f63-8925b1c1ccc2 - dockerImageTag: 4.7.8 + dockerImageTag: 4.8.0 dockerRepository: airbyte/source-s3 documentationUrl: https://docs.airbyte.com/integrations/sources/s3 githubIssueLabel: source-s3 diff --git a/airbyte-integrations/connectors/source-s3/poetry.lock b/airbyte-integrations/connectors/source-s3/poetry.lock index a660ac3bff76..786fc1cec03d 100644 --- a/airbyte-integrations/connectors/source-s3/poetry.lock +++ b/airbyte-integrations/connectors/source-s3/poetry.lock @@ -204,10 +204,7 @@ files = [ [package.dependencies] jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" -urllib3 = [ - {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, - {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, -] +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} [package.extras] crt = ["awscrt (==0.21.2)"] @@ -873,25 +870,6 @@ files = [ {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] -[[package]] -name = "importlib-metadata" -version = "8.4.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - 
{file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, - {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] - [[package]] name = "iniconfig" version = "2.0.0" @@ -1051,13 +1029,13 @@ six = "*" [[package]] name = "langsmith" -version = "0.1.108" +version = "0.1.110" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.108-py3-none-any.whl", hash = "sha256:407f318b0989e33f2cd30bc2fbd443e4ddfa7c2a93de7f795fb6b119b015583c"}, - {file = "langsmith-0.1.108.tar.gz", hash = "sha256:42f603e2d5770ba36093951bdb29eaab22451cb12ab8c062340c722cf60d4cec"}, + {file = "langsmith-0.1.110-py3-none-any.whl", hash = "sha256:316d279e3853f5e90e462f9c035eeb468d042f2a21a269c1102d65f3dccdc334"}, + {file = "langsmith-0.1.110.tar.gz", hash = "sha256:9a619dfe22a67a05a05091f0677b9c842499faec5f051b31afcd901b6627d0a3"}, ] [package.dependencies] @@ -1231,9 +1209,6 @@ files = [ {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, ] -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - [package.extras] docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", 
"mkdocstrings[python]"] testing = ["coverage", "pyyaml"] @@ -1530,70 +1505,75 @@ files = [ [[package]] name = "pandas" -version = "2.0.3" +version = "2.2.2" description = "Powerful data structures for data analysis, time series, and statistics" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, - {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, - {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, - {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, - {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, - {file = 
"pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, - {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, - {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, - {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, - {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, - {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, 
+ {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = 
"pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = 
"pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, ] [package.dependencies] numpy = [ - {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" -tzdata = ">=2022.1" +tzdata = ">=2022.7" [package.extras] -all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] -aws = ["s3fs (>=2021.08.0)"] -clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] -compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] -computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd 
(>=2.0.1)", "xlsxwriter (>=1.4.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2021.07.0)"] -gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] -hdf5 = ["tables (>=3.6.1)"] -html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] -mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] -spss = ["pyreadstat (>=1.1.2)"] -sql-other = ["SQLAlchemy (>=1.4.16)"] -test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.6.3)"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb 
(>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] [[package]] name = "pdf2image" @@ -2345,119 +2325,119 @@ files = [ [[package]] name = "rapidfuzz" -version = "3.9.6" +version = "3.9.7" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.8" files = [ - {file = "rapidfuzz-3.9.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a7ed0d0b9c85720f0ae33ac5efc8dc3f60c1489dad5c29d735fbdf2f66f0431f"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f3deff6ab7017ed21b9aec5874a07ad13e6b2a688af055837f88b743c7bfd947"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3f9fc060160507b2704f7d1491bd58453d69689b580cbc85289335b14fe8ca"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e86c2b3827fa6169ad6e7d4b790ce02a20acefb8b78d92fa4249589bbc7a2c"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f982e1aafb4bd8207a5e073b1efef9e68a984e91330e1bbf364f9ed157ed83f0"}, - {file = 
"rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9196a51d0ec5eaaaf5bca54a85b7b1e666fc944c332f68e6427503af9fb8c49e"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb5a514064e02585b1cc09da2fe406a6dc1a7e5f3e92dd4f27c53e5f1465ec81"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e3a4244f65dbc3580b1275480118c3763f9dc29fc3dd96610560cb5e140a4d4a"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f6ebb910a702e41641e1e1dada3843bc11ba9107a33c98daef6945a885a40a07"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:624fbe96115fb39addafa288d583b5493bc76dab1d34d0ebba9987d6871afdf9"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1c59f1c1507b7a557cf3c410c76e91f097460da7d97e51c985343798e9df7a3c"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f6f0256cb27b6a0fb2e1918477d1b56473cd04acfa245376a342e7c15806a396"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-win32.whl", hash = "sha256:24d473d00d23a30a85802b502b417a7f5126019c3beec91a6739fe7b95388b24"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:248f6d2612e661e2b5f9a22bbd5862a1600e720da7bb6ad8a55bb1548cdfa423"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-win_arm64.whl", hash = "sha256:e03fdf0e74f346ed7e798135df5f2a0fb8d6b96582b00ebef202dcf2171e1d1d"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52e4675f642fbc85632f691b67115a243cd4d2a47bdcc4a3d9a79e784518ff97"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1f93a2f13038700bd245b927c46a2017db3dcd4d4ff94687d74b5123689b873b"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b70500bca460264b8141d8040caee22e9cf0418c5388104ff0c73fb69ee28f"}, - {file = 
"rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1e037fb89f714a220f68f902fc6300ab7a33349f3ce8ffae668c3b3a40b0b06"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6792f66d59b86ccfad5e247f2912e255c85c575789acdbad8e7f561412ffed8a"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68d9cffe710b67f1969cf996983608cee4490521d96ea91d16bd7ea5dc80ea98"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63daaeeea76da17fa0bbe7fb05cba8ed8064bb1a0edf8360636557f8b6511961"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d214e063bffa13e3b771520b74f674b22d309b5720d4df9918ff3e0c0f037720"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ed443a2062460f44c0346cb9d269b586496b808c2419bbd6057f54061c9b9c75"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5b0c9b227ee0076fb2d58301c505bb837a290ae99ee628beacdb719f0626d749"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:82c9722b7dfaa71e8b61f8c89fed0482567fb69178e139fe4151fc71ed7df782"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c18897c95c0a288347e29537b63608a8f63a5c3cb6da258ac46fcf89155e723e"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-win32.whl", hash = "sha256:3e910cf08944da381159587709daaad9e59d8ff7bca1f788d15928f3c3d49c2a"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:59c4a61fab676d37329fc3a671618a461bfeef53a4d0b8b12e3bc24a14e166f8"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-win_arm64.whl", hash = "sha256:8b4afea244102332973377fddbe54ce844d0916e1c67a5123432291717f32ffa"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:70591b28b218fff351b88cdd7f2359a01a71f9f7f5a2e465ce3715ed4b3c422b"}, - {file = 
"rapidfuzz-3.9.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee2d8355c7343c631a03e57540ea06e8717c19ecf5ff64ea07e0498f7f161457"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:708fb675de0f47b9635d1cc6fbbf80d52cb710d0a1abbfae5c84c46e3abbddc3"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d66c247c2d3bb7a9b60567c395a15a929d0ebcc5f4ceedb55bfa202c38c6e0c"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15146301b32e6e3d2b7e8146db1a26747919d8b13690c7f83a4cb5dc111b3a08"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7a03da59b6c7c97e657dd5cd4bcaab5fe4a2affd8193958d6f4d938bee36679"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d2c2fe19e392dbc22695b6c3b2510527e2b774647e79936bbde49db7742d6f1"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:91aaee4c94cb45930684f583ffc4e7c01a52b46610971cede33586cf8a04a12e"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3f5702828c10768f9281180a7ff8597da1e5002803e1304e9519dd0f06d79a85"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ccd1763b608fb4629a0b08f00b3c099d6395e67c14e619f6341b2c8429c2f310"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc7a0d4b2cb166bc46d02c8c9f7551cde8e2f3c9789df3827309433ee9771163"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7496f53d40560a58964207b52586783633f371683834a8f719d6d965d223a2eb"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-win32.whl", hash = "sha256:5eb1a9272ca71bc72be5415c2fa8448a6302ea4578e181bb7da9db855b367df0"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-win_amd64.whl", hash = 
"sha256:0d21fc3c0ca507a1180152a6dbd129ebaef48facde3f943db5c1055b6e6be56a"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-win_arm64.whl", hash = "sha256:43bb27a57c29dc5fa754496ba6a1a508480d21ae99ac0d19597646c16407e9f3"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:83a5ac6547a9d6eedaa212975cb8f2ce2aa07e6e30833b40e54a52b9f9999aa4"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:10f06139142ecde67078ebc9a745965446132b998f9feebffd71acdf218acfcc"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74720c3f24597f76c7c3e2c4abdff55f1664f4766ff5b28aeaa689f8ffba5fab"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce2bce52b5c150878e558a0418c2b637fb3dbb6eb38e4eb27d24aa839920483e"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1611199f178793ca9a060c99b284e11f6d7d124998191f1cace9a0245334d219"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0308b2ad161daf502908a6e21a57c78ded0258eba9a8f5e2545e2dafca312507"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eda91832201b86e3b70835f91522587725bec329ec68f2f7faf5124091e5ca7"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ece873c093aedd87fc07c2a7e333d52e458dc177016afa1edaf157e82b6914d8"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d97d3c9d209d5c30172baea5966f2129e8a198fec4a1aeb2f92abb6e82a2edb1"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6c4550d0db4931f5ebe9f0678916d1b06f06f5a99ba0b8a48b9457fd8959a7d4"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b6b8dd4af6324fc325d9483bec75ecf9be33e590928c9202d408e4eafff6a0a6"}, - {file = 
"rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16122ae448bc89e2bea9d81ce6cb0f751e4e07da39bd1e70b95cae2493857853"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-win32.whl", hash = "sha256:71cc168c305a4445109cd0d4925406f6e66bcb48fde99a1835387c58af4ecfe9"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-win_amd64.whl", hash = "sha256:59ee78f2ecd53fef8454909cda7400fe2cfcd820f62b8a5d4dfe930102268054"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-win_arm64.whl", hash = "sha256:58b4ce83f223605c358ae37e7a2d19a41b96aa65b1fede99cc664c9053af89ac"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f469dbc9c4aeaac7dd005992af74b7dff94aa56a3ea063ce64e4b3e6736dd2f"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a9ed7ad9adb68d0fe63a156fe752bbf5f1403ed66961551e749641af2874da92"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39ffe48ffbeedf78d120ddfb9d583f2ca906712159a4e9c3c743c9f33e7b1775"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8502ccdea9084d54b6f737d96a3b60a84e3afed9d016686dc979b49cdac71613"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a4bec4956e06b170ca896ba055d08d4c457dac745548172443982956a80e118"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c0488b1c273be39e109ff885ccac0448b2fa74dea4c4dc676bcf756c15f16d6"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0542c036cb6acf24edd2c9e0411a67d7ba71e29e4d3001a082466b86fc34ff30"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0a96b52c9f26857bf009e270dcd829381e7a634f7ddd585fa29b87d4c82146d9"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:6edd3cd7c4aa8c68c716d349f531bd5011f2ca49ddade216bb4429460151559f"}, - {file = 
"rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:50b2fb55d7ed58c66d49c9f954acd8fc4a3f0e9fd0ff708299bd8abb68238d0e"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:32848dfe54391636b84cda1823fd23e5a6b1dbb8be0e9a1d80e4ee9903820994"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:29146cb7a1bf69c87e928b31bffa54f066cb65639d073b36e1425f98cccdebc6"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-win32.whl", hash = "sha256:aed13e5edacb0ecadcc304cc66e93e7e77ff24f059c9792ee602c0381808e10c"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:af440e36b828922256d0b4d79443bf2cbe5515fc4b0e9e96017ec789b36bb9fc"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:efa674b407424553024522159296690d99d6e6b1192cafe99ca84592faff16b4"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0b40ff76ee19b03ebf10a0a87938f86814996a822786c41c3312d251b7927849"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16a6c7997cb5927ced6f617122eb116ba514ec6b6f60f4803e7925ef55158891"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3f42504bdc8d770987fc3d99964766d42b2a03e4d5b0f891decdd256236bae0"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9462aa2be9f60b540c19a083471fdf28e7cf6434f068b631525b5e6251b35e"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1629698e68f47609a73bf9e73a6da3a4cac20bc710529215cbdf111ab603665b"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68bc7621843d8e9a7fd1b1a32729465bf94b47b6fb307d906da168413331f8d6"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c6254c50f15bc2fcc33cb93a95a81b702d9e6590f432a7f7822b8c7aba9ae288"}, - {file = 
"rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7e535a114fa575bc143e175e4ca386a467ec8c42909eff500f5f0f13dc84e3e0"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d50acc0e9d67e4ba7a004a14c42d1b1e8b6ca1c515692746f4f8e7948c673167"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fa742ec60bec53c5a211632cf1d31b9eb5a3c80f1371a46a23ac25a1fa2ab209"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c256fa95d29cbe5aa717db790b231a9a5b49e5983d50dc9df29d364a1db5e35b"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-win32.whl", hash = "sha256:89acbf728b764421036c173a10ada436ecca22999851cdc01d0aa904c70d362d"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:c608fcba8b14d86c04cb56b203fed31a96e8a1ebb4ce99e7b70313c5bf8cf497"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-win_arm64.whl", hash = "sha256:d41c00ded0e22e9dba88ff23ebe0dc9d2a5f21ba2f88e185ea7374461e61daa9"}, - {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a65c2f63218ea2dedd56fc56361035e189ca123bd9c9ce63a9bef6f99540d681"}, - {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:680dc78a5f889d3b89f74824b89fe357f49f88ad10d2c121e9c3ad37bac1e4eb"}, - {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8ca862927a0b05bd825e46ddf82d0724ea44b07d898ef639386530bf9b40f15"}, - {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2116fa1fbff21fa52cd46f3cfcb1e193ba1d65d81f8b6e123193451cd3d6c15e"}, - {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dcb7d9afd740370a897c15da61d3d57a8d54738d7c764a99cedb5f746d6a003"}, - {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1a5bd6401bb489e14cbb5981c378d53ede850b7cc84b2464cad606149cc4e17d"}, - {file = 
"rapidfuzz-3.9.6-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:29fda70b9d03e29df6fc45cc27cbcc235534b1b0b2900e0a3ae0b43022aaeef5"}, - {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:88144f5f52ae977df9352029488326afadd7a7f42c6779d486d1f82d43b2b1f2"}, - {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:715aeaabafba2709b9dd91acb2a44bad59d60b4616ef90c08f4d4402a3bbca60"}, - {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af26ebd3714224fbf9bebbc27bdbac14f334c15f5d7043699cd694635050d6ca"}, - {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101bd2df438861a005ed47c032631b7857dfcdb17b82beeeb410307983aac61d"}, - {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2185e8e29809b97ad22a7f99281d1669a89bdf5fa1ef4ef1feca36924e675367"}, - {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9e53c72d08f0e9c6e4a369e52df5971f311305b4487690c62e8dd0846770260c"}, - {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a0cb157162f0cdd62e538c7bd298ff669847fc43a96422811d5ab933f4c16c3a"}, - {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bb5ff2bd48132ed5e7fbb8f619885facb2e023759f2519a448b2c18afe07e5d"}, - {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6dc37f601865e8407e3a8037ffbc3afe0b0f837b2146f7632bd29d087385babe"}, - {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a657eee4b94668faf1fa2703bdd803654303f7e468eb9ba10a664d867ed9e779"}, - {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:51be6ab5b1d5bb32abd39718f2a5e3835502e026a8272d139ead295c224a6f5e"}, - {file = "rapidfuzz-3.9.6.tar.gz", hash = 
"sha256:5cf2a7d621e4515fee84722e93563bf77ff2cbe832a77a48b81f88f9e23b9e8d"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ccf68e30b80e903f2309f90a438dbd640dd98e878eeb5ad361a288051ee5b75c"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:696a79018ef989bf1c9abd9005841cee18005ccad4748bad8a4c274c47b6241a"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4eebf6c93af0ae866c22b403a84747580bb5c10f0d7b51c82a87f25405d4dcb"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e9125377fa3d21a8abd4fbdbcf1c27be73e8b1850f0b61b5b711364bf3b59db"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c12d180b17a22d107c8747de9c68d0b9c1d15dcda5445ff9bf9f4ccfb67c3e16"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1318d42610c26dcd68bd3279a1bf9e3605377260867c9a8ed22eafc1bd93a7c"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5fa6e3c6e0333051c1f3a49f0807b3366f4131c8d6ac8c3e05fd0d0ce3755c"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fcf79b686962d7bec458a0babc904cb4fa319808805e036b9d5a531ee6b9b835"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8b01153c7466d0bad48fba77a303d5a768e66f24b763853469f47220b3de4661"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:94baaeea0b4f8632a6da69348b1e741043eba18d4e3088d674d3f76586b6223d"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6c5b32875646cb7f60c193ade99b2e4b124f19583492115293cd00f6fb198b17"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:110b6294396bc0a447648627479c9320f095c2034c0537f687592e0f58622638"}, + {file = 
"rapidfuzz-3.9.7-cp310-cp310-win32.whl", hash = "sha256:3445a35c4c8d288f2b2011eb61bce1227c633ce85a3154e727170f37c0266bb2"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:0d1415a732ee75e74a90af12020b77a0b396b36c60afae1bde3208a78cd2c9fc"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-win_arm64.whl", hash = "sha256:836f4d88b8bd0fff2ebe815dcaab8aa6c8d07d1d566a7e21dd137cf6fe11ed5b"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d098ce6162eb5e48fceb0745455bc950af059df6113eec83e916c129fca11408"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:048d55d36c02c6685a2b2741688503c3d15149694506655b6169dcfd3b6c2585"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c33211cfff9aec425bb1bfedaf94afcf337063aa273754f22779d6dadebef4c2"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6d9db2fa4e9be171e9bb31cf2d2575574774966b43f5b951062bb2e67885852"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4e049d5ad61448c9a020d1061eba20944c4887d720c4069724beb6ea1692507"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cfa74aac64c85898b93d9c80bb935a96bf64985e28d4ee0f1a3d1f3bf11a5106"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965693c2e9efd425b0f059f5be50ef830129f82892fa1858e220e424d9d0160f"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8501000a5eb8037c4b56857724797fe5a8b01853c363de91c8d0d0ad56bef319"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d92c552c6b7577402afdd547dcf5d31ea6c8ae31ad03f78226e055cfa37f3c6"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1ee2086f490cb501d86b7e386c1eb4e3a0ccbb0c99067089efaa8c79012c8952"}, + 
{file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1de91e7fd7f525e10ea79a6e62c559d1b0278ec097ad83d9da378b6fab65a265"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a4da514d13f4433e16960a17f05b67e0af30ac771719c9a9fb877e5004f74477"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-win32.whl", hash = "sha256:a40184c67db8252593ec518e17fb8a6e86d7259dc9f2d6c0bf4ff4db8cf1ad4b"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:c4f28f1930b09a2c300357d8465b388cecb7e8b2f454a5d5425561710b7fd07f"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-win_arm64.whl", hash = "sha256:675b75412a943bb83f1f53e2e54fd18c80ef15ed642dc6eb0382d1949419d904"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1ef6a1a8f0b12f8722f595f15c62950c9a02d5abc64742561299ffd49f6c6944"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32532af1d70c6ec02ea5ac7ee2766dfff7c8ae8c761abfe8da9e527314e634e8"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1a38bade755aa9dd95a81cda949e1bf9cd92b79341ccc5e2189c9e7bdfc5ec"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d73ee2df41224c87336448d279b5b6a3a75f36e41dd3dcf538c0c9cce36360d8"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be3a1fc3e2ab3bdf93dc0c83c00acca8afd2a80602297d96cf4a0ba028333cdf"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:603f48f621272a448ff58bb556feb4371252a02156593303391f5c3281dfaeac"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:268f8e1ca50fc61c0736f3fe9d47891424adf62d96ed30196f30f4bd8216b41f"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:5f8bf3f0d02935751d8660abda6044821a861f6229f7d359f98bcdcc7e66c39b"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b997ff3b39d4cee9fb025d6c46b0a24bd67595ce5a5b652a97fb3a9d60beb651"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca66676c8ef6557f9b81c5b2b519097817a7c776a6599b8d6fcc3e16edd216fe"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:35d3044cb635ca6b1b2b7b67b3597bd19f34f1753b129eb6d2ae04cf98cd3945"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a93c9e60904cb76e7aefef67afffb8b37c4894f81415ed513db090f29d01101"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-win32.whl", hash = "sha256:579d107102c0725f7c79b4e79f16d3cf4d7c9208f29c66b064fa1fd4641d5155"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-win_amd64.whl", hash = "sha256:953b3780765c8846866faf891ee4290f6a41a6dacf4fbcd3926f78c9de412ca6"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-win_arm64.whl", hash = "sha256:7c20c1474b068c4bd45bf2fd0ad548df284f74e9a14a68b06746c56e3aa8eb70"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fde81b1da9a947f931711febe2e2bee694e891f6d3e6aa6bc02c1884702aea19"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:47e92c155a14f44511ea8ebcc6bc1535a1fe8d0a7d67ad3cc47ba61606df7bcf"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8772b745668260c5c4d069c678bbaa68812e6c69830f3771eaad521af7bc17f8"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:578302828dd97ee2ba507d2f71d62164e28d2fc7bc73aad0d2d1d2afc021a5d5"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc3e6081069eea61593f1d6839029da53d00c8c9b205c5534853eaa3f031085c"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:0b1c2d504eddf97bc0f2eba422c8915576dbf025062ceaca2d68aecd66324ad9"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb76e5a21034f0307c51c5a2fc08856f698c53a4c593b17d291f7d6e9d09ca3"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d4ba2318ef670ce505f42881a5d2af70f948124646947341a3c6ccb33cd70369"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:057bb03f39e285047d7e9412e01ecf31bb2d42b9466a5409d715d587460dd59b"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a8feac9006d5c9758438906f093befffc4290de75663dbb2098461df7c7d28dd"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95b8292383e717e10455f2c917df45032b611141e43d1adf70f71b1566136b11"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e9fbf659537d246086d0297628b3795dc3e4a384101ecc01e5791c827b8d7345"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-win32.whl", hash = "sha256:1dc516ac6d32027be2b0196bedf6d977ac26debd09ca182376322ad620460feb"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-win_amd64.whl", hash = "sha256:b4f86e09d3064dca0b014cd48688964036a904a2d28048f00c8f4640796d06a8"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-win_arm64.whl", hash = "sha256:19c64d8ddb2940b42a4567b23f1681af77f50a5ff6c9b8e85daba079c210716e"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbda3dd68d8b28ccb20ffb6f756fefd9b5ba570a772bedd7643ed441f5793308"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2379e0b2578ad3ac7004f223251550f08bca873ff76c169b09410ec562ad78d8"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d1eff95362f993b0276fd3839aee48625b09aac8938bb0c23b40d219cba5dc5"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:cd9360e30041690912525a210e48a897b49b230768cc8af1c702e5395690464f"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a93cd834b3c315ab437f0565ee3a2f42dd33768dc885ccbabf9710b131cf70d2"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff196996240db7075f62c7bc4506f40a3c80cd4ae3ab0e79ac6892283a90859"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948dcee7aaa1cd14358b2a7ef08bf0be42bf89049c3a906669874a715fc2c937"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d95751f505a301af1aaf086c19f34536056d6c8efa91b2240de532a3db57b543"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:90db86fa196eecf96cb6db09f1083912ea945c50c57188039392d810d0b784e1"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:3171653212218a162540a3c8eb8ae7d3dcc8548540b69eaecaf3b47c14d89c90"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:36dd6e820379c37a1ffefc8a52b648758e867cd9d78ee5b5dc0c9a6a10145378"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7b702de95666a1f7d5c6b47eacadfe2d2794af3742d63d2134767d13e5d1c713"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-win32.whl", hash = "sha256:9030e7238c0df51aed5c9c5ed8eee2bdd47a2ae788e562c1454af2851c3d1906"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:f847fb0fbfb72482b1c05c59cbb275c58a55b73708a7f77a83f8035ee3c86497"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:97f2ce529d2a70a60c290f6ab269a2bbf1d3b47b9724dccc84339b85f7afb044"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e2957fdad10bb83b1982b02deb3604a3f6911a5e545f518b59c741086f92d152"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5d5262383634626eb45c536017204b8163a03bc43bda880cf1bdd7885db9a163"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:364587827d7cbd41afa0782adc2d2d19e3f07d355b0750a02a8e33ad27a9c368"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecc24af7f905f3d6efb371a01680116ffea8d64e266618fb9ad1602a9b4f7934"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dc86aa6b29d174713c5f4caac35ffb7f232e3e649113e8d13812b35ab078228"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3dcfbe7266e74a707173a12a7b355a531f2dcfbdb32f09468e664330da14874"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b23806fbdd6b510ba9ac93bb72d503066263b0fba44b71b835be9f063a84025f"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5551d68264c1bb6943f542da83a4dc8940ede52c5847ef158698799cc28d14f5"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:13d8675a1fa7e2b19650ca7ef9a6ec01391d4bb12ab9e0793e8eb024538b4a34"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9b6a5de507b9be6de688dae40143b656f7a93b10995fb8bd90deb555e7875c60"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:111a20a3c090cf244d9406e60500b6c34b2375ba3a5009e2b38fd806fe38e337"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-win32.whl", hash = "sha256:22589c0b8ccc6c391ce7f776c93a8c92c96ab8d34e1a19f1bd2b12a235332632"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:6f83221db5755b8f34222e40607d87f1176a8d5d4dbda4a55a0f0b67d588a69c"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-win_arm64.whl", hash = "sha256:3665b92e788578c3bb334bd5b5fa7ee1a84bafd68be438e3110861d1578c63a0"}, + {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:d7df9c2194c7ec930b33c991c55dbd0c10951bd25800c0b7a7b571994ebbced5"}, + {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:68bd888eafd07b09585dcc8bc2716c5ecdb7eed62827470664d25588982b2873"}, + {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1230e0f9026851a6a432beaa0ce575dda7b39fe689b576f99a0704fbb81fc9c"}, + {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3b36e1c61b796ae1777f3e9e11fd39898b09d351c9384baf6e3b7e6191d8ced"}, + {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dba13d86806fcf3fe9c9919f58575e0090eadfb89c058bde02bcc7ab24e4548"}, + {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1f1a33e84056b7892c721d84475d3bde49a145126bc4c6efe0d6d0d59cb31c29"}, + {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3492c7a42b7fa9f0051d7fcce9893e95ed91c97c9ec7fb64346f3e070dd318ed"}, + {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:ece45eb2af8b00f90d10f7419322e8804bd42fb1129026f9bfe712c37508b514"}, + {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcd14cf4876f04b488f6e54a7abd3e9b31db5f5a6aba0ce90659917aaa8c088"}, + {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:521c58c72ed8a612b25cda378ff10dee17e6deb4ee99a070b723519a345527b9"}, + {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18669bb6cdf7d40738526d37e550df09ba065b5a7560f3d802287988b6cb63cf"}, + {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7abe2dbae81120a64bb4f8d3fcafe9122f328c9f86d7f327f174187a5af4ed86"}, + {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:a3c0783910911f4f24655826d007c9f4360f08107410952c01ee3df98c713eb2"}, + {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:03126f9a040ff21d2a110610bfd6b93b79377ce8b4121edcb791d61b7df6eec5"}, + {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:591908240f4085e2ade5b685c6e8346e2ed44932cffeaac2fb32ddac95b55c7f"}, + {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9012d86c6397edbc9da4ac0132de7f8ee9d6ce857f4194d5684c4ddbcdd1c5c"}, + {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df596ddd3db38aa513d4c0995611267b3946e7cbe5a8761b50e9306dfec720ee"}, + {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3ed5adb752f4308fcc8f4fb6f8eb7aa4082f9d12676fda0a74fa5564242a8107"}, + {file = "rapidfuzz-3.9.7.tar.gz", hash = "sha256:f1c7296534c1afb6f495aa95871f14ccdc197c6db42965854e483100df313030"}, ] [package.extras] @@ -2789,13 +2769,13 @@ torch = ["safetensors[numpy]", "torch (>=1.10)"] [[package]] name = "setuptools" -version = "74.0.0" +version = "74.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"}, - {file = "setuptools-74.0.0.tar.gz", hash = "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"}, + {file = "setuptools-74.1.1-py3-none-any.whl", hash = "sha256:fc91b5f89e392ef5b77fe143b17e32f65d3024744fba66dc3afe07201684d766"}, + {file = "setuptools-74.1.1.tar.gz", hash = "sha256:2353af060c06388be1cecbf5953dcdb1f38362f87a2356c480b6b4d5fcfc8847"}, ] [package.extras] @@ -3412,26 +3392,7 @@ files = [ {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, ] 
-[[package]] -name = "zipp" -version = "3.20.1" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, - {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - [metadata] lock-version = "2.0" -python-versions = "^3.9,<3.12" -content-hash = "79dddd06e60616e81ba4c1ea72a3719adf7a0aa4c92b48b8073d3b4e1ef3556c" +python-versions = "^3.10,<3.12" +content-hash = "6b300d00c5b0aed457fd473975e621d4d40b8b2ccb9ed2c1fe9f6c64f204b319" diff --git a/airbyte-integrations/connectors/source-s3/pyproject.toml b/airbyte-integrations/connectors/source-s3/pyproject.toml index f22bf30b3041..70a91db27c9a 100644 --- a/airbyte-integrations/connectors/source-s3/pyproject.toml +++ b/airbyte-integrations/connectors/source-s3/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.7.8" +version = "4.8.0" name = "source-s3" description = "Source implementation for S3." 
authors = [ "Airbyte ",] @@ -16,22 +16,18 @@ repository = "https://github.com/airbytehq/airbyte" include = "source_s3" [tool.poetry.dependencies] -python = "^3.9,<3.12" +python = "^3.10,<3.12" pytz = "==2024.1" wcmatch = "==8.4" python-snappy = "==0.6.1" dill = "==0.3.4" -# override transitive dependency that had a vulnerability https://nvd.nist.gov/vuln/detail/CVE-2023-6730 transformers = "4.38.2" urllib3 = "<2" +airbyte-cdk = {extras = ["file-based"], version = "^3"} [tool.poetry.scripts] source-s3 = "source_s3.run:run" -[tool.poetry.dependencies.airbyte-cdk] -extras = [ "file-based",] -version = "^3" - [tool.poetry.dependencies.smart-open] extras = [ "s3",] version = "==5.1.0" @@ -42,4 +38,4 @@ moto = "==4.2.14" docker = "^7.0.0" pytest-mock = "^3.6.1" requests-mock = "^1.9.3" -pandas = "==2.0.3" +pandas = "^2.0.3" diff --git a/docs/integrations/sources/s3.md b/docs/integrations/sources/s3.md index 690a99d8daf4..8c76051a012f 100644 --- a/docs/integrations/sources/s3.md +++ b/docs/integrations/sources/s3.md @@ -339,6 +339,7 @@ This connector utilizes the open source [Unstructured](https://unstructured-io.g | Version | Date | Pull Request | Subject | |:--------|:-----------|:----------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------| +| 4.8.0 | 2024-09-03 | [44908](https://github.com/airbytehq/airbyte/pull/44908) | Migrate to CDK v3 | | 4.7.8 | 2024-08-31 | [45009](https://github.com/airbytehq/airbyte/pull/45009) | Update dependencies | | 4.7.7 | 2024-08-24 | [44732](https://github.com/airbytehq/airbyte/pull/44732) | Update dependencies | | 4.7.6 | 2024-08-19 | [44380](https://github.com/airbytehq/airbyte/pull/44380) | Update dependencies | From be6b5ef1cb3cc58f657c2a4450ecb1cd3194c402 Mon Sep 17 00:00:00 2001 From: Marius Posta Date: Tue, 3 Sep 2024 13:40:06 -0700 Subject: 
[PATCH 11/51] bulk-cdk-toolkits-extract-jdbc: noop JdbcCheckQueries when no queries defined (#44842) --- .../main/kotlin/io/airbyte/cdk/check/JdbcCheckQueries.kt | 2 +- .../kotlin/io/airbyte/cdk/check/JdbcCheckQueriesTest.kt | 6 ++++++ .../io/airbyte/cdk/discover/JdbcMetadataQuerierTest.kt | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/airbyte-cdk/bulk/toolkits/extract-jdbc/src/main/kotlin/io/airbyte/cdk/check/JdbcCheckQueries.kt b/airbyte-cdk/bulk/toolkits/extract-jdbc/src/main/kotlin/io/airbyte/cdk/check/JdbcCheckQueries.kt index f90cd191b508..522bcde0f077 100644 --- a/airbyte-cdk/bulk/toolkits/extract-jdbc/src/main/kotlin/io/airbyte/cdk/check/JdbcCheckQueries.kt +++ b/airbyte-cdk/bulk/toolkits/extract-jdbc/src/main/kotlin/io/airbyte/cdk/check/JdbcCheckQueries.kt @@ -30,7 +30,7 @@ const val CHECK_QUERIES_PREFIX = "airbyte.connector.check.jdbc" class JdbcCheckQueries { // Micronaut configuration objects work better with mutable properties. - lateinit var queries: List + protected var queries: List = emptyList() private val log = KotlinLogging.logger {} diff --git a/airbyte-cdk/bulk/toolkits/extract-jdbc/src/test/kotlin/io/airbyte/cdk/check/JdbcCheckQueriesTest.kt b/airbyte-cdk/bulk/toolkits/extract-jdbc/src/test/kotlin/io/airbyte/cdk/check/JdbcCheckQueriesTest.kt index ca8f20a6e9be..a96da2b2c676 100644 --- a/airbyte-cdk/bulk/toolkits/extract-jdbc/src/test/kotlin/io/airbyte/cdk/check/JdbcCheckQueriesTest.kt +++ b/airbyte-cdk/bulk/toolkits/extract-jdbc/src/test/kotlin/io/airbyte/cdk/check/JdbcCheckQueriesTest.kt @@ -21,6 +21,12 @@ class JdbcCheckQueriesTest { @Inject lateinit var checkQueries: JdbcCheckQueries + @Test + fun testEmpty() { + val empty = JdbcCheckQueries() + Assertions.assertDoesNotThrow { h2.createConnection().use { empty.executeAll(it) } } + } + @Test @Property(name = "$Q[0]", value = "SELECT DATABASE_PATH() FROM DUAL") fun testPass() { diff --git 
a/airbyte-cdk/bulk/toolkits/extract-jdbc/src/test/kotlin/io/airbyte/cdk/discover/JdbcMetadataQuerierTest.kt b/airbyte-cdk/bulk/toolkits/extract-jdbc/src/test/kotlin/io/airbyte/cdk/discover/JdbcMetadataQuerierTest.kt index e3d46ce89192..93e9c94a85c0 100644 --- a/airbyte-cdk/bulk/toolkits/extract-jdbc/src/test/kotlin/io/airbyte/cdk/discover/JdbcMetadataQuerierTest.kt +++ b/airbyte-cdk/bulk/toolkits/extract-jdbc/src/test/kotlin/io/airbyte/cdk/discover/JdbcMetadataQuerierTest.kt @@ -23,7 +23,7 @@ class JdbcMetadataQuerierTest { JdbcMetadataQuerier.Factory( selectQueryGenerator = H2SourceOperations(), fieldTypeMapper = H2SourceOperations(), - checkQueries = JdbcCheckQueries().apply { queries = listOf() }, + checkQueries = JdbcCheckQueries(), constants = DefaultJdbcConstants(), ) From 2073513689fb16bc73ce358114888301332e67d8 Mon Sep 17 00:00:00 2001 From: Teal Larson Date: Tue, 3 Sep 2024 17:16:49 -0400 Subject: [PATCH 12/51] docs: remove lingering mentions of dbt core (#45110) --- .../on-kubernetes-via-helm.md | 7 --- docs/understanding-airbyte/operations.md | 55 ------------------- docusaurus/sidebars.js | 1 - 3 files changed, 63 deletions(-) delete mode 100644 docs/understanding-airbyte/operations.md diff --git a/docs/deploying-airbyte/on-kubernetes-via-helm.md b/docs/deploying-airbyte/on-kubernetes-via-helm.md index e8fc93457ef7..cff290b8922c 100644 --- a/docs/deploying-airbyte/on-kubernetes-via-helm.md +++ b/docs/deploying-airbyte/on-kubernetes-via-helm.md @@ -17,13 +17,6 @@ Kubernetes cluster on AWS. Follow [this guide](on-restack.md) to get started. :::note -Airbyte running on Self-Hosted Kubernetes doesn't support DBT Transformations. Please refer to -[#5901](https://github.com/airbytehq/airbyte/issues/5091) - -::: - -:::note - Airbyte Kubernetes Community Edition does not support basic auth by default. To enable basic auth, consider adding a reverse proxy in front of Airbyte. 
diff --git a/docs/understanding-airbyte/operations.md b/docs/understanding-airbyte/operations.md deleted file mode 100644 index 50ce80e2c2c8..000000000000 --- a/docs/understanding-airbyte/operations.md +++ /dev/null @@ -1,55 +0,0 @@ -# Operations - -Airbyte [connections](/using-airbyte/core-concepts/sync-modes/) support configuring additional transformations that execute after the sync. Useful applications could be: - -- Customized normalization to better fit the requirements of your own business context. -- Business transformations from a technical data representation into a more logical and business oriented data structure. This can facilitate usage by end-users, non-technical operators, and executives looking to generate Business Intelligence dashboards and reports. -- Data Quality, performance optimization, alerting and monitoring, etc. -- Integration with other tools from your data stack \(orchestration, data visualization, etc.\) - -## Supported Operations - -### dbt transformations - -#### - git repository url: - -A url to a git repository to \(shallow\) clone the latest dbt project code from. - -The project versioned in the repository is expected to: - -- be a valid dbt package with a `dbt_project.yml` file at its root. -- have a `dbt_project.yml` with a "profile" name declared as described [here](https://docs.getdbt.com/dbt-cli/configure-your-profile). - -When using the dbt CLI, dbt checks your `profiles.yml` file for a profile with the same name. A profile contains all the details required to connect to your data warehouse. This file generally lives outside of your dbt project to avoid sensitive credentials being checked in to version control. Therefore, a `profiles.yml` will be generated according to the configured destination from the Airbyte UI. - -Note that if you prefer to use your own `profiles.yml` stored in the git repository or in the Docker image, then you can specify an override with `--profiles-dir=` in the dbt CLI arguments. 
- -#### - git repository branch \(optional\): - -The name of the branch to use when cloning the git repository. If left empty, git will use the default branch of your repository. - -#### - docker image: - -A Docker image and tag to run dbt commands from. The Docker image should have `/bin/bash` and `dbt` installed for this operation type to work. - -A typical value for this field would be for example: `fishtownanalytics/dbt:1.0.0` from [dbt dockerhub](https://hub.docker.com/r/fishtownanalytics/dbt/tags?page=1&ordering=last_updated). - -This field lets you configure the version of dbt that your custom dbt project requires and the loading of additional software and packages necessary for your transformations \(other than your dbt `packages.yml` file\). - -#### - dbt cli arguments - -This operation type is aimed at running the dbt cli. - -A typical value for this field would be "run" and the actual command invoked would as a result be: `dbt run` in the docker container. - -One thing to consider is that dbt allows for vast configuration of the run command, for example, allowing you to select a subset of models. You can find the [dbt reference docs](https://docs.getdbt.com/reference/dbt-commands) which describes this set of available commands and options. - -## Future Operations - -- Docker/Script operations: Execute a generic script in a custom Docker container. -- Webhook operations: Trigger API or hooks from other providers. -- Airflow operations: To use a specialized orchestration tool that lets you schedule and manage more advanced/complex sequences of operations in your sync workflow. - -## Going Further - -In the meantime, please feel free to react, comment, and share your thoughts/use cases with us. We would be glad to hear your feedback and ideas as they will help shape the next set of features and our roadmap for the future. You can head to our GitHub and participate in the corresponding issue or discussions. Thank you! 
diff --git a/docusaurus/sidebars.js b/docusaurus/sidebars.js index 3a5d46a2c1cb..62962795c9cf 100644 --- a/docusaurus/sidebars.js +++ b/docusaurus/sidebars.js @@ -495,7 +495,6 @@ const understandingAirbyte = { "understanding-airbyte/database-data-catalog", "understanding-airbyte/beginners-guide-to-catalog", "understanding-airbyte/supported-data-types", - "understanding-airbyte/operations", "understanding-airbyte/secrets", "understanding-airbyte/cdc", "understanding-airbyte/resumability", From 095d30e6743d9b2b899405c6d06e999e0ac07204 Mon Sep 17 00:00:00 2001 From: Johnny Schmidt Date: Tue, 3 Sep 2024 14:45:50 -0700 Subject: [PATCH 13/51] State manager test, rational range behavior throughout (#44945) --- .../cdk/message/AirbyteStateMessageFactory.kt | 75 --- .../airbyte/cdk/message/MessageConverter.kt | 76 +++ .../airbyte/cdk/message/MessageQueueWriter.kt | 2 +- .../io/airbyte/cdk/state/StateManager.kt | 134 +++-- .../io/airbyte/cdk/state/StreamManager.kt | 12 +- .../io/airbyte/cdk/state/StateManagerTest.kt | 462 ++++++++++++++++++ 6 files changed, 629 insertions(+), 132 deletions(-) delete mode 100644 airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/AirbyteStateMessageFactory.kt create mode 100644 airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/MessageConverter.kt create mode 100644 airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/StateManagerTest.kt diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/AirbyteStateMessageFactory.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/AirbyteStateMessageFactory.kt deleted file mode 100644 index ed88daa07553..000000000000 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/AirbyteStateMessageFactory.kt +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.cdk.message - -import io.airbyte.protocol.models.v0.AirbyteGlobalState -import io.airbyte.protocol.models.v0.AirbyteStateMessage -import io.airbyte.protocol.models.v0.AirbyteStateStats -import io.airbyte.protocol.models.v0.AirbyteStreamState -import io.airbyte.protocol.models.v0.StreamDescriptor -import jakarta.inject.Singleton - -/** - * Converts the internal @[DestinationStateMessage] case class to the Protocol state messages - * required by @[io.airbyte.cdk.output.OutputConsumer] - */ -interface AirbyteStateMessageFactory { - fun fromDestinationStateMessage(message: DestinationStateMessage): AirbyteStateMessage -} - -@Singleton -class DefaultAirbyteStateMessageFactory : AirbyteStateMessageFactory { - override fun fromDestinationStateMessage( - message: DestinationStateMessage - ): AirbyteStateMessage { - return when (message) { - is DestinationStreamState -> - AirbyteStateMessage() - .withSourceStats( - AirbyteStateStats() - .withRecordCount(message.sourceStats.recordCount.toDouble()) - ) - .withDestinationStats( - message.destinationStats?.let { - AirbyteStateStats().withRecordCount(it.recordCount.toDouble()) - } - ?: throw IllegalStateException( - "Destination stats must be provided for DestinationStreamState" - ) - ) - .withType(AirbyteStateMessage.AirbyteStateType.STREAM) - .withStream(fromStreamState(message.streamState)) - is DestinationGlobalState -> - AirbyteStateMessage() - .withSourceStats( - AirbyteStateStats() - .withRecordCount(message.sourceStats.recordCount.toDouble()) - ) - .withDestinationStats( - message.destinationStats?.let { - AirbyteStateStats().withRecordCount(it.recordCount.toDouble()) - } - ) - .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) - .withGlobal( - AirbyteGlobalState() - .withSharedState(message.state) - .withStreamStates(message.streamStates.map { fromStreamState(it) }) - ) - } - } - - private fun fromStreamState( - streamState: DestinationStateMessage.StreamState - ): AirbyteStreamState { - 
return AirbyteStreamState() - .withStreamDescriptor( - StreamDescriptor() - .withNamespace(streamState.stream.descriptor.namespace) - .withName(streamState.stream.descriptor.name) - ) - .withStreamState(streamState.state) - } -} diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/MessageConverter.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/MessageConverter.kt new file mode 100644 index 000000000000..97d40900b210 --- /dev/null +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/MessageConverter.kt @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.message + +import io.airbyte.protocol.models.v0.AirbyteGlobalState +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteStateMessage +import io.airbyte.protocol.models.v0.AirbyteStateStats +import io.airbyte.protocol.models.v0.AirbyteStreamState +import io.airbyte.protocol.models.v0.StreamDescriptor +import jakarta.inject.Singleton + +/** + * Converts the internal @[DestinationStateMessage] case class to the Protocol state messages + * required by @[io.airbyte.cdk.output.OutputConsumer] + */ +interface MessageConverter { + fun from(message: T): U +} + +@Singleton +class DefaultMessageConverter : MessageConverter { + override fun from(message: DestinationStateMessage): AirbyteMessage { + val state = + when (message) { + is DestinationStreamState -> + AirbyteStateMessage() + .withSourceStats( + AirbyteStateStats() + .withRecordCount(message.sourceStats.recordCount.toDouble()) + ) + .withDestinationStats( + message.destinationStats?.let { + AirbyteStateStats().withRecordCount(it.recordCount.toDouble()) + } + ?: throw IllegalStateException( + "Destination stats must be provided for DestinationStreamState" + ) + ) + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(fromStreamState(message.streamState)) + is DestinationGlobalState -> 
+ AirbyteStateMessage() + .withSourceStats( + AirbyteStateStats() + .withRecordCount(message.sourceStats.recordCount.toDouble()) + ) + .withDestinationStats( + message.destinationStats?.let { + AirbyteStateStats().withRecordCount(it.recordCount.toDouble()) + } + ) + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal( + AirbyteGlobalState() + .withSharedState(message.state) + .withStreamStates(message.streamStates.map { fromStreamState(it) }) + ) + } + return AirbyteMessage().withState(state) + } + + private fun fromStreamState( + streamState: DestinationStateMessage.StreamState + ): AirbyteStreamState { + return AirbyteStreamState() + .withStreamDescriptor( + StreamDescriptor() + .withNamespace(streamState.stream.descriptor.namespace) + .withName(streamState.stream.descriptor.name) + ) + .withStreamState(streamState.state) + } +} diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/MessageQueueWriter.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/MessageQueueWriter.kt index 898ce683ba2f..50c9f637ef88 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/MessageQueueWriter.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/MessageQueueWriter.kt @@ -31,7 +31,7 @@ class DestinationMessageQueueWriter( private val catalog: DestinationCatalog, private val messageQueue: MessageQueue, private val streamsManager: StreamsManager, - private val stateManager: StateManager + private val stateManager: StateManager ) : MessageQueueWriter { /** * Deserialize and route the message to the appropriate channel. 
diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StateManager.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StateManager.kt index e6c47ecd6dd5..9c900b4e4379 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StateManager.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StateManager.kt @@ -6,34 +6,29 @@ package io.airbyte.cdk.state import io.airbyte.cdk.command.DestinationCatalog import io.airbyte.cdk.command.DestinationStream -import io.airbyte.cdk.message.AirbyteStateMessageFactory import io.airbyte.cdk.message.DestinationStateMessage -import io.airbyte.cdk.output.OutputConsumer +import io.airbyte.cdk.message.MessageConverter +import io.airbyte.protocol.models.v0.AirbyteMessage import io.github.oshai.kotlinlogging.KotlinLogging +import io.micronaut.core.util.clhm.ConcurrentLinkedHashMap import jakarta.inject.Singleton import java.util.concurrent.ConcurrentHashMap import java.util.concurrent.ConcurrentLinkedQueue import java.util.concurrent.atomic.AtomicReference +import java.util.function.Consumer /** * Interface for state management. Should accept stream and global state, as well as requests to * flush all data-sufficient states. */ -interface StateManager { - fun addStreamState( - stream: DestinationStream, - index: Long, - stateMessage: DestinationStateMessage - ) - fun addGlobalState( - streamIndexes: List>, - stateMessage: DestinationStateMessage - ) +interface StateManager { + fun addStreamState(key: K, index: Long, stateMessage: T) + fun addGlobalState(keyIndexes: List>, stateMessage: T) fun flushStates() } /** - * Destination state manager. + * Message-type agnostic streams state manager. * * Accepts global and stream states, and enforces that stream and global state are not mixed. 
* Determines ready states by querying the StreamsManager for the state of the record index range @@ -44,50 +39,73 @@ interface StateManager { * TODO: Ensure that state is flushed at the end, and require that all state be flushed before the * destination can succeed. */ -@Singleton -class DefaultStateManager( - private val catalog: DestinationCatalog, - private val streamsManager: StreamsManager, - private val stateMessageFactory: AirbyteStateMessageFactory, - private val outputConsumer: OutputConsumer -) : StateManager { +abstract class StreamsStateManager() : StateManager { private val log = KotlinLogging.logger {} - data class GlobalState( + abstract val catalog: DestinationCatalog + abstract val streamsManager: StreamsManager + abstract val outputFactory: MessageConverter + abstract val outputConsumer: Consumer + + data class GlobalState( val streamIndexes: List>, - val stateMessage: DestinationStateMessage + val stateMessage: T ) private val stateIsGlobal: AtomicReference = AtomicReference(null) private val streamStates: - ConcurrentHashMap> = + ConcurrentHashMap> = ConcurrentHashMap() - private val globalStates: ConcurrentLinkedQueue = ConcurrentLinkedQueue() - - override fun addStreamState( - stream: DestinationStream, - index: Long, - stateMessage: DestinationStateMessage - ) { - if (stateIsGlobal.getAndSet(false) != false) { + private val globalStates: ConcurrentLinkedQueue> = ConcurrentLinkedQueue() + + override fun addStreamState(key: DestinationStream, index: Long, stateMessage: T) { + if (stateIsGlobal.updateAndGet { it == true } != false) { throw IllegalStateException("Global state cannot be mixed with non-global state") } - val streamStates = streamStates.getOrPut(stream) { LinkedHashMap() } - streamStates[index] = stateMessage - log.info { "Added state for stream: $stream at index: $index" } + streamStates.compute(key) { _, indexToMessage -> + val map = + if (indexToMessage == null) { + // If the map doesn't exist yet, build it. 
+ ConcurrentLinkedHashMap.Builder().maximumWeightedCapacity(1000).build() + } else { + if (indexToMessage.isNotEmpty()) { + // Make sure the messages are coming in order + val oldestIndex = indexToMessage.ascendingKeySet().first() + if (oldestIndex > index) { + throw IllegalStateException( + "State message received out of order ($oldestIndex before $index)" + ) + } + } + indexToMessage + } + // Actually add the message + map[index] = stateMessage + map + } + + log.info { "Added state for stream: $key at index: $index" } } - override fun addGlobalState( - streamIndexes: List>, - stateMessage: DestinationStateMessage - ) { - if (stateIsGlobal.getAndSet(true) != true) { + // TODO: Is it an error if we don't get all the streams every time? + override fun addGlobalState(keyIndexes: List>, stateMessage: T) { + if (stateIsGlobal.updateAndGet { it != false } != true) { throw IllegalStateException("Global state cannot be mixed with non-global state") } - globalStates.add(GlobalState(streamIndexes, stateMessage)) - log.info { "Added global state with stream indexes: $streamIndexes" } + val head = globalStates.peek() + if (head != null) { + val keyIndexesByStream = keyIndexes.associate { it.first to it.second } + head.streamIndexes.forEach { + if (keyIndexesByStream[it.first]!! 
< it.second) { + throw IllegalStateException("Global state message received out of order") + } + } + } + + globalStates.add(GlobalState(keyIndexes, stateMessage)) + log.info { "Added global state with stream indexes: $keyIndexes" } } override fun flushStates() { @@ -105,19 +123,19 @@ class DefaultStateManager( } private fun flushGlobalStates() { - if (globalStates.isEmpty()) { - return - } - - val head = globalStates.peek() - val allStreamsPersisted = - head.streamIndexes.all { (stream, index) -> - streamsManager.getManager(stream).areRecordsPersistedUntil(index) + while (!globalStates.isEmpty()) { + val head = globalStates.peek() + val allStreamsPersisted = + head.streamIndexes.all { (stream, index) -> + streamsManager.getManager(stream).areRecordsPersistedUntil(index) + } + if (allStreamsPersisted) { + globalStates.poll() + val outMessage = outputFactory.from(head.stateMessage) + outputConsumer.accept(outMessage) + } else { + break } - if (allStreamsPersisted) { - globalStates.poll() - val outMessage = stateMessageFactory.fromDestinationStateMessage(head.stateMessage) - outputConsumer.accept(outMessage) } } @@ -131,7 +149,7 @@ class DefaultStateManager( streamStates.remove(index) ?: throw IllegalStateException("State not found for index: $index") log.info { "Flushing state for stream: $stream at index: $index" } - val outMessage = stateMessageFactory.fromDestinationStateMessage(stateMessage) + val outMessage = outputFactory.from(stateMessage) outputConsumer.accept(outMessage) } else { break @@ -140,3 +158,11 @@ class DefaultStateManager( } } } + +@Singleton +class DefaultStateManager( + override val catalog: DestinationCatalog, + override val streamsManager: StreamsManager, + override val outputFactory: MessageConverter, + override val outputConsumer: Consumer +) : StreamsStateManager() diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StreamManager.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StreamManager.kt 
index b0faf0188d08..4b74df5bf3ba 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StreamManager.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StreamManager.kt @@ -100,7 +100,15 @@ class DefaultStreamManager( rangesState[batch.batch.state] ?: throw IllegalArgumentException("Invalid batch state: ${batch.batch.state}") - stateRanges.addAll(batch.ranges) + // Force the ranges to overlap at their endpoints, in order to work around + // the behavior of `.encloses`, which otherwise would not consider adjacent ranges as + // contiguous. + // This ensures that a state message received at eg, index 10 (after messages 0..9 have + // been received), will pass `{'[0..5]','[6..9]'}.encloses('[0..10)')`. + val expanded = + batch.ranges.asRanges().map { it.span(Range.singleton(it.upperEndpoint() + 1)) } + + stateRanges.addAll(expanded) log.info { "Updated ranges for $stream[${batch.batch.state}]: $stateRanges" } } @@ -108,7 +116,7 @@ class DefaultStreamManager( private fun isProcessingCompleteForState(index: Long, state: Batch.State): Boolean { val completeRanges = rangesState[state]!! - return completeRanges.encloses(Range.closed(0L, index - 1)) + return completeRanges.encloses(Range.closedOpen(0L, index)) } /** True if all records have associated [Batch.State.COMPLETE] batches. */ diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/StateManagerTest.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/StateManagerTest.kt new file mode 100644 index 000000000000..5c34cd8446e8 --- /dev/null +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/StateManagerTest.kt @@ -0,0 +1,462 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.cdk.state + +import com.google.common.collect.Range +import com.google.common.collect.RangeSet +import com.google.common.collect.TreeRangeSet +import io.airbyte.cdk.command.DestinationCatalog +import io.airbyte.cdk.command.DestinationCatalogFactory +import io.airbyte.cdk.command.DestinationStream +import io.airbyte.cdk.message.Batch +import io.airbyte.cdk.message.BatchEnvelope +import io.airbyte.cdk.message.MessageConverter +import io.micronaut.context.annotation.Factory +import io.micronaut.context.annotation.Prototype +import io.micronaut.context.annotation.Replaces +import io.micronaut.context.annotation.Requires +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import jakarta.inject.Inject +import jakarta.inject.Singleton +import java.util.function.Consumer +import java.util.stream.Stream +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.extension.ExtensionContext +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.Arguments +import org.junit.jupiter.params.provider.ArgumentsProvider +import org.junit.jupiter.params.provider.ArgumentsSource + +@MicronautTest +class StateManagerTest { + @Inject lateinit var stateManager: TestStateManager + + companion object { + val stream1 = DestinationStream(DestinationStream.Descriptor("test", "stream1")) + val stream2 = DestinationStream(DestinationStream.Descriptor("test", "stream2")) + } + + @Factory + @Replaces(factory = DestinationCatalogFactory::class) + class MockCatalogFactory { + @Singleton + @Requires(env = ["test"]) + fun make(): DestinationCatalog { + return DestinationCatalog(streams = listOf(stream1, stream2)) + } + } + + /** + * Test state messages. + * + * StateIn: What is passed to the manager. StateOut: What is sent from the manager to the output + * consumer. 
+     */
+    sealed class MockStateIn
+    data class MockStreamStateIn(val stream: DestinationStream, val payload: Int) : MockStateIn()
+    data class MockGlobalStateIn(val payload: Int) : MockStateIn()
+
+    sealed class MockStateOut
+    data class MockStreamStateOut(val stream: DestinationStream, val payload: String) :
+        MockStateOut()
+    data class MockGlobalStateOut(val payload: String) : MockStateOut()
+
+    @Singleton
+    class MockStateMessageFactory : MessageConverter<MockStateIn, MockStateOut> {
+        override fun from(message: MockStateIn): MockStateOut {
+            return when (message) {
+                is MockStreamStateIn ->
+                    MockStreamStateOut(message.stream, message.payload.toString())
+                is MockGlobalStateIn -> MockGlobalStateOut(message.payload.toString())
+            }
+        }
+    }
+
+    @Prototype
+    class MockOutputConsumer : Consumer<MockStateOut> {
+        val collectedStreamOutput = mutableMapOf<DestinationStream, MutableList<String>>()
+        val collectedGlobalOutput = mutableListOf<String>()
+        override fun accept(t: MockStateOut) {
+            when (t) {
+                is MockStreamStateOut ->
+                    collectedStreamOutput.getOrPut(t.stream) { mutableListOf() }.add(t.payload)
+                is MockGlobalStateOut -> collectedGlobalOutput.add(t.payload)
+            }
+        }
+    }
+
+    /**
+     * The only thing we really need is `areRecordsPersistedUntil`. (Technically we're emulating the
+     * @[StreamManager] behavior here, since the state manager doesn't actually know what ranges are
+     * closed, but less than that would make the test unrealistic.)
+     */
+    class MockStreamManager : StreamManager {
+        var persistedRanges: RangeSet<Long> = TreeRangeSet.create()
+
+        override fun countRecordIn(sizeBytes: Long): Long {
+            throw NotImplementedError()
+        }
+
+        override fun markCheckpoint(): Pair<Long, Long> {
+            throw NotImplementedError()
+        }
+
+        override fun updateBatchState(batch: BatchEnvelope<*>) {
+            throw NotImplementedError()
+        }
+
+        override fun isBatchProcessingComplete(): Boolean {
+            throw NotImplementedError()
+        }
+
+        override fun areRecordsPersistedUntil(index: Long): Boolean {
+            return persistedRanges.encloses(Range.closedOpen(0, index))
+        }
+
+        override fun markClosed() {
+            throw NotImplementedError()
+        }
+
+        override fun streamIsClosed(): Boolean {
+            throw NotImplementedError()
+        }
+
+        override suspend fun awaitStreamClosed() {
+            throw NotImplementedError()
+        }
+    }
+
+    @Prototype
+    class MockStreamsManager(catalog: DestinationCatalog) : StreamsManager {
+        private val mockManagers = catalog.streams.associateWith { MockStreamManager() }
+
+        fun addPersistedRanges(stream: DestinationStream, ranges: List<Range<Long>>) {
+            mockManagers[stream]!!.persistedRanges.addAll(ranges)
+        }
+
+        override fun getManager(stream: DestinationStream): StreamManager {
+            return mockManagers[stream]
+                ?: throw IllegalArgumentException("Stream not found: $stream")
+        }
+
+        override suspend fun awaitAllStreamsComplete() {
+            throw NotImplementedError()
+        }
+    }
+
+    @Prototype
+    class TestStateManager(
+        override val catalog: DestinationCatalog,
+        override val streamsManager: MockStreamsManager,
+        override val outputFactory: MessageConverter<MockStateIn, MockStateOut>,
+        override val outputConsumer: MockOutputConsumer
+    ) : StreamsStateManager<MockStateIn, MockStateOut>()
+
+    sealed class TestEvent
+    data class TestStreamMessage(val stream: DestinationStream, val index: Long, val message: Int) :
+        TestEvent() {
+        fun toMockStateIn() = MockStreamStateIn(stream, message)
+    }
+    data class TestGlobalMessage(
+        val streamIndexes: List<Pair<DestinationStream, Long>>,
+        val message: Int
+    ) : TestEvent() {
+        fun toMockStateIn() = MockGlobalStateIn(message)
+
} + data class FlushPoint( + val persistedRanges: Map>> = mapOf() + ) : TestEvent() + + data class TestCase( + val name: String, + val events: List, + // Order matters, but only per stream + val expectedStreamOutput: Map> = mapOf(), + val expectedGlobalOutput: List = listOf(), + val expectedException: Class? = null + ) + + class StateManagerTestArgumentsProvider : ArgumentsProvider { + override fun provideArguments(context: ExtensionContext?): Stream { + return listOf( + TestCase( + name = + "One stream, two stream messages, flush all if all ranges are persisted", + events = + listOf( + TestStreamMessage(stream1, 10L, 1), + TestStreamMessage(stream1, 20L, 2), + FlushPoint( + persistedRanges = + mapOf(stream1 to listOf(Range.closed(0L, 20L))) + ) + ), + expectedStreamOutput = mapOf(stream1 to listOf("1", "2")) + ), + TestCase( + name = "One stream, two messages, flush only the first", + events = + listOf( + TestStreamMessage(stream1, 10L, 1), + TestStreamMessage(stream1, 20L, 2), + FlushPoint( + persistedRanges = + mapOf(stream1 to listOf(Range.closed(0L, 10L))) + ) + ), + expectedStreamOutput = mapOf(stream1 to listOf("1")) + ), + TestCase( + name = "Two streams, two messages each, flush all", + events = + listOf( + TestStreamMessage(stream1, 10L, 11), + TestStreamMessage(stream2, 30L, 21), + TestStreamMessage(stream1, 20L, 12), + TestStreamMessage(stream2, 40L, 22), + FlushPoint( + persistedRanges = + mapOf( + stream1 to listOf(Range.closed(0L, 20L)), + stream2 to listOf(Range.closed(0L, 40L)) + ) + ) + ), + expectedStreamOutput = + mapOf(stream1 to listOf("11", "12"), stream2 to listOf("22", "21")) + ), + TestCase( + name = "One stream, only later range persisted", + events = + listOf( + TestStreamMessage(stream1, 10L, 1), + TestStreamMessage(stream1, 20L, 2), + FlushPoint( + persistedRanges = + mapOf(stream1 to listOf(Range.closed(10L, 20L))) + ) + ), + expectedStreamOutput = mapOf() + ), + TestCase( + name = "One stream, out of order (should fail)", + events = 
+ listOf( + TestStreamMessage(stream1, 20L, 2), + TestStreamMessage(stream1, 10L, 1), + FlushPoint( + persistedRanges = + mapOf(stream1 to listOf(Range.closed(0L, 20L))) + ) + ), + expectedException = IllegalStateException::class.java + ), + TestCase( + name = "Global state, two messages, flush all", + events = + listOf( + TestGlobalMessage(listOf(stream1 to 10L, stream2 to 20L), 1), + TestGlobalMessage(listOf(stream1 to 20L, stream2 to 30L), 2), + FlushPoint( + persistedRanges = + mapOf( + stream1 to listOf(Range.closed(0L, 20L)), + stream2 to listOf(Range.closed(0L, 30L)) + ) + ) + ), + expectedGlobalOutput = listOf("1", "2") + ), + TestCase( + name = "Global state, two messages, range only covers the first", + events = + listOf( + TestGlobalMessage(listOf(stream1 to 10L, stream2 to 20L), 1), + TestGlobalMessage(listOf(stream1 to 20L, stream2 to 30L), 2), + FlushPoint( + persistedRanges = + mapOf( + stream1 to listOf(Range.closed(0L, 10L)), + stream2 to listOf(Range.closed(0L, 20L)) + ) + ) + ), + expectedGlobalOutput = listOf("1") + ), + TestCase( + name = + "Global state, two messages, where the range only covers *one stream*", + events = + listOf( + TestGlobalMessage(listOf(stream1 to 10L, stream2 to 20L), 1), + TestGlobalMessage(listOf(stream1 to 20L, stream2 to 30L), 2), + FlushPoint( + mapOf( + stream1 to listOf(Range.closed(0L, 20L)), + stream2 to listOf(Range.closed(0L, 20L)) + ) + ) + ), + expectedGlobalOutput = listOf("1") + ), + TestCase( + name = "Global state, out of order (should fail)", + events = + listOf( + TestGlobalMessage(listOf(stream1 to 20L, stream2 to 30L), 2), + TestGlobalMessage(listOf(stream1 to 10L, stream2 to 20L), 1), + FlushPoint( + mapOf( + stream1 to listOf(Range.closed(0L, 20L)), + stream2 to listOf(Range.closed(0L, 30L)) + ) + ), + ), + expectedException = IllegalStateException::class.java + ), + TestCase( + name = "Mixed: first stream state, then global (should fail)", + events = + listOf( + TestStreamMessage(stream1, 10L, 1), 
+ TestGlobalMessage(listOf(stream1 to 20L, stream2 to 30L), 2), + FlushPoint( + mapOf( + stream1 to listOf(Range.closed(0L, 20L)), + stream2 to listOf(Range.closed(0L, 30L)) + ) + ) + ), + expectedException = IllegalStateException::class.java + ), + TestCase( + name = "Mixed: first global, then stream state (should fail)", + events = + listOf( + TestGlobalMessage(listOf(stream1 to 10L, stream2 to 20L), 1), + TestStreamMessage(stream1, 20L, 2), + FlushPoint( + persistedRanges = + mapOf( + stream1 to listOf(Range.closed(0L, 20L)), + stream2 to listOf(Range.closed(0L, 30L)) + ) + ) + ), + expectedException = IllegalStateException::class.java + ), + TestCase( + name = "No messages, just a flush", + events = listOf(FlushPoint()), + expectedStreamOutput = mapOf(), + expectedGlobalOutput = listOf() + ), + TestCase( + name = "Two stream messages, flush against empty ranges", + events = + listOf( + TestStreamMessage(stream1, 10L, 1), + TestStreamMessage(stream1, 20L, 2), + FlushPoint() + ), + expectedStreamOutput = mapOf() + ), + TestCase( + name = "Stream state, multiple flush points", + events = + listOf( + TestStreamMessage(stream1, 10L, 1), + FlushPoint(), + TestStreamMessage(stream1, 20L, 2), + FlushPoint(mapOf(stream1 to listOf(Range.closed(0L, 10L)))), + TestStreamMessage(stream1, 30L, 3), + FlushPoint(mapOf(stream1 to listOf(Range.closed(10L, 30L)))) + ), + expectedStreamOutput = mapOf(stream1 to listOf("1", "2", "3")) + ), + TestCase( + name = "Global state, multiple flush points, no output", + events = + listOf( + TestGlobalMessage(listOf(stream1 to 10L, stream2 to 20L), 1), + FlushPoint(), + TestGlobalMessage(listOf(stream1 to 20L, stream2 to 30L), 2), + FlushPoint( + mapOf( + stream1 to listOf(Range.closed(0L, 20L)), + ) + ), + TestGlobalMessage(listOf(stream1 to 30L, stream2 to 40L), 3), + FlushPoint(mapOf(stream2 to listOf(Range.closed(20L, 30L)))) + ), + expectedGlobalOutput = listOf() + ), + TestCase( + name = "Global state, multiple flush points, no output 
until end", + events = + listOf( + TestGlobalMessage(listOf(stream1 to 10L, stream2 to 20L), 1), + FlushPoint(), + TestGlobalMessage(listOf(stream1 to 20L, stream2 to 30L), 2), + FlushPoint( + mapOf( + stream1 to listOf(Range.closed(0L, 20L)), + ) + ), + TestGlobalMessage(listOf(stream1 to 30L, stream2 to 40L), 3), + FlushPoint( + mapOf( + stream1 to listOf(Range.closed(20L, 30L)), + stream2 to listOf(Range.closed(0L, 40L)) + ) + ) + ), + expectedGlobalOutput = listOf("1", "2", "3") + ) + ) + .stream() + .map { Arguments.of(it) } + } + } + + @ParameterizedTest + @ArgumentsSource(StateManagerTestArgumentsProvider::class) + fun testAddingAndFlushingState(testCase: TestCase) { + if (testCase.expectedException != null) { + Assertions.assertThrows(testCase.expectedException) { runTestCase(testCase) } + } else { + runTestCase(testCase) + Assertions.assertEquals( + testCase.expectedStreamOutput, + stateManager.outputConsumer.collectedStreamOutput, + testCase.name + ) + Assertions.assertEquals( + testCase.expectedGlobalOutput, + stateManager.outputConsumer.collectedGlobalOutput, + testCase.name + ) + } + } + + private fun runTestCase(testCase: TestCase) { + testCase.events.forEach { + when (it) { + is TestStreamMessage -> { + stateManager.addStreamState(it.stream, it.index, it.toMockStateIn()) + } + is TestGlobalMessage -> { + stateManager.addGlobalState(it.streamIndexes, it.toMockStateIn()) + } + is FlushPoint -> { + it.persistedRanges.forEach { (stream, ranges) -> + stateManager.streamsManager.addPersistedRanges(stream, ranges) + } + stateManager.flushStates() + } + } + } + } +} From 427e8fd0275c053fe4b53ce220116cea9eee602e Mon Sep 17 00:00:00 2001 From: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Date: Tue, 3 Sep 2024 18:08:30 -0400 Subject: [PATCH 14/51] feat(airbyte-ci): include components.py in manifest-only build (#44879) --- airbyte-ci/connectors/connector_ops/README.md | 1 + .../connector_ops/connector_ops/utils.py | 6 +++ 
.../connectors/connector_ops/pyproject.toml | 2 +- .../tests/test_required_reviewer_checks.py | 4 +- airbyte-ci/connectors/pipelines/README.md | 1 + .../steps/manifest_only_connectors.py | 11 ++++- .../connectors/pipelines/pipelines/consts.py | 1 + .../connectors/pipelines/pyproject.toml | 2 +- .../test_manifest_only_connectors.py | 48 +++++++++++++++++++ .../tests/test_helpers/test_utils.py | 3 +- .../connectors/pipelines/tests/utils.py | 4 ++ 11 files changed, 76 insertions(+), 7 deletions(-) diff --git a/airbyte-ci/connectors/connector_ops/README.md b/airbyte-ci/connectors/connector_ops/README.md index 03d9cb23590c..91baeec723a4 100644 --- a/airbyte-ci/connectors/connector_ops/README.md +++ b/airbyte-ci/connectors/connector_ops/README.md @@ -37,6 +37,7 @@ poetry run pytest ``` ## Changelog +- 0.9.0: Add components path attribute for manifest-only connectors. - 0.8.1: Gradle dependency discovery logic supports the Bulk CDK. - 0.8.0: Add a `sbom_url` property to `Connector` - 0.7.0: Added required reviewers for manifest-only connector changes/additions. 
diff --git a/airbyte-ci/connectors/connector_ops/connector_ops/utils.py b/airbyte-ci/connectors/connector_ops/connector_ops/utils.py index 463b2145862b..33b02f92bbed 100644 --- a/airbyte-ci/connectors/connector_ops/connector_ops/utils.py +++ b/airbyte-ci/connectors/connector_ops/connector_ops/utils.py @@ -60,6 +60,7 @@ def download_catalog(catalog_url): OSS_CATALOG = download_catalog(OSS_CATALOG_URL) MANIFEST_FILE_NAME = "manifest.yaml" +COMPONENTS_FILE_NAME = "components.py" DOCKERFILE_FILE_NAME = "Dockerfile" PYPROJECT_FILE_NAME = "pyproject.toml" ICON_FILE_NAME = "icon.svg" @@ -347,6 +348,11 @@ def manifest_path(self) -> Path: return self._manifest_low_code_path + @property + def manifest_only_components_path(self) -> Path: + """Return the path to the components.py file of a manifest-only connector.""" + return self.code_directory / COMPONENTS_FILE_NAME + @property def has_dockerfile(self) -> bool: return self.dockerfile_file_path.is_file() diff --git a/airbyte-ci/connectors/connector_ops/pyproject.toml b/airbyte-ci/connectors/connector_ops/pyproject.toml index e3dc0cba350d..ca4722a22199 100644 --- a/airbyte-ci/connectors/connector_ops/pyproject.toml +++ b/airbyte-ci/connectors/connector_ops/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "connector_ops" -version = "0.8.1" +version = "0.9.0" description = "Packaged maintained by the connector operations team to perform CI for connectors" authors = ["Airbyte "] diff --git a/airbyte-ci/connectors/connector_ops/tests/test_required_reviewer_checks.py b/airbyte-ci/connectors/connector_ops/tests/test_required_reviewer_checks.py index 1fbb2b2439d2..01b569fce490 100644 --- a/airbyte-ci/connectors/connector_ops/tests/test_required_reviewer_checks.py +++ b/airbyte-ci/connectors/connector_ops/tests/test_required_reviewer_checks.py @@ -23,7 +23,7 @@ def mock_diffed_branched(mocker): @pytest.fixture def pokeapi_metadata_path(): - return 
"airbyte-integrations/connectors/source-pokeapi/metadata.yaml" + return "airbyte-integrations/connectors/source-zoho-crm/metadata.yaml" @pytest.fixture @@ -37,7 +37,7 @@ def not_tracked_change_expected_team(tmp_path, pokeapi_metadata_path): backup_path = tmp_path / "non_strategic_acceptance_test_config.backup" shutil.copyfile(pokeapi_metadata_path, backup_path) with open(pokeapi_metadata_path, "a") as metadata_file: - metadata_file.write("not_tracked") + metadata_file.write("\nnot_tracked: true\n") yield expected_teams shutil.copyfile(backup_path, pokeapi_metadata_path) diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md index beee5da58af3..8cb2d86da43e 100644 --- a/airbyte-ci/connectors/pipelines/README.md +++ b/airbyte-ci/connectors/pipelines/README.md @@ -843,6 +843,7 @@ airbyte-ci connectors --language=low-code migrate-to-manifest-only | Version | PR | Description | | ------- | ---------------------------------------------------------- |------------------------------------------------------------------------------------------------------------------------------| +| 4.35.0 | [#44879](https://github.com/airbytehq/airbyte/pull/44879) | Mount `components.py` when building manifest-only connector image | | 4.34.2 | [#44786](https://github.com/airbytehq/airbyte/pull/44786) | Pre-emptively skip archived connectors when searching for modified files | | 4.34.1 | [#44557](https://github.com/airbytehq/airbyte/pull/44557) | Conditionally propagate parameters in manifest-only migration | | 4.34.0 | [#44551](https://github.com/airbytehq/airbyte/pull/44551) | `connectors publish` do not push the `latest` tag when the current version is a release candidate. 
| diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/manifest_only_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/manifest_only_connectors.py index c1c5e561fc86..f6a67a1e30d1 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/manifest_only_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/manifest_only_connectors.py @@ -9,7 +9,7 @@ from pipelines.airbyte_ci.connectors.build_image.steps import build_customization from pipelines.airbyte_ci.connectors.build_image.steps.common import BuildConnectorImagesBase from pipelines.airbyte_ci.connectors.context import ConnectorContext -from pipelines.consts import MANIFEST_FILE_PATH +from pipelines.consts import COMPONENTS_FILE_PATH, MANIFEST_FILE_PATH from pipelines.models.steps import StepResult from pydash.objects import get # type: ignore @@ -43,11 +43,20 @@ async def _build_from_base_image(self, platform: Platform) -> Container: """ self.logger.info(f"Building connector from base image in metadata for {platform}") + # Mount manifest file base_container = self._get_base_container(platform).with_file( f"source_declarative_manifest/{MANIFEST_FILE_PATH}", (await self.context.get_connector_dir(include=[MANIFEST_FILE_PATH])).file(MANIFEST_FILE_PATH), ) + # Mount components file if it exists + components_file = self.context.connector.manifest_only_components_path + if components_file.exists(): + base_container = base_container.with_file( + f"source_declarative_manifest/{COMPONENTS_FILE_PATH}", + (await self.context.get_connector_dir(include=[COMPONENTS_FILE_PATH])).file(COMPONENTS_FILE_PATH), + ) + connector_container = build_customization.apply_airbyte_entrypoint(base_container, self.context.connector) return connector_container diff --git a/airbyte-ci/connectors/pipelines/pipelines/consts.py 
b/airbyte-ci/connectors/pipelines/pipelines/consts.py index 0eab987bdd96..1bfc9115cb47 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/consts.py +++ b/airbyte-ci/connectors/pipelines/pipelines/consts.py @@ -10,6 +10,7 @@ PYPROJECT_TOML_FILE_PATH = "pyproject.toml" MANIFEST_FILE_PATH = "manifest.yaml" +COMPONENTS_FILE_PATH = "components.py" LICENSE_SHORT_FILE_PATH = "LICENSE_SHORT" CONNECTOR_TESTING_REQUIREMENTS = [ "pip==21.3.1", diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index 5de496f32ec3..a349ad9d1329 100644 --- a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pipelines" -version = "4.34.2" +version = "4.35.0" description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines" authors = ["Airbyte "] diff --git a/airbyte-ci/connectors/pipelines/tests/test_build_image/test_manifest_only_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_build_image/test_manifest_only_connectors.py index 04d58614e2d1..105733ae3655 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_build_image/test_manifest_only_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_build_image/test_manifest_only_connectors.py @@ -35,10 +35,22 @@ def test_context_with_connector_with_base_image(self, test_context): } return test_context + @pytest.fixture + def mock_connector_directory(self, mocker): + mock_components_file = mocker.Mock() + mock_connector_dir = mocker.Mock() + mock_connector_dir.file.return_value = mock_components_file + return mock_connector_dir, mock_components_file + + def _assert_file_not_handled(self, container_mock, file_path): + """Assert that a specified file_path was not handled by the container_mock""" + assert not any(file_path in call.args[0] for call in container_mock.with_file.call_args_list) + async def 
test__run_using_base_image_with_mocks(self, mocker, test_context_with_connector_with_base_image, all_platforms): container_built_from_base = mock_container() container_built_from_base.with_label.return_value = container_built_from_base + mocker.patch.object(Path, "exists", return_value=True) # Mock Path.exists() to always return True mocker.patch.object( manifest_only_connectors.BuildConnectorImages, "_build_from_base_image", @@ -59,3 +71,39 @@ async def test__run_using_base_image_with_mocks(self, mocker, test_context_with_ assert step_result.status is StepStatus.SUCCESS for platform in all_platforms: assert step_result.output[platform] == container_built_from_base + + @pytest.mark.parametrize("components_file_exists", [True, False]) + async def test__run_using_base_image_with_components_file( + self, mocker, all_platforms, test_context_with_connector_with_base_image, mock_connector_directory, components_file_exists + ): + mock_connector_dir, mock_components_file = mock_connector_directory + container_built_from_base = mock_container() + + container_built_from_base.with_label.return_value = container_built_from_base + container_built_from_base.with_file.return_value = container_built_from_base + + test_context_with_connector_with_base_image.get_connector_dir = mocker.AsyncMock(return_value=mock_connector_dir) + test_context_with_connector_with_base_image.connector.manifest_only_components_path.exists = mocker.Mock( + return_value=components_file_exists + ) + + mocker.patch.object( + manifest_only_connectors.BuildConnectorImages, + "_get_base_container", + return_value=container_built_from_base, + ) + + mocker.patch.object( + build_customization, + "apply_airbyte_entrypoint", + return_value=container_built_from_base, + ) + + step = manifest_only_connectors.BuildConnectorImages(test_context_with_connector_with_base_image) + + await step._build_connector(all_platforms[0], container_built_from_base) + if components_file_exists: + 
container_built_from_base.with_file.assert_any_call("source_declarative_manifest/components.py", mock_components_file) + mock_connector_dir.file.assert_any_call("components.py") + else: + self._assert_file_not_handled(container_built_from_base, "source_declarative_manifest/components.py") diff --git a/airbyte-ci/connectors/pipelines/tests/test_helpers/test_utils.py b/airbyte-ci/connectors/pipelines/tests/test_helpers/test_utils.py index 4583fe7cf450..d700a54fe7ad 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_helpers/test_utils.py +++ b/airbyte-ci/connectors/pipelines/tests/test_helpers/test_utils.py @@ -183,8 +183,7 @@ async def test_check_path_in_workdir(dagger_client): .with_workdir(str(connector.code_directory)) ) assert await utils.check_path_in_workdir(container, "metadata.yaml") - assert await utils.check_path_in_workdir(container, "pyproject.toml") - assert await utils.check_path_in_workdir(container, "poetry.lock") + assert await utils.check_path_in_workdir(container, "manifest.yaml") assert await utils.check_path_in_workdir(container, "not_existing_file") is False diff --git a/airbyte-ci/connectors/pipelines/tests/utils.py b/airbyte-ci/connectors/pipelines/tests/utils.py index 958858d72ffb..12eb88357f1a 100644 --- a/airbyte-ci/connectors/pipelines/tests/utils.py +++ b/airbyte-ci/connectors/pipelines/tests/utils.py @@ -19,6 +19,9 @@ def with_label(self, *args, **kwargs): async def with_exec(self, *args, **kwargs): return self + def with_file(self, *args, **kwargs): + return self + def pick_a_random_connector( language: ConnectorLanguage = None, @@ -55,4 +58,5 @@ def mock_container(): container_mock = AsyncMock(MockContainerClass) container_mock.with_label.return_value = container_mock container_mock.with_exec.return_value = container_mock + container_mock.with_file.return_value = container_mock return container_mock From 5bf11d9d9b35a3cc29e623874523353d68a7483c Mon Sep 17 00:00:00 2001 From: Evan Tahler Date: Tue, 3 Sep 2024 15:38:08 -0700 
Subject: [PATCH 15/51] Destination Databricks - clarify catalog name option (#45115) --- .../connectors/destination-databricks/metadata.yaml | 2 +- .../destination-databricks/src/main/resources/spec.json | 4 ++-- docs/integrations/destinations/databricks.md | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/airbyte-integrations/connectors/destination-databricks/metadata.yaml b/airbyte-integrations/connectors/destination-databricks/metadata.yaml index 328a9b1e733a..7b506d7bce56 100644 --- a/airbyte-integrations/connectors/destination-databricks/metadata.yaml +++ b/airbyte-integrations/connectors/destination-databricks/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 072d5540-f236-4294-ba7c-ade8fd918496 - dockerImageTag: 3.2.2 + dockerImageTag: 3.2.3 dockerRepository: airbyte/destination-databricks githubIssueLabel: destination-databricks icon: databricks.svg diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json index f220a4846c3c..ca7ff42b79eb 100644 --- a/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-databricks/src/main/resources/spec.json @@ -46,8 +46,8 @@ "order": 4 }, "database": { - "title": "Databricks Unity Catalog Path", - "description": "The name of the unity catalog path for the database", + "title": "Databricks Unity Catalog Name", + "description": "The name of the unity catalog for the database", "type": "string", "order": 5 }, diff --git a/docs/integrations/destinations/databricks.md b/docs/integrations/destinations/databricks.md index d50bff076cb7..b482cd1648df 100644 --- a/docs/integrations/destinations/databricks.md +++ b/docs/integrations/destinations/databricks.md @@ -85,6 +85,7 @@ with the raw tables, and their format is subject to change without notice. 
| Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------------------------------------------------------------------ | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| 3.2.3 | 2024-09-03 | [#45115](https://github.com/airbytehq/airbyte/pull/45115) | Clarify Unity Catalog Name option. | | 3.2.2 | 2024-08-22 | [#44941](https://github.com/airbytehq/airbyte/pull/44941) | Clarify Unity Catalog Path option. | | 3.2.1 | 2024-08-22 | [#44506](https://github.com/airbytehq/airbyte/pull/44506) | Handle uppercase/mixed-case stream name/namespaces | | 3.2.0 | 2024-08-12 | [#40712](https://github.com/airbytehq/airbyte/pull/40712) | Rely solely on PAT, instead of also needing a user/pass | From af58faa63ff211c225c4348cb490c70a3456ab1b Mon Sep 17 00:00:00 2001 From: Johnny Schmidt Date: Tue, 3 Sep 2024 16:28:37 -0700 Subject: [PATCH 16/51] Unit tests for streams manager (#45090) --- .../airbyte/cdk/command/DestinationCatalog.kt | 6 +- .../airbyte/cdk/message/MessageQueueWriter.kt | 5 +- .../{StreamManager.kt => StreamsManager.kt} | 125 +++++--- .../io/airbyte/cdk/task/TeardownTask.kt | 2 +- .../airbyte/cdk/command/MockCatalogFactory.kt | 27 ++ .../io/airbyte/cdk/state/StateManagerTest.kt | 36 +-- .../airbyte/cdk/state/StreamsManagerTest.kt | 282 ++++++++++++++++++ 7 files changed, 412 insertions(+), 71 deletions(-) rename airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/{StreamManager.kt => StreamsManager.kt} (61%) create mode 100644 airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/command/MockCatalogFactory.kt create mode 100644 airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/StreamsManagerTest.kt diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/command/DestinationCatalog.kt 
b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/command/DestinationCatalog.kt index c476c049d840..e5ac1ddf92c0 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/command/DestinationCatalog.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/command/DestinationCatalog.kt @@ -25,8 +25,12 @@ data class DestinationCatalog( } } +interface DestinationCatalogFactory { + fun make(): DestinationCatalog +} + @Factory -class DestinationCatalogFactory( +class DefaultDestinationCatalogFactory( private val catalog: ConfiguredAirbyteCatalog, private val streamFactory: DestinationStreamFactory ) { diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/MessageQueueWriter.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/MessageQueueWriter.kt index 50c9f637ef88..a09979a2d7d1 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/MessageQueueWriter.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/message/MessageQueueWriter.kt @@ -43,13 +43,12 @@ class DestinationMessageQueueWriter( /* If the input message represents a record. */ is DestinationRecordMessage -> { val manager = streamsManager.getManager(message.stream) - val index = manager.countRecordIn(sizeBytes) when (message) { /* If a data record */ is DestinationRecord -> { val wrapped = StreamRecordWrapped( - index = index, + index = manager.countRecordIn(), sizeBytes = sizeBytes, record = message ) @@ -58,7 +57,7 @@ class DestinationMessageQueueWriter( /* If an end-of-stream marker. 
*/ is DestinationStreamComplete -> { - val wrapped = StreamCompleteWrapped(index) + val wrapped = StreamCompleteWrapped(index = manager.countEndOfStream()) messageQueue.getChannel(message.stream).send(wrapped) } } diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StreamManager.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StreamsManager.kt similarity index 61% rename from airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StreamManager.kt rename to airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StreamsManager.kt index 4b74df5bf3ba..c58e126a9f8c 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StreamManager.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/StreamsManager.kt @@ -15,15 +15,17 @@ import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.context.annotation.Factory import jakarta.inject.Singleton import java.util.concurrent.ConcurrentHashMap -import java.util.concurrent.CountDownLatch +import java.util.concurrent.atomic.AtomicBoolean import java.util.concurrent.atomic.AtomicLong -import kotlinx.coroutines.Dispatchers -import kotlinx.coroutines.withContext +import kotlinx.coroutines.channels.Channel /** Manages the state of all streams in the destination. */ interface StreamsManager { + /** Get the manager for the given stream. Throws an exception if the stream is not found. */ fun getManager(stream: DestinationStream): StreamManager - suspend fun awaitAllStreamsComplete() + + /** Suspend until all streams are closed. 
*/ + suspend fun awaitAllStreamsClosed() } class DefaultStreamsManager( @@ -33,68 +35,98 @@ class DefaultStreamsManager( return streamManagers[stream] ?: throw IllegalArgumentException("Stream not found: $stream") } - override suspend fun awaitAllStreamsComplete() { + override suspend fun awaitAllStreamsClosed() { streamManagers.forEach { (_, manager) -> manager.awaitStreamClosed() } } } /** Manages the state of a single stream. */ interface StreamManager { - fun countRecordIn(sizeBytes: Long): Long + /** Count incoming record and return the record's *index*. */ + fun countRecordIn(): Long + + /** + * Count the end-of-stream. Expect this exactly once. Expect no further `countRecordIn`, and + * expect that `markClosed` will always occur after this. + */ + fun countEndOfStream(): Long + + /** + * Mark a checkpoint in the stream and return the current index and the number of records since + * the last one. + * + * NOTE: Single-writer. If in the future multiple threads set checkpoints, this method should be + * synchronized. + */ fun markCheckpoint(): Pair + + /** Record that the given batch's state has been reached for the associated range(s). */ fun updateBatchState(batch: BatchEnvelope) + + /** + * True if all are true: + * * all records have been seen (ie, we've counted an end-of-stream) + * * a [Batch.State.COMPLETE] batch range has been seen covering every record + * + * Does NOT require that the stream be closed. + */ fun isBatchProcessingComplete(): Boolean + + /** + * True if all records in [0, index] have at least reached [Batch.State.PERSISTED]. This is + * implicitly true if they have all reached [Batch.State.COMPLETE]. + */ fun areRecordsPersistedUntil(index: Long): Boolean + /** Mark the stream as closed. This should only be called after all records have been read. */ fun markClosed() + + /** True if the stream has been marked as closed. */ fun streamIsClosed(): Boolean + + /** Suspend until the stream is closed. 
*/ suspend fun awaitStreamClosed() } -/** - * Maintains a map of stream -> status metadata, and a map of batch state -> record ranges for which - * that state has been reached. - * - * TODO: Log a detailed report of the stream status on a regular cadence. - */ class DefaultStreamManager( val stream: DestinationStream, ) : StreamManager { private val log = KotlinLogging.logger {} - data class StreamStatus( - val recordCount: AtomicLong = AtomicLong(0), - val totalBytes: AtomicLong = AtomicLong(0), - val enqueuedSize: AtomicLong = AtomicLong(0), - val lastCheckpoint: AtomicLong = AtomicLong(0L), - val closedLatch: CountDownLatch = CountDownLatch(1), - ) + private val recordCount = AtomicLong(0) + private val lastCheckpoint = AtomicLong(0L) + private val readIsClosed = AtomicBoolean(false) + private val streamIsClosed = AtomicBoolean(false) + private val closedLock = Channel() - private val streamStatus: StreamStatus = StreamStatus() private val rangesState: ConcurrentHashMap> = ConcurrentHashMap() init { Batch.State.entries.forEach { rangesState[it] = TreeRangeSet.create() } } - override fun countRecordIn(sizeBytes: Long): Long { - val index = streamStatus.recordCount.getAndIncrement() - streamStatus.totalBytes.addAndGet(sizeBytes) - streamStatus.enqueuedSize.addAndGet(sizeBytes) - return index + override fun countRecordIn(): Long { + if (readIsClosed.get()) { + throw IllegalStateException("Stream is closed for reading") + } + + return recordCount.getAndIncrement() + } + + override fun countEndOfStream(): Long { + if (readIsClosed.getAndSet(true)) { + throw IllegalStateException("Stream is closed for reading") + } + + return recordCount.get() } - /** - * Mark a checkpoint in the stream and return the current index and the number of records since - * the last one. 
- */ override fun markCheckpoint(): Pair { - val index = streamStatus.recordCount.get() - val lastCheckpoint = streamStatus.lastCheckpoint.getAndSet(index) + val index = recordCount.get() + val lastCheckpoint = lastCheckpoint.getAndSet(index) return Pair(index, index - lastCheckpoint) } - /** Record that the given batch's state has been reached for the associated range(s). */ override fun updateBatchState(batch: BatchEnvelope) { val stateRanges = rangesState[batch.batch.state] @@ -112,37 +144,44 @@ class DefaultStreamManager( log.info { "Updated ranges for $stream[${batch.batch.state}]: $stateRanges" } } - /** True if all records in [0, index] have reached the given state. */ + /** True if all records in `[0, index)` have reached the given state. */ private fun isProcessingCompleteForState(index: Long, state: Batch.State): Boolean { - val completeRanges = rangesState[state]!! return completeRanges.encloses(Range.closedOpen(0L, index)) } - /** True if all records have associated [Batch.State.COMPLETE] batches. */ override fun isBatchProcessingComplete(): Boolean { - return isProcessingCompleteForState(streamStatus.recordCount.get(), Batch.State.COMPLETE) + /* If the stream hasn't been fully read, it can't be done. */ + if (!readIsClosed.get()) { + return false + } + + return isProcessingCompleteForState(recordCount.get(), Batch.State.COMPLETE) } - /** - * True if all records in [0, index] have at least reached [Batch.State.PERSISTED]. This is - * implicitly true if they have all reached [Batch.State.COMPLETE]. 
- */ override fun areRecordsPersistedUntil(index: Long): Boolean { return isProcessingCompleteForState(index, Batch.State.PERSISTED) || isProcessingCompleteForState(index, Batch.State.COMPLETE) // complete => persisted } override fun markClosed() { - streamStatus.closedLatch.countDown() + if (!readIsClosed.get()) { + throw IllegalStateException("Stream must be fully read before it can be closed") + } + + if (streamIsClosed.compareAndSet(false, true)) { + closedLock.trySend(Unit) + } } override fun streamIsClosed(): Boolean { - return streamStatus.closedLatch.count == 0L + return streamIsClosed.get() } override suspend fun awaitStreamClosed() { - withContext(Dispatchers.IO) { streamStatus.closedLatch.await() } + if (!streamIsClosed.get()) { + closedLock.receive() + } } } diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/task/TeardownTask.kt b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/task/TeardownTask.kt index 52fec0acaf3d..5d76c2b260f9 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/task/TeardownTask.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/task/TeardownTask.kt @@ -34,7 +34,7 @@ class TeardownTask( } /** Ensure we don't run until all streams have completed */ - streamsManager.awaitAllStreamsComplete() + streamsManager.awaitAllStreamsClosed() destination.teardown() taskLauncher.stop() diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/command/MockCatalogFactory.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/command/MockCatalogFactory.kt new file mode 100644 index 000000000000..dc19a9e28452 --- /dev/null +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/command/MockCatalogFactory.kt @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.cdk.command + +import io.micronaut.context.annotation.Factory +import io.micronaut.context.annotation.Replaces +import io.micronaut.context.annotation.Requires +import jakarta.inject.Named +import jakarta.inject.Singleton + +@Factory +@Replaces(factory = DestinationCatalogFactory::class) +@Requires(env = ["test"]) +class MockCatalogFactory : DestinationCatalogFactory { + companion object { + val stream1 = DestinationStream(DestinationStream.Descriptor("test", "stream1")) + val stream2 = DestinationStream(DestinationStream.Descriptor("test", "stream2")) + } + + @Singleton + @Named("mockCatalog") + override fun make(): DestinationCatalog { + return DestinationCatalog(streams = listOf(stream1, stream2)) + } +} diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/StateManagerTest.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/StateManagerTest.kt index 5c34cd8446e8..edbdaad56100 100644 --- a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/StateManagerTest.kt +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/StateManagerTest.kt @@ -8,17 +8,17 @@ import com.google.common.collect.Range import com.google.common.collect.RangeSet import com.google.common.collect.TreeRangeSet import io.airbyte.cdk.command.DestinationCatalog -import io.airbyte.cdk.command.DestinationCatalogFactory import io.airbyte.cdk.command.DestinationStream +import io.airbyte.cdk.command.MockCatalogFactory.Companion.stream1 +import io.airbyte.cdk.command.MockCatalogFactory.Companion.stream2 import io.airbyte.cdk.message.Batch import io.airbyte.cdk.message.BatchEnvelope import io.airbyte.cdk.message.MessageConverter -import io.micronaut.context.annotation.Factory import io.micronaut.context.annotation.Prototype -import io.micronaut.context.annotation.Replaces import io.micronaut.context.annotation.Requires import io.micronaut.test.extensions.junit5.annotation.MicronautTest import jakarta.inject.Inject 
+import jakarta.inject.Named import jakarta.inject.Singleton import java.util.function.Consumer import java.util.stream.Stream @@ -29,25 +29,10 @@ import org.junit.jupiter.params.provider.Arguments import org.junit.jupiter.params.provider.ArgumentsProvider import org.junit.jupiter.params.provider.ArgumentsSource -@MicronautTest +@MicronautTest(environments = ["StateManagerTest"]) class StateManagerTest { @Inject lateinit var stateManager: TestStateManager - companion object { - val stream1 = DestinationStream(DestinationStream.Descriptor("test", "stream1")) - val stream2 = DestinationStream(DestinationStream.Descriptor("test", "stream2")) - } - - @Factory - @Replaces(factory = DestinationCatalogFactory::class) - class MockCatalogFactory { - @Singleton - @Requires(env = ["test"]) - fun make(): DestinationCatalog { - return DestinationCatalog(streams = listOf(stream1, stream2)) - } - } - /** * Test state messages. * @@ -95,7 +80,11 @@ class StateManagerTest { class MockStreamManager : StreamManager { var persistedRanges: RangeSet = TreeRangeSet.create() - override fun countRecordIn(sizeBytes: Long): Long { + override fun countRecordIn(): Long { + throw NotImplementedError() + } + + override fun countEndOfStream(): Long { throw NotImplementedError() } @@ -129,7 +118,8 @@ class StateManagerTest { } @Prototype - class MockStreamsManager(catalog: DestinationCatalog) : StreamsManager { + @Requires(env = ["StateManagerTest"]) + class MockStreamsManager(@Named("mockCatalog") catalog: DestinationCatalog) : StreamsManager { private val mockManagers = catalog.streams.associateWith { MockStreamManager() } fun addPersistedRanges(stream: DestinationStream, ranges: List>) { @@ -141,14 +131,14 @@ class StateManagerTest { ?: throw IllegalArgumentException("Stream not found: $stream") } - override suspend fun awaitAllStreamsComplete() { + override suspend fun awaitAllStreamsClosed() { throw NotImplementedError() } } @Prototype class TestStateManager( - override val catalog: 
DestinationCatalog, + @Named("mockCatalog") override val catalog: DestinationCatalog, override val streamsManager: MockStreamsManager, override val outputFactory: MessageConverter, override val outputConsumer: MockOutputConsumer diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/StreamsManagerTest.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/StreamsManagerTest.kt new file mode 100644 index 000000000000..5ba013f4fab4 --- /dev/null +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/StreamsManagerTest.kt @@ -0,0 +1,282 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.state + +import com.google.common.collect.Range +import io.airbyte.cdk.command.DestinationCatalog +import io.airbyte.cdk.command.DestinationStream +import io.airbyte.cdk.command.MockCatalogFactory.Companion.stream1 +import io.airbyte.cdk.command.MockCatalogFactory.Companion.stream2 +import io.airbyte.cdk.message.Batch +import io.airbyte.cdk.message.BatchEnvelope +import io.airbyte.cdk.message.SimpleBatch +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import jakarta.inject.Inject +import jakarta.inject.Named +import java.util.concurrent.atomic.AtomicBoolean +import java.util.stream.Stream +import kotlinx.coroutines.launch +import kotlinx.coroutines.test.runTest +import kotlinx.coroutines.withTimeout +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtensionContext +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.Arguments +import org.junit.jupiter.params.provider.ArgumentsProvider +import org.junit.jupiter.params.provider.ArgumentsSource + +@MicronautTest +class StreamsManagerTest { + @Inject @Named("mockCatalog") lateinit var catalog: DestinationCatalog + + @Test + fun testCountRecordsAndCheckpoint() { + val streamsManager = 
StreamsManagerFactory(catalog).make() + val manager1 = streamsManager.getManager(stream1) + val manager2 = streamsManager.getManager(stream2) + + // Incrementing once yields (n, n) + repeat(10) { manager1.countRecordIn() } + val (index, count) = manager1.markCheckpoint() + + Assertions.assertEquals(10, index) + Assertions.assertEquals(10, count) + + // Incrementing a second time yields (n + m, m) + repeat(5) { manager1.countRecordIn() } + val (index2, count2) = manager1.markCheckpoint() + + Assertions.assertEquals(15, index2) + Assertions.assertEquals(5, count2) + + // Never incrementing yields (0, 0) + val (index3, count3) = manager2.markCheckpoint() + + Assertions.assertEquals(0, index3) + Assertions.assertEquals(0, count3) + + // Incrementing twice in a row yields (n + m + 0, 0) + val (index4, count4) = manager1.markCheckpoint() + + Assertions.assertEquals(15, index4) + Assertions.assertEquals(0, count4) + } + + @Test + fun testGettingNonexistentManagerFails() { + val streamsManager = StreamsManagerFactory(catalog).make() + Assertions.assertThrows(IllegalArgumentException::class.java) { + streamsManager.getManager( + DestinationStream(DestinationStream.Descriptor("test", "non-existent")) + ) + } + } + + sealed class TestEvent + data class SetRecordCount(val count: Long) : TestEvent() + data object SetEndOfStream : TestEvent() + data class AddPersisted(val firstIndex: Long, val lastIndex: Long) : TestEvent() + data class AddComplete(val firstIndex: Long, val lastIndex: Long) : TestEvent() + data class ExpectPersistedUntil(val end: Long, val expectation: Boolean = true) : TestEvent() + data class ExpectComplete(val expectation: Boolean = true) : TestEvent() + + data class TestCase( + val name: String, + val events: List>, + ) + + class TestUpdateBatchStateProvider : ArgumentsProvider { + override fun provideArguments(context: ExtensionContext): Stream { + return listOf( + TestCase( + "Single stream, single batch", + listOf( + Pair(stream1, SetRecordCount(10)), + 
Pair(stream1, AddPersisted(0, 9)), + Pair(stream1, ExpectPersistedUntil(9)), + Pair(stream1, ExpectPersistedUntil(10)), + Pair(stream1, ExpectComplete(false)), + Pair(stream1, ExpectPersistedUntil(11, false)), + Pair(stream2, ExpectPersistedUntil(10, false)), + ) + ), + TestCase( + "Single stream, multiple batches", + listOf( + Pair(stream1, SetRecordCount(10)), + Pair(stream1, AddPersisted(0, 4)), + Pair(stream1, ExpectPersistedUntil(4)), + Pair(stream1, AddPersisted(5, 9)), + Pair(stream1, ExpectPersistedUntil(9)), + Pair(stream1, ExpectPersistedUntil(10)), + Pair(stream1, ExpectComplete(false)), + Pair(stream1, AddComplete(0, 9)), + Pair(stream1, ExpectComplete(false)), + Pair(stream1, SetEndOfStream), + Pair(stream1, ExpectComplete(true)), + Pair(stream1, ExpectPersistedUntil(11, false)), + Pair(stream2, ExpectPersistedUntil(10, false)), + ) + ), + TestCase( + "Single stream, multiple batches, out of order", + listOf( + Pair(stream1, SetRecordCount(10)), + Pair(stream1, AddPersisted(5, 9)), + Pair(stream1, ExpectPersistedUntil(10, false)), + Pair(stream1, AddPersisted(0, 4)), + Pair(stream1, ExpectPersistedUntil(10)), + Pair(stream1, ExpectComplete(false)), + Pair(stream1, SetEndOfStream), + Pair(stream1, AddComplete(5, 9)), + Pair(stream1, ExpectComplete(false)), + Pair(stream1, AddComplete(0, 4)), + Pair(stream1, ExpectComplete(true)), + ) + ), + TestCase( + "multiple streams", + listOf( + Pair(stream1, SetRecordCount(10)), + Pair(stream2, SetRecordCount(20)), + Pair(stream2, AddPersisted(0, 9)), + Pair(stream2, ExpectPersistedUntil(10, true)), + Pair(stream1, ExpectPersistedUntil(10, false)), + Pair(stream2, SetEndOfStream), + Pair(stream2, ExpectComplete(false)), + Pair(stream1, AddPersisted(0, 9)), + Pair(stream1, ExpectPersistedUntil(10)), + Pair(stream1, ExpectComplete(false)), + Pair(stream2, AddComplete(10, 20)), + Pair(stream2, ExpectComplete(false)), + Pair(stream1, SetEndOfStream), + Pair(stream1, ExpectComplete(false)), + Pair(stream1, 
AddComplete(0, 9)), + Pair(stream1, ExpectComplete(true)), + Pair(stream2, AddComplete(0, 9)), + Pair(stream2, ExpectPersistedUntil(20, true)), + Pair(stream2, ExpectComplete(true)), + ) + ) + ) + .map { Arguments.of(it) } + .stream() + } + } + + @ParameterizedTest + @ArgumentsSource(TestUpdateBatchStateProvider::class) + fun testUpdateBatchState(testCase: TestCase) { + val streamsManager = StreamsManagerFactory(catalog).make() + testCase.events.forEach { (stream, event) -> + val manager = streamsManager.getManager(stream) + when (event) { + is SetRecordCount -> repeat(event.count.toInt()) { manager.countRecordIn() } + is SetEndOfStream -> manager.countEndOfStream() + is AddPersisted -> + manager.updateBatchState( + BatchEnvelope( + SimpleBatch(Batch.State.PERSISTED), + Range.closed(event.firstIndex, event.lastIndex) + ) + ) + is AddComplete -> + manager.updateBatchState( + BatchEnvelope( + SimpleBatch(Batch.State.COMPLETE), + Range.closed(event.firstIndex, event.lastIndex) + ) + ) + is ExpectPersistedUntil -> + Assertions.assertEquals( + event.expectation, + manager.areRecordsPersistedUntil(event.end), + "$stream: ${testCase.name}: ${event.end}" + ) + is ExpectComplete -> + Assertions.assertEquals( + event.expectation, + manager.isBatchProcessingComplete(), + "$stream: ${testCase.name}" + ) + } + } + } + + @Test + fun testCannotUpdateOrCloseReadClosedStream() { + val streamsManager = StreamsManagerFactory(catalog).make() + val manager = streamsManager.getManager(stream1) + + // Can't close before end-of-stream + Assertions.assertThrows(IllegalStateException::class.java) { manager.markClosed() } + + manager.countEndOfStream() + + // Can't update after end-of-stream + Assertions.assertThrows(IllegalStateException::class.java) { manager.countRecordIn() } + + Assertions.assertThrows(IllegalStateException::class.java) { manager.countEndOfStream() } + + // Can close now + Assertions.assertDoesNotThrow(manager::markClosed) + } + + @Test + fun testAwaitStreamClosed() = 
runTest { + val streamsManager = StreamsManagerFactory(catalog).make() + val manager = streamsManager.getManager(stream1) + val hasClosed = AtomicBoolean(false) + + val job = launch { + manager.awaitStreamClosed() + hasClosed.set(true) + } + + Assertions.assertFalse(hasClosed.get()) + manager.countEndOfStream() + manager.markClosed() + try { + withTimeout(5000) { job.join() } + } catch (e: Exception) { + Assertions.fail("Stream did not close in time") + } + Assertions.assertTrue(hasClosed.get()) + } + + @Test + fun testAwaitAllStreamsClosed() = runTest { + val streamsManager = StreamsManagerFactory(catalog).make() + val manager1 = streamsManager.getManager(stream1) + val manager2 = streamsManager.getManager(stream2) + val allHaveClosed = AtomicBoolean(false) + + val awaitStream1 = launch { manager1.awaitStreamClosed() } + + val awaitAllStreams = launch { + streamsManager.awaitAllStreamsClosed() + allHaveClosed.set(true) + } + + Assertions.assertFalse(allHaveClosed.get()) + manager1.countEndOfStream() + manager1.markClosed() + try { + withTimeout(5000) { awaitStream1.join() } + } catch (e: Exception) { + Assertions.fail("Stream1 did not close in time") + } + Assertions.assertFalse(allHaveClosed.get()) + manager2.countEndOfStream() + manager2.markClosed() + try { + withTimeout(5000) { awaitAllStreams.join() } + } catch (e: Exception) { + Assertions.fail("Streams did not close in time") + } + Assertions.assertTrue(allHaveClosed.get()) + } +} From 1f2fd6ebd54389baf0d37f39e974aa85e58a80b3 Mon Sep 17 00:00:00 2001 From: Maxime Carbonneau-Leclerc <3360483+maxi297@users.noreply.github.com> Date: Tue, 3 Sep 2024 21:30:21 -0400 Subject: [PATCH 17/51] =?UTF-8?q?=F0=9F=90=9B=20Source=20Shopify:=20Have?= =?UTF-8?q?=20message=20and=20description=20be=20nullable=20for=20custom?= =?UTF-8?q?=5Fcollections=20delet=E2=80=A6=20(#45116)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connectors/source-shopify/metadata.yaml | 2 +- 
.../connectors/source-shopify/pyproject.toml | 2 +- .../source_shopify/streams/base_streams.py | 4 +- docs/integrations/sources/shopify.md | 149 +++++++++--------- 4 files changed, 79 insertions(+), 78 deletions(-) diff --git a/airbyte-integrations/connectors/source-shopify/metadata.yaml b/airbyte-integrations/connectors/source-shopify/metadata.yaml index ee3ca407fd68..9c2782d0a151 100644 --- a/airbyte-integrations/connectors/source-shopify/metadata.yaml +++ b/airbyte-integrations/connectors/source-shopify/metadata.yaml @@ -11,7 +11,7 @@ data: connectorSubtype: api connectorType: source definitionId: 9da77001-af33-4bcd-be46-6252bf9342b9 - dockerImageTag: 2.4.23 + dockerImageTag: 2.4.24 dockerRepository: airbyte/source-shopify documentationUrl: https://docs.airbyte.com/integrations/sources/shopify erdUrl: https://dbdocs.io/airbyteio/source-shopify?view=relationships diff --git a/airbyte-integrations/connectors/source-shopify/pyproject.toml b/airbyte-integrations/connectors/source-shopify/pyproject.toml index bedc7d36be46..2bd76f101b93 100644 --- a/airbyte-integrations/connectors/source-shopify/pyproject.toml +++ b/airbyte-integrations/connectors/source-shopify/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.4.23" +version = "2.4.24" name = "source-shopify" description = "Source CDK implementation for Shopify." 
authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/streams/base_streams.py b/airbyte-integrations/connectors/source-shopify/source_shopify/streams/base_streams.py index cf85413fe0fe..d0246a963402 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/streams/base_streams.py +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/streams/base_streams.py @@ -143,8 +143,8 @@ def produce_deleted_records_from_events(self, delete_events: Iterable[Mapping[st yield { "id": event["subject_id"], self.cursor_field: event["created_at"], - "deleted_message": event["message"], - "deleted_description": event["description"], + "deleted_message": event.get("message", None), + "deleted_description": event.get("description", None), "shop_url": event["shop_url"], } diff --git a/docs/integrations/sources/shopify.md b/docs/integrations/sources/shopify.md index 343889400bff..7d349fc461aa 100644 --- a/docs/integrations/sources/shopify.md +++ b/docs/integrations/sources/shopify.md @@ -231,80 +231,81 @@ For all `Shopify GraphQL BULK` api requests these limitations are applied: https | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 2.4.23 | 2024-08-31 | [44971](https://github.com/airbytehq/airbyte/pull/44971) | Update dependencies | -| 2.4.22 | 2024-08-24 | [44723](https://github.com/airbytehq/airbyte/pull/44723) | Update dependencies | -| 2.4.21 | 2024-08-17 | [44318](https://github.com/airbytehq/airbyte/pull/44318) | Update dependencies | -| 
2.4.20 | 2024-08-12 | [43834](https://github.com/airbytehq/airbyte/pull/43834) | Update dependencies | -| 2.4.19 | 2024-08-10 | [43194](https://github.com/airbytehq/airbyte/pull/43194) | Update dependencies | -| 2.4.18 | 2024-08-06 | [43326](https://github.com/airbytehq/airbyte/pull/43326) | Added missing `type` type for `customer_journey_summary` field for `Customer Journey Summary` stream schema | -| 2.4.17 | 2024-08-02 | [42973](https://github.com/airbytehq/airbyte/pull/42973) | Fixed `FAILED` Job handling for `no-checkpointing` BULK Streams, fixed STATE collision for REST Streams with `Deleted Events` | -| 2.4.16 | 2024-07-21 | [42095](https://github.com/airbytehq/airbyte/pull/42095) | Added the `Checkpointing` for the `BULK` streams, fixed the `store` redirection | -| 2.4.15 | 2024-07-27 | [42806](https://github.com/airbytehq/airbyte/pull/42806) | Update dependencies | -| 2.4.14 | 2024-07-20 | [42150](https://github.com/airbytehq/airbyte/pull/42150) | Update dependencies | -| 2.4.13 | 2024-07-13 | [41809](https://github.com/airbytehq/airbyte/pull/41809) | Update dependencies | -| 2.4.12 | 2024-07-10 | [41103](https://github.com/airbytehq/airbyte/pull/41103) | Update dependencies | -| 2.4.11 | 2024-07-09 | [41068](https://github.com/airbytehq/airbyte/pull/41068) | Added `options` field to `Product Variants` stream | -| 2.4.10 | 2024-07-09 | [41042](https://github.com/airbytehq/airbyte/pull/41042) | Use latest `CDK`: 3.0.0 | -| 2.4.9 | 2024-07-06 | [40768](https://github.com/airbytehq/airbyte/pull/40768) | Update dependencies | -| 2.4.8 | 2024-07-03 | [40707](https://github.com/airbytehq/airbyte/pull/40707) | Fixed the bug when `product_images` stream emitted records with no `primary_key` | -| 2.4.7 | 2024-06-27 | [40593](https://github.com/airbytehq/airbyte/pull/40593) | Use latest `CDK` version possible | -| 2.4.6 | 2024-06-26 | [40526](https://github.com/airbytehq/airbyte/pull/40526) | Made `BULK Job 
termination threshold` limit adjustable from `input configuration`, increased the default value to `1 hour`. | -| 2.4.5 | 2024-06-25 | [40484](https://github.com/airbytehq/airbyte/pull/40484) | Update dependencies | -| 2.4.4 | 2024-06-19 | [39594](https://github.com/airbytehq/airbyte/pull/39594) | Extended the `Discount Codes`, `Fulfillment Orders`, `Inventory Items`, `Inventory Levels`, `Products`, `Product Variants` and `Transactions` stream schemas | -| 2.4.3 | 2024-06-06 | [38084](https://github.com/airbytehq/airbyte/pull/38084) | add resiliency on some transient errors using the HttpClient | -| 2.4.1 | 2024-06-20 | [39651](https://github.com/airbytehq/airbyte/pull/39651) | Update dependencies | -| 2.4.0 | 2024-06-17 | [39527](https://github.com/airbytehq/airbyte/pull/39527) | Added new stream `Order Agreements` | -| 2.3.0 | 2024-06-14 | [39487](https://github.com/airbytehq/airbyte/pull/39487) | Added new stream `Customer Journey Summary` | -| 2.2.3 | 2024-06-06 | [38084](https://github.com/airbytehq/airbyte/pull/38084) | add resiliency on some transient errors using the HttpClient | -| 2.2.2 | 2024-06-04 | [39019](https://github.com/airbytehq/airbyte/pull/39019) | [autopull] Upgrade base image to v1.2.1 | -| 2.2.1 | 2024-05-30 | [38769](https://github.com/airbytehq/airbyte/pull/38769) | Have products stream return all the tags comma separated | -| 2.2.0 | 2024-05-29 | [38746](https://github.com/airbytehq/airbyte/pull/38746) | Updated countries schema | -| 2.1.4 | 2024-05-24 | [38610](https://github.com/airbytehq/airbyte/pull/38610) | Updated the source `API Version` to `2024-04` | -| 2.1.3 | 2024-05-23 | [38464](https://github.com/airbytehq/airbyte/pull/38464) | Added missing fields to `Products` stream | -| 2.1.2 | 2024-05-23 | [38352](https://github.com/airbytehq/airbyte/pull/38352) | Migrated `Order Risks` stream to `GraphQL BULK` | -| 2.1.1 | 2024-05-20 | 
[38251](https://github.com/airbytehq/airbyte/pull/38251) | Replace AirbyteLogger with logging.Logger | -| 2.1.0 | 2024-05-02 | [37767](https://github.com/airbytehq/airbyte/pull/37767) | Migrated `Products`, `Product Images` and `Product Variants` to `GraphQL BULK` | -| 2.0.8 | 2024-05-02 | [37589](https://github.com/airbytehq/airbyte/pull/37589) | Added retry for known HTTP Errors for BULK streams | -| 2.0.7 | 2024-04-24 | [36660](https://github.com/airbytehq/airbyte/pull/36660) | Schema descriptions | -| 2.0.6 | 2024-04-22 | [37468](https://github.com/airbytehq/airbyte/pull/37468) | Fixed one time retry for `Internal Server Error` for BULK streams | -| 2.0.5 | 2024-04-03 | [36788](https://github.com/airbytehq/airbyte/pull/36788) | Added ability to dynamically adjust the size of the `slice` | -| 2.0.4 | 2024-03-22 | [36355](https://github.com/airbytehq/airbyte/pull/36355) | Update CDK version to ensure Per-Stream Error Messaging and Record Counts In State (features were already there so just upping the version) | -| 2.0.3 | 2024-03-15 | [36170](https://github.com/airbytehq/airbyte/pull/36170) | Fixed the `STATE` messages emittion frequency for the `nested` sub-streams | -| 2.0.2 | 2024-03-12 | [36000](https://github.com/airbytehq/airbyte/pull/36000) | Fix and issue where invalid shop name causes index out of bounds error | -| 2.0.1 | 2024-03-11 | [35952](https://github.com/airbytehq/airbyte/pull/35952) | Fixed the issue when `start date` is missing but the `stream` required it | -| 2.0.0 | 2024-02-12 | [32345](https://github.com/airbytehq/airbyte/pull/32345) | Fixed the issue with `state` causing the `substreams` to skip the records, made `metafield_*`: `collections, customers, draft_orders, locations, orders, product_images, product_variants, products`, and `fulfillment_orders, collections, discount_codes, inventory_levels, inventory_items, transactions_graphql, customer_address` streams to use `BULK Operations` instead 
of `REST` | -| 1.1.8 | 2024-02-12 | [35166](https://github.com/airbytehq/airbyte/pull/35166) | Manage dependencies with Poetry. | -| 1.1.7 | 2024-01-19 | [33804](https://github.com/airbytehq/airbyte/pull/33804) | Updated documentation with list of all supported streams | -| 1.1.6 | 2024-01-04 | [33414](https://github.com/airbytehq/airbyte/pull/33414) | Prepare for airbyte-lib | -| 1.1.5 | 2023-12-28 | [33827](https://github.com/airbytehq/airbyte/pull/33827) | Fix GraphQL query | -| 1.1.4 | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | -| 1.1.3 | 2023-10-17 | [31500](https://github.com/airbytehq/airbyte/pull/31500) | Fixed the issue caused by the `missing access token` while setup the new source and not yet authenticated | -| 1.1.2 | 2023-10-13 | [31381](https://github.com/airbytehq/airbyte/pull/31381) | Fixed the issue caused by the `state` presence while fetching the `deleted events` with pagination | -| 1.1.1 | 2023-09-18 | [30560](https://github.com/airbytehq/airbyte/pull/30560) | Performance testing - include socat binary in docker image | -| 1.1.0 | 2023-09-07 | [30246](https://github.com/airbytehq/airbyte/pull/30246) | Added ability to fetch `destroyed` records for `Articles, Blogs, CustomCollections, Orders, Pages, PriceRules, Products` | -| 1.0.0 | 2023-08-11 | [29361](https://github.com/airbytehq/airbyte/pull/29361) | Migrate to the `2023-07` Shopify API Version | -| 0.6.2 | 2023-08-09 | [29302](https://github.com/airbytehq/airbyte/pull/29302) | Handle the `Internal Server Error` when entity could be fetched | -| 0.6.1 | 2023-08-08 | [28291](https://github.com/airbytehq/airbyte/pull/28291) | Allow `shop` field to accept `*.myshopify.com` shop names, updated `OAuth Spec` | -| 0.6.0 | 2023-08-02 | [28770](https://github.com/airbytehq/airbyte/pull/28770) | Added `Disputes` stream | -| 0.5.1 | 
2023-07-13 | [28700](https://github.com/airbytehq/airbyte/pull/28700) | Improved `error messages` with more user-friendly description, refactored code | -| 0.5.0 | 2023-06-13 | [27732](https://github.com/airbytehq/airbyte/pull/27732) | License Update: Elv2 | -| 0.4.0 | 2023-06-13 | [27083](https://github.com/airbytehq/airbyte/pull/27083) | Added `CustomerSavedSearch`, `CustomerAddress` and `Countries` streams | -| 0.3.4 | 2023-05-10 | [25961](https://github.com/airbytehq/airbyte/pull/25961) | Added validation for `shop` in input configuration (accepts non-url-like inputs) | -| 0.3.3 | 2023-04-12 | [25110](https://github.com/airbytehq/airbyte/pull/25110) | Fixed issue when `cursor_field` is `"None"`, added missing properties to stream schemas, fixed `access_scopes` validation error | -| 0.3.2 | 2023-02-27 | [23473](https://github.com/airbytehq/airbyte/pull/23473) | Fixed OOM / Memory leak issue for Airbyte Cloud | -| 0.3.1 | 2023-01-16 | [21461](https://github.com/airbytehq/airbyte/pull/21461) | Added `discount_applications` to `orders` stream | -| 0.3.0 | 2022-11-16 | [19492](https://github.com/airbytehq/airbyte/pull/19492) | Added support for graphql and add a graphql products stream | -| 0.2.0 | 2022-10-21 | [18298](https://github.com/airbytehq/airbyte/pull/18298) | Updated API version to the `2022-10`, make stream schemas backward cpmpatible | -| 0.1.39 | 2022-10-13 | [17962](https://github.com/airbytehq/airbyte/pull/17962) | Added metafield streams; support for nested list streams | -| 0.1.38 | 2022-10-10 | [17777](https://github.com/airbytehq/airbyte/pull/17777) | Fixed `404` for configured streams, fix missing `cursor` error for old records | -| 0.1.37 | 2022-04-30 | [12500](https://github.com/airbytehq/airbyte/pull/12500) | Improve input configuration copy | -| 0.1.36 | 2022-03-22 | [9850](https://github.com/airbytehq/airbyte/pull/9850) | Added `BalanceTransactions` stream | -| 0.1.35 | 2022-03-07 | 
[10915](https://github.com/airbytehq/airbyte/pull/10915) | Fixed a bug which caused `full-refresh` syncs of child REST entities configured for `incremental` | -| 0.1.34 | 2022-03-02 | [10794](https://github.com/airbytehq/airbyte/pull/10794) | Minor specification re-order, fixed links in documentation | -| 0.1.33 | 2022-02-17 | [10419](https://github.com/airbytehq/airbyte/pull/10419) | Fixed wrong field type for tax_exemptions for `Abandoned_checkouts` stream | -| 0.1.32 | 2022-02-18 | [10449](https://github.com/airbytehq/airbyte/pull/10449) | Added `tender_transactions` stream | -| 0.1.31 | 2022-02-08 | [10175](https://github.com/airbytehq/airbyte/pull/10175) | Fixed compatibility issues for legacy user config | -| 0.1.30 | 2022-01-24 | [9648](https://github.com/airbytehq/airbyte/pull/9648) | Added permission validation before sync | +| 2.4.24 | 2024-09-03 | [45116](https://github.com/airbytehq/airbyte/pull/45116) | Have message and description be nullable for custom_collections deleted events | +| 2.4.23 | 2024-08-31 | [44971](https://github.com/airbytehq/airbyte/pull/44971) | Update dependencies | +| 2.4.22 | 2024-08-24 | [44723](https://github.com/airbytehq/airbyte/pull/44723) | Update dependencies | +| 2.4.21 | 2024-08-17 | [44318](https://github.com/airbytehq/airbyte/pull/44318) | Update dependencies | +| 2.4.20 | 2024-08-12 | [43834](https://github.com/airbytehq/airbyte/pull/43834) | Update dependencies | +| 2.4.19 | 2024-08-10 | [43194](https://github.com/airbytehq/airbyte/pull/43194) | Update dependencies | +| 2.4.18 | 2024-08-06 | [43326](https://github.com/airbytehq/airbyte/pull/43326) | Added missing `type` type for `customer_journey_summary` field for `Customer Journey Summary` stream schema | +| 2.4.17 | 2024-08-02 | [42973](https://github.com/airbytehq/airbyte/pull/42973) | Fixed `FAILED` Job handling for `no-checkpointing` BULK Streams, fixed STATE collision for REST Streams with `Deleted 
Events` | +| 2.4.16 | 2024-07-21 | [42095](https://github.com/airbytehq/airbyte/pull/42095) | Added the `Checkpointing` for the `BULK` streams, fixed the `store` redirection | +| 2.4.15 | 2024-07-27 | [42806](https://github.com/airbytehq/airbyte/pull/42806) | Update dependencies | +| 2.4.14 | 2024-07-20 | [42150](https://github.com/airbytehq/airbyte/pull/42150) | Update dependencies | +| 2.4.13 | 2024-07-13 | [41809](https://github.com/airbytehq/airbyte/pull/41809) | Update dependencies | +| 2.4.12 | 2024-07-10 | [41103](https://github.com/airbytehq/airbyte/pull/41103) | Update dependencies | +| 2.4.11 | 2024-07-09 | [41068](https://github.com/airbytehq/airbyte/pull/41068) | Added `options` field to `Product Variants` stream | +| 2.4.10 | 2024-07-09 | [41042](https://github.com/airbytehq/airbyte/pull/41042) | Use latest `CDK`: 3.0.0 | +| 2.4.9 | 2024-07-06 | [40768](https://github.com/airbytehq/airbyte/pull/40768) | Update dependencies | +| 2.4.8 | 2024-07-03 | [40707](https://github.com/airbytehq/airbyte/pull/40707) | Fixed the bug when `product_images` stream emitted records with no `primary_key` | +| 2.4.7 | 2024-06-27 | [40593](https://github.com/airbytehq/airbyte/pull/40593) | Use latest `CDK` version possible | +| 2.4.6 | 2024-06-26 | [40526](https://github.com/airbytehq/airbyte/pull/40526) | Made `BULK Job termination threshold` limit adjustable from `input configuration`, increased the default value to `1 hour`. 
| +| 2.4.5 | 2024-06-25 | [40484](https://github.com/airbytehq/airbyte/pull/40484) | Update dependencies | +| 2.4.4 | 2024-06-19 | [39594](https://github.com/airbytehq/airbyte/pull/39594) | Extended the `Discount Codes`, `Fulfillment Orders`, `Inventory Items`, `Inventory Levels`, `Products`, `Product Variants` and `Transactions` stream schemas | +| 2.4.3 | 2024-06-06 | [38084](https://github.com/airbytehq/airbyte/pull/38084) | add resiliency on some transient errors using the HttpClient | +| 2.4.1 | 2024-06-20 | [39651](https://github.com/airbytehq/airbyte/pull/39651) | Update dependencies | +| 2.4.0 | 2024-06-17 | [39527](https://github.com/airbytehq/airbyte/pull/39527) | Added new stream `Order Agreements` | +| 2.3.0 | 2024-06-14 | [39487](https://github.com/airbytehq/airbyte/pull/39487) | Added new stream `Customer Journey Summary` | +| 2.2.3 | 2024-06-06 | [38084](https://github.com/airbytehq/airbyte/pull/38084) | add resiliency on some transient errors using the HttpClient | +| 2.2.2 | 2024-06-04 | [39019](https://github.com/airbytehq/airbyte/pull/39019) | [autopull] Upgrade base image to v1.2.1 | +| 2.2.1 | 2024-05-30 | [38769](https://github.com/airbytehq/airbyte/pull/38769) | Have products stream return all the tags comma separated | +| 2.2.0 | 2024-05-29 | [38746](https://github.com/airbytehq/airbyte/pull/38746) | Updated countries schema | +| 2.1.4 | 2024-05-24 | [38610](https://github.com/airbytehq/airbyte/pull/38610) | Updated the source `API Version` to `2024-04` | +| 2.1.3 | 2024-05-23 | [38464](https://github.com/airbytehq/airbyte/pull/38464) | Added missing fields to `Products` stream | +| 2.1.2 | 2024-05-23 | [38352](https://github.com/airbytehq/airbyte/pull/38352) | Migrated `Order Risks` stream to `GraphQL BULK` | +| 2.1.1 | 2024-05-20 | [38251](https://github.com/airbytehq/airbyte/pull/38251) | Replace AirbyteLogger with logging.Logger | +| 2.1.0 | 2024-05-02 | 
[37767](https://github.com/airbytehq/airbyte/pull/37767) | Migrated `Products`, `Product Images` and `Product Variants` to `GraphQL BULK` | +| 2.0.8 | 2024-05-02 | [37589](https://github.com/airbytehq/airbyte/pull/37589) | Added retry for known HTTP Errors for BULK streams | +| 2.0.7 | 2024-04-24 | [36660](https://github.com/airbytehq/airbyte/pull/36660) | Schema descriptions | +| 2.0.6 | 2024-04-22 | [37468](https://github.com/airbytehq/airbyte/pull/37468) | Fixed one time retry for `Internal Server Error` for BULK streams | +| 2.0.5 | 2024-04-03 | [36788](https://github.com/airbytehq/airbyte/pull/36788) | Added ability to dynamically adjust the size of the `slice` | +| 2.0.4 | 2024-03-22 | [36355](https://github.com/airbytehq/airbyte/pull/36355) | Update CDK version to ensure Per-Stream Error Messaging and Record Counts In State (features were already there so just upping the version) | +| 2.0.3 | 2024-03-15 | [36170](https://github.com/airbytehq/airbyte/pull/36170) | Fixed the `STATE` messages emission frequency for the `nested` sub-streams | +| 2.0.2 | 2024-03-12 | [36000](https://github.com/airbytehq/airbyte/pull/36000) | Fix an issue where invalid shop name causes index out of bounds error | +| 2.0.1 | 2024-03-11 | [35952](https://github.com/airbytehq/airbyte/pull/35952) | Fixed the issue when `start date` is missing but the `stream` required it | +| 2.0.0 | 2024-02-12 | [32345](https://github.com/airbytehq/airbyte/pull/32345) | Fixed the issue with `state` causing the `substreams` to skip the records, made `metafield_*`: `collections, customers, draft_orders, locations, orders, product_images, product_variants, products`, and `fulfillment_orders, collections, discount_codes, inventory_levels, inventory_items, transactions_graphql, customer_address` streams to use `BULK Operations` instead of `REST` | +| 1.1.8 | 2024-02-12 | [35166](https://github.com/airbytehq/airbyte/pull/35166) | Manage dependencies with Poetry. 
| +| 1.1.7 | 2024-01-19 | [33804](https://github.com/airbytehq/airbyte/pull/33804) | Updated documentation with list of all supported streams | +| 1.1.6 | 2024-01-04 | [33414](https://github.com/airbytehq/airbyte/pull/33414) | Prepare for airbyte-lib | +| 1.1.5 | 2023-12-28 | [33827](https://github.com/airbytehq/airbyte/pull/33827) | Fix GraphQL query | +| 1.1.4 | 2023-10-19 | [31599](https://github.com/airbytehq/airbyte/pull/31599) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 1.1.3 | 2023-10-17 | [31500](https://github.com/airbytehq/airbyte/pull/31500) | Fixed the issue caused by the `missing access token` while setup the new source and not yet authenticated | +| 1.1.2 | 2023-10-13 | [31381](https://github.com/airbytehq/airbyte/pull/31381) | Fixed the issue caused by the `state` presence while fetching the `deleted events` with pagination | +| 1.1.1 | 2023-09-18 | [30560](https://github.com/airbytehq/airbyte/pull/30560) | Performance testing - include socat binary in docker image | +| 1.1.0 | 2023-09-07 | [30246](https://github.com/airbytehq/airbyte/pull/30246) | Added ability to fetch `destroyed` records for `Articles, Blogs, CustomCollections, Orders, Pages, PriceRules, Products` | +| 1.0.0 | 2023-08-11 | [29361](https://github.com/airbytehq/airbyte/pull/29361) | Migrate to the `2023-07` Shopify API Version | +| 0.6.2 | 2023-08-09 | [29302](https://github.com/airbytehq/airbyte/pull/29302) | Handle the `Internal Server Error` when entity could be fetched | +| 0.6.1 | 2023-08-08 | [28291](https://github.com/airbytehq/airbyte/pull/28291) | Allow `shop` field to accept `*.myshopify.com` shop names, updated `OAuth Spec` | +| 0.6.0 | 2023-08-02 | [28770](https://github.com/airbytehq/airbyte/pull/28770) | Added `Disputes` stream | +| 0.5.1 | 2023-07-13 | [28700](https://github.com/airbytehq/airbyte/pull/28700) | Improved `error messages` with more user-friendly description, 
refactored code | +| 0.5.0 | 2023-06-13 | [27732](https://github.com/airbytehq/airbyte/pull/27732) | License Update: Elv2 | +| 0.4.0 | 2023-06-13 | [27083](https://github.com/airbytehq/airbyte/pull/27083) | Added `CustomerSavedSearch`, `CustomerAddress` and `Countries` streams | +| 0.3.4 | 2023-05-10 | [25961](https://github.com/airbytehq/airbyte/pull/25961) | Added validation for `shop` in input configuration (accepts non-url-like inputs) | +| 0.3.3 | 2023-04-12 | [25110](https://github.com/airbytehq/airbyte/pull/25110) | Fixed issue when `cursor_field` is `"None"`, added missing properties to stream schemas, fixed `access_scopes` validation error | +| 0.3.2 | 2023-02-27 | [23473](https://github.com/airbytehq/airbyte/pull/23473) | Fixed OOM / Memory leak issue for Airbyte Cloud | +| 0.3.1 | 2023-01-16 | [21461](https://github.com/airbytehq/airbyte/pull/21461) | Added `discount_applications` to `orders` stream | +| 0.3.0 | 2022-11-16 | [19492](https://github.com/airbytehq/airbyte/pull/19492) | Added support for graphql and add a graphql products stream | +| 0.2.0 | 2022-10-21 | [18298](https://github.com/airbytehq/airbyte/pull/18298) | Updated API version to the `2022-10`, make stream schemas backward compatible | +| 0.1.39 | 2022-10-13 | [17962](https://github.com/airbytehq/airbyte/pull/17962) | Added metafield streams; support for nested list streams | +| 0.1.38 | 2022-10-10 | [17777](https://github.com/airbytehq/airbyte/pull/17777) | Fixed `404` for configured streams, fix missing `cursor` error for old records | +| 0.1.37 | 2022-04-30 | [12500](https://github.com/airbytehq/airbyte/pull/12500) | Improve input configuration copy | +| 0.1.36 | 2022-03-22 | [9850](https://github.com/airbytehq/airbyte/pull/9850) | Added `BalanceTransactions` stream | +| 0.1.35 | 2022-03-07 | [10915](https://github.com/airbytehq/airbyte/pull/10915) | Fixed a bug which caused `full-refresh` syncs of child REST entities configured for 
`incremental` | +| 0.1.34 | 2022-03-02 | [10794](https://github.com/airbytehq/airbyte/pull/10794) | Minor specification re-order, fixed links in documentation | +| 0.1.33 | 2022-02-17 | [10419](https://github.com/airbytehq/airbyte/pull/10419) | Fixed wrong field type for tax_exemptions for `Abandoned_checkouts` stream | +| 0.1.32 | 2022-02-18 | [10449](https://github.com/airbytehq/airbyte/pull/10449) | Added `tender_transactions` stream | +| 0.1.31 | 2022-02-08 | [10175](https://github.com/airbytehq/airbyte/pull/10175) | Fixed compatibility issues for legacy user config | +| 0.1.30 | 2022-01-24 | [9648](https://github.com/airbytehq/airbyte/pull/9648) | Added permission validation before sync | | 0.1.29 | 2022-01-20 | [9049](https://github.com/airbytehq/airbyte/pull/9248) | Added `shop_url` to the record for all streams | | 0.1.28 | 2022-01-19 | [9591](https://github.com/airbytehq/airbyte/pull/9591) | Implemented `OAuth2.0` authentication method for Airbyte Cloud | | 0.1.27 | 2021-12-22 | [9049](https://github.com/airbytehq/airbyte/pull/9049) | Updated connector fields title/description | From 84f3b03f9c32b966047bd60f679493bc82260c51 Mon Sep 17 00:00:00 2001 From: Anatolii Yatsuk <35109939+tolik0@users.noreply.github.com> Date: Wed, 4 Sep 2024 10:55:18 +0300 Subject: [PATCH 18/51] fix(source-github): fix state handling for stream WorkflowRuns (#44592) --- .../connectors/source-github/metadata.yaml | 2 +- .../connectors/source-github/pyproject.toml | 2 +- .../source-github/source_github/streams.py | 4 + docs/integrations/sources/github.md | 117 +++++++++--------- 4 files changed, 65 insertions(+), 60 deletions(-) diff --git a/airbyte-integrations/connectors/source-github/metadata.yaml b/airbyte-integrations/connectors/source-github/metadata.yaml index 84d4ccf3b274..c0c8ddf00ece 100644 --- a/airbyte-integrations/connectors/source-github/metadata.yaml +++ b/airbyte-integrations/connectors/source-github/metadata.yaml @@ -10,7 +10,7 @@ 
data: connectorSubtype: api connectorType: source definitionId: ef69ef6e-aa7f-4af1-a01d-ef775033524e - dockerImageTag: 1.8.7 + dockerImageTag: 1.8.8 dockerRepository: airbyte/source-github documentationUrl: https://docs.airbyte.com/integrations/sources/github erdUrl: https://dbdocs.io/airbyteio/source-github?view=relationships diff --git a/airbyte-integrations/connectors/source-github/pyproject.toml b/airbyte-integrations/connectors/source-github/pyproject.toml index 20499a31f72a..ab11245c01e7 100644 --- a/airbyte-integrations/connectors/source-github/pyproject.toml +++ b/airbyte-integrations/connectors/source-github/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.8.7" +version = "1.8.8" name = "source-github" description = "Source implementation for GitHub." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-github/source_github/streams.py b/airbyte-integrations/connectors/source-github/source_github/streams.py index eaedac354f88..7e9fcc80cc3e 100644 --- a/airbyte-integrations/connectors/source-github/source_github/streams.py +++ b/airbyte-integrations/connectors/source-github/source_github/streams.py @@ -1459,6 +1459,8 @@ def read_records( # only to look behind on 30 days to find all records which were updated. 
start_point = self.get_starting_point(stream_state=stream_state, stream_slice=stream_slice) break_point = None + # the state is updated only in the end of the sync as records are sorted in reverse order + new_state = self.state if start_point: break_point = (pendulum.parse(start_point) - pendulum.duration(days=self.re_run_period)).to_iso8601_string() for record in super(SemiIncrementalMixin, self).read_records( @@ -1468,8 +1470,10 @@ def read_records( created_at = record["created_at"] if not start_point or cursor_value > start_point: yield record + new_state = self._get_updated_state(new_state, record) if break_point and created_at < break_point: break + self.state = new_state class WorkflowJobs(SemiIncrementalMixin, GithubStream): diff --git a/docs/integrations/sources/github.md b/docs/integrations/sources/github.md index d7a34517934d..28e83abe43ec 100644 --- a/docs/integrations/sources/github.md +++ b/docs/integrations/sources/github.md @@ -222,64 +222,65 @@ Your token should have at least the `repo` scope. 
Depending on which streams you | Version | Date | Pull Request | Subject | |:--------|:-----------|:------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 1.8.7 | 2024-08-31 | [45061](https://github.com/airbytehq/airbyte/pull/45061) | Update dependencies | -| 1.8.6 | 2024-08-24 | [44703](https://github.com/airbytehq/airbyte/pull/44703) | Update dependencies | -| 1.8.5 | 2024-08-17 | [44227](https://github.com/airbytehq/airbyte/pull/44227) | Update dependencies | -| 1.8.4 | 2024-08-12 | [43749](https://github.com/airbytehq/airbyte/pull/43749) | Update dependencies | -| 1.8.3 | 2024-08-10 | [42671](https://github.com/airbytehq/airbyte/pull/42671) | Update dependencies | -| 1.8.2 | 2024-08-20 | [42966](https://github.com/airbytehq/airbyte/pull/42966) | Bump cdk version and enable RFR for all non-incremental streams | -| 1.8.1 | 2024-07-20 | [42342](https://github.com/airbytehq/airbyte/pull/42342) | Update dependencies | -| 1.8.0 | 2024-07-16 | [41677](https://github.com/airbytehq/airbyte/pull/41677) | Update to 3.4.0 CDK | -| 1.7.13 | 2024-07-13 | [41746](https://github.com/airbytehq/airbyte/pull/41746) | Update dependencies | -| 1.7.12 | 2024-07-10 | [41354](https://github.com/airbytehq/airbyte/pull/41354) | Update dependencies | -| 1.7.11 | 2024-07-09 | [41221](https://github.com/airbytehq/airbyte/pull/41221) | Update dependencies | -| 1.7.10 | 2024-07-06 | [41000](https://github.com/airbytehq/airbyte/pull/41000) | Update dependencies | -| 1.7.9 | 2024-06-25 | [40289](https://github.com/airbytehq/airbyte/pull/40289) | Update dependencies | -| 1.7.8 | 2024-06-22 | [40128](https://github.com/airbytehq/airbyte/pull/40128) | Update dependencies | -| 1.7.7 | 2024-06-17 | 
[39513](https://github.com/airbytehq/airbyte/pull/39513) | Update deprecated state handling method | -| 1.7.6 | 2024-06-04 | [39078](https://github.com/airbytehq/airbyte/pull/39078) | [autopull] Upgrade base image to v1.2.1 | -| 1.7.5 | 2024-05-29 | [38341](https://github.com/airbytehq/airbyte/pull/38341) | Add `max_waiting_time` to configuration | -| 1.7.4 | 2024-05-21 | [38341](https://github.com/airbytehq/airbyte/pull/38341) | Update CDK authenticator package | -| 1.7.3 | 2024-05-20 | [38299](https://github.com/airbytehq/airbyte/pull/38299) | Fixed spec typo | -| 1.7.2 | 2024-04-19 | [36636](https://github.com/airbytehq/airbyte/pull/36636) | Updating to 0.80.0 CDK | -| 1.7.1 | 2024-04-12 | [36636](https://github.com/airbytehq/airbyte/pull/36636) | schema descriptions | -| 1.7.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | -| 1.6.5 | 2024-03-12 | [35986](https://github.com/airbytehq/airbyte/pull/35986) | Handle rate limit exception as config error | -| 1.6.4 | 2024-03-08 | [35915](https://github.com/airbytehq/airbyte/pull/35915) | Fix per stream error handler; Make use the latest CDK version | -| 1.6.3 | 2024-02-15 | [35271](https://github.com/airbytehq/airbyte/pull/35271) | Update branches schema | -| 1.6.2 | 2024-02-12 | [34933](https://github.com/airbytehq/airbyte/pull/34933) | Update Airbyte CDK for integration tests | -| 1.6.1 | 2024-02-09 | [35087](https://github.com/airbytehq/airbyte/pull/35087) | Manage dependencies with Poetry. 
| -| 1.6.0 | 2024-02-02 | [34700](https://github.com/airbytehq/airbyte/pull/34700) | Continue Sync on Stream failure | -| 1.5.7 | 2024-01-29 | [34598](https://github.com/airbytehq/airbyte/pull/34598) | Fix MultipleToken sleep time | -| 1.5.6 | 2024-01-26 | [34503](https://github.com/airbytehq/airbyte/pull/34503) | Fix MultipleToken rotation logic | -| 1.5.5 | 2023-12-26 | [33783](https://github.com/airbytehq/airbyte/pull/33783) | Fix retry for 504 error in GraphQL based streams | -| 1.5.4 | 2023-11-20 | [32679](https://github.com/airbytehq/airbyte/pull/32679) | Return AirbyteMessage if max retry exeeded for 202 status code | -| 1.5.3 | 2023-10-23 | [31702](https://github.com/airbytehq/airbyte/pull/31702) | Base image migration: remove Dockerfile and use the python-connector-base image | -| 1.5.2 | 2023-10-13 | [31386](https://github.com/airbytehq/airbyte/pull/31386) | Handle `ContributorActivity` continuous `ACCEPTED` response | -| 1.5.1 | 2023-10-12 | [31307](https://github.com/airbytehq/airbyte/pull/31307) | Increase backoff_time for stream `ContributorActivity` | -| 1.5.0 | 2023-10-11 | [31300](https://github.com/airbytehq/airbyte/pull/31300) | Update Schemas: Add date-time format to fields | -| 1.4.6 | 2023-10-04 | [31056](https://github.com/airbytehq/airbyte/pull/31056) | Migrate spec properties' `repository` and `branch` type to \ | -| 1.4.5 | 2023-10-02 | [31023](https://github.com/airbytehq/airbyte/pull/31023) | Increase backoff for stream `Contributor Activity` | -| 1.4.4 | 2023-10-02 | [30971](https://github.com/airbytehq/airbyte/pull/30971) | Mark `start_date` as optional. 
| -| 1.4.3 | 2023-10-02 | [30979](https://github.com/airbytehq/airbyte/pull/30979) | Fetch archived records in `Project Cards` | -| 1.4.2 | 2023-09-30 | [30927](https://github.com/airbytehq/airbyte/pull/30927) | Provide actionable user error messages | -| 1.4.1 | 2023-09-30 | [30839](https://github.com/airbytehq/airbyte/pull/30839) | Update CDK to Latest version | -| 1.4.0 | 2023-09-29 | [30823](https://github.com/airbytehq/airbyte/pull/30823) | Add new stream `issue Timeline Events` | -| 1.3.1 | 2023-09-28 | [30824](https://github.com/airbytehq/airbyte/pull/30824) | Handle empty response in stream `ContributorActivity` | -| 1.3.0 | 2023-09-25 | [30731](https://github.com/airbytehq/airbyte/pull/30731) | Add new stream `ProjectsV2` | -| 1.2.1 | 2023-09-22 | [30693](https://github.com/airbytehq/airbyte/pull/30693) | Handle 404 error in `TeamMemberShips` | -| 1.2.0 | 2023-09-22 | [30647](https://github.com/airbytehq/airbyte/pull/30647) | Add support for self-hosted GitHub instances | -| 1.1.1 | 2023-09-21 | [30654](https://github.com/airbytehq/airbyte/pull/30654) | Rewrite source connection error messages | -| 1.1.0 | 2023-08-03 | [30615](https://github.com/airbytehq/airbyte/pull/30615) | Add new stream `Contributor Activity` | -| 1.0.4 | 2023-08-03 | [29031](https://github.com/airbytehq/airbyte/pull/29031) | Reverted `advancedAuth` spec changes | -| 1.0.3 | 2023-08-01 | [28910](https://github.com/airbytehq/airbyte/pull/28910) | Updated `advancedAuth` broken references | -| 1.0.2 | 2023-07-11 | [28144](https://github.com/airbytehq/airbyte/pull/28144) | Add `archived_at` property to `Organizations` schema parameter | -| 1.0.1 | 2023-05-22 | [25838](https://github.com/airbytehq/airbyte/pull/25838) | Deprecate "page size" input parameter | -| 1.0.0 | 2023-05-19 | [25778](https://github.com/airbytehq/airbyte/pull/25778) | Improve repo(s) name validation on UI | -| 0.5.0 | 2023-05-16 | 
[25793](https://github.com/airbytehq/airbyte/pull/25793) | Implement client-side throttling of requests | -| 0.4.11 | 2023-05-12 | [26025](https://github.com/airbytehq/airbyte/pull/26025) | Added more transparent depiction of the personal access token expired | -| 0.4.10 | 2023-05-15 | [26075](https://github.com/airbytehq/airbyte/pull/26075) | Add more specific error message description for no repos case. | -| 0.4.9 | 2023-05-01 | [24523](https://github.com/airbytehq/airbyte/pull/24523) | Add undeclared columns to spec | +| 1.8.8 | 2024-08-23 | [44592](https://github.com/airbytehq/airbyte/pull/44592) | Fix state handling for stream WorkflowRuns | +| 1.8.7 | 2024-08-31 | [45061](https://github.com/airbytehq/airbyte/pull/45061) | Update dependencies | +| 1.8.6 | 2024-08-24 | [44703](https://github.com/airbytehq/airbyte/pull/44703) | Update dependencies | +| 1.8.5 | 2024-08-17 | [44227](https://github.com/airbytehq/airbyte/pull/44227) | Update dependencies | +| 1.8.4 | 2024-08-12 | [43749](https://github.com/airbytehq/airbyte/pull/43749) | Update dependencies | +| 1.8.3 | 2024-08-10 | [42671](https://github.com/airbytehq/airbyte/pull/42671) | Update dependencies | +| 1.8.2 | 2024-08-20 | [42966](https://github.com/airbytehq/airbyte/pull/42966) | Bump cdk version and enable RFR for all non-incremental streams | +| 1.8.1 | 2024-07-20 | [42342](https://github.com/airbytehq/airbyte/pull/42342) | Update dependencies | +| 1.8.0 | 2024-07-16 | [41677](https://github.com/airbytehq/airbyte/pull/41677) | Update to 3.4.0 CDK | +| 1.7.13 | 2024-07-13 | [41746](https://github.com/airbytehq/airbyte/pull/41746) | Update dependencies | +| 1.7.12 | 2024-07-10 | [41354](https://github.com/airbytehq/airbyte/pull/41354) | Update dependencies | +| 1.7.11 | 2024-07-09 | [41221](https://github.com/airbytehq/airbyte/pull/41221) | Update dependencies | +| 1.7.10 | 2024-07-06 | 
[41000](https://github.com/airbytehq/airbyte/pull/41000) | Update dependencies | +| 1.7.9 | 2024-06-25 | [40289](https://github.com/airbytehq/airbyte/pull/40289) | Update dependencies | +| 1.7.8 | 2024-06-22 | [40128](https://github.com/airbytehq/airbyte/pull/40128) | Update dependencies | +| 1.7.7 | 2024-06-17 | [39513](https://github.com/airbytehq/airbyte/pull/39513) | Update deprecated state handling method | +| 1.7.6 | 2024-06-04 | [39078](https://github.com/airbytehq/airbyte/pull/39078) | [autopull] Upgrade base image to v1.2.1 | +| 1.7.5 | 2024-05-29 | [38341](https://github.com/airbytehq/airbyte/pull/38341) | Add `max_waiting_time` to configuration | +| 1.7.4 | 2024-05-21 | [38341](https://github.com/airbytehq/airbyte/pull/38341) | Update CDK authenticator package | +| 1.7.3 | 2024-05-20 | [38299](https://github.com/airbytehq/airbyte/pull/38299) | Fixed spec typo | +| 1.7.2 | 2024-04-19 | [36636](https://github.com/airbytehq/airbyte/pull/36636) | Updating to 0.80.0 CDK | +| 1.7.1 | 2024-04-12 | [36636](https://github.com/airbytehq/airbyte/pull/36636) | schema descriptions | +| 1.7.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` | +| 1.6.5 | 2024-03-12 | [35986](https://github.com/airbytehq/airbyte/pull/35986) | Handle rate limit exception as config error | +| 1.6.4 | 2024-03-08 | [35915](https://github.com/airbytehq/airbyte/pull/35915) | Fix per stream error handler; Make use the latest CDK version | +| 1.6.3 | 2024-02-15 | [35271](https://github.com/airbytehq/airbyte/pull/35271) | Update branches schema | +| 1.6.2 | 2024-02-12 | [34933](https://github.com/airbytehq/airbyte/pull/34933) | Update Airbyte CDK for integration tests | +| 1.6.1 | 2024-02-09 | [35087](https://github.com/airbytehq/airbyte/pull/35087) | Manage dependencies with Poetry. 
| +| 1.6.0 | 2024-02-02 | [34700](https://github.com/airbytehq/airbyte/pull/34700) | Continue Sync on Stream failure | +| 1.5.7 | 2024-01-29 | [34598](https://github.com/airbytehq/airbyte/pull/34598) | Fix MultipleToken sleep time | +| 1.5.6 | 2024-01-26 | [34503](https://github.com/airbytehq/airbyte/pull/34503) | Fix MultipleToken rotation logic | +| 1.5.5 | 2023-12-26 | [33783](https://github.com/airbytehq/airbyte/pull/33783) | Fix retry for 504 error in GraphQL based streams | +| 1.5.4 | 2023-11-20 | [32679](https://github.com/airbytehq/airbyte/pull/32679) | Return AirbyteMessage if max retry exceeded for 202 status code | +| 1.5.3 | 2023-10-23 | [31702](https://github.com/airbytehq/airbyte/pull/31702) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 1.5.2 | 2023-10-13 | [31386](https://github.com/airbytehq/airbyte/pull/31386) | Handle `ContributorActivity` continuous `ACCEPTED` response | +| 1.5.1 | 2023-10-12 | [31307](https://github.com/airbytehq/airbyte/pull/31307) | Increase backoff_time for stream `ContributorActivity` | +| 1.5.0 | 2023-10-11 | [31300](https://github.com/airbytehq/airbyte/pull/31300) | Update Schemas: Add date-time format to fields | +| 1.4.6 | 2023-10-04 | [31056](https://github.com/airbytehq/airbyte/pull/31056) | Migrate spec properties' `repository` and `branch` type to \ | +| 1.4.5 | 2023-10-02 | [31023](https://github.com/airbytehq/airbyte/pull/31023) | Increase backoff for stream `Contributor Activity` | +| 1.4.4 | 2023-10-02 | [30971](https://github.com/airbytehq/airbyte/pull/30971) | Mark `start_date` as optional. 
| +| 1.4.3 | 2023-10-02 | [30979](https://github.com/airbytehq/airbyte/pull/30979) | Fetch archived records in `Project Cards` | +| 1.4.2 | 2023-09-30 | [30927](https://github.com/airbytehq/airbyte/pull/30927) | Provide actionable user error messages | +| 1.4.1 | 2023-09-30 | [30839](https://github.com/airbytehq/airbyte/pull/30839) | Update CDK to Latest version | +| 1.4.0 | 2023-09-29 | [30823](https://github.com/airbytehq/airbyte/pull/30823) | Add new stream `issue Timeline Events` | +| 1.3.1 | 2023-09-28 | [30824](https://github.com/airbytehq/airbyte/pull/30824) | Handle empty response in stream `ContributorActivity` | +| 1.3.0 | 2023-09-25 | [30731](https://github.com/airbytehq/airbyte/pull/30731) | Add new stream `ProjectsV2` | +| 1.2.1 | 2023-09-22 | [30693](https://github.com/airbytehq/airbyte/pull/30693) | Handle 404 error in `TeamMemberShips` | +| 1.2.0 | 2023-09-22 | [30647](https://github.com/airbytehq/airbyte/pull/30647) | Add support for self-hosted GitHub instances | +| 1.1.1 | 2023-09-21 | [30654](https://github.com/airbytehq/airbyte/pull/30654) | Rewrite source connection error messages | +| 1.1.0 | 2023-08-03 | [30615](https://github.com/airbytehq/airbyte/pull/30615) | Add new stream `Contributor Activity` | +| 1.0.4 | 2023-08-03 | [29031](https://github.com/airbytehq/airbyte/pull/29031) | Reverted `advancedAuth` spec changes | +| 1.0.3 | 2023-08-01 | [28910](https://github.com/airbytehq/airbyte/pull/28910) | Updated `advancedAuth` broken references | +| 1.0.2 | 2023-07-11 | [28144](https://github.com/airbytehq/airbyte/pull/28144) | Add `archived_at` property to `Organizations` schema parameter | +| 1.0.1 | 2023-05-22 | [25838](https://github.com/airbytehq/airbyte/pull/25838) | Deprecate "page size" input parameter | +| 1.0.0 | 2023-05-19 | [25778](https://github.com/airbytehq/airbyte/pull/25778) | Improve repo(s) name validation on UI | +| 0.5.0 | 2023-05-16 | 
[25793](https://github.com/airbytehq/airbyte/pull/25793) | Implement client-side throttling of requests | +| 0.4.11 | 2023-05-12 | [26025](https://github.com/airbytehq/airbyte/pull/26025) | Added more transparent depiction of the personal access token expired | +| 0.4.10 | 2023-05-15 | [26075](https://github.com/airbytehq/airbyte/pull/26075) | Add more specific error message description for no repos case. | +| 0.4.9 | 2023-05-01 | [24523](https://github.com/airbytehq/airbyte/pull/24523) | Add undeclared columns to spec | | 0.4.8 | 2023-04-19 | [00000](https://github.com/airbytehq/airbyte/pull/25312) | Fix repo name validation | | 0.4.7 | 2023-03-24 | [24457](https://github.com/airbytehq/airbyte/pull/24457) | Add validation and transformation for repositories config | | 0.4.6 | 2023-03-24 | [24398](https://github.com/airbytehq/airbyte/pull/24398) | Fix caching for `get_starting_point` in stream "Commits" | From b1b11870dbcf31df4976a6b896eb72b411d5215e Mon Sep 17 00:00:00 2001 From: Augustin Date: Wed, 4 Sep 2024 10:03:48 +0200 Subject: [PATCH 19/51] metadata-service[orchestrator]: fix broken test (#45120) --- .../metadata_service/orchestrator/tests/test_registry.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/tests/test_registry.py b/airbyte-ci/connectors/metadata_service/orchestrator/tests/test_registry.py index 98902a0e6d22..d47510947188 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/tests/test_registry.py +++ b/airbyte-ci/connectors/metadata_service/orchestrator/tests/test_registry.py @@ -460,6 +460,7 @@ def test_set_language_from_tags(): mock_metadata_entry = mock.Mock() mock_metadata_entry.metadata_definition.dict.return_value = metadata + mock_metadata_entry.dependency_file_url = "test-dependency-file-url" result = metadata_to_registry_entry(mock_metadata_entry, "oss") assert result["language"] == "manifest-only" @@ -481,6 +482,6 @@ def 
test_language_from_tags_does_not_override_top_level_language(): mock_metadata_entry = mock.Mock() mock_metadata_entry.metadata_definition.dict.return_value = metadata - + mock_metadata_entry.dependency_file_url = "test-dependency-file-url" result = metadata_to_registry_entry(mock_metadata_entry, "oss") assert result["language"] == "python" From a3a2b47c2aa063e26ffa23c12d12c7331787d9d7 Mon Sep 17 00:00:00 2001 From: Brian Li Date: Wed, 4 Sep 2024 08:44:41 -0400 Subject: [PATCH 20/51] Update `abctl version` command in oss-quickstart (#44927) --- docs/using-airbyte/getting-started/oss-quickstart.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/using-airbyte/getting-started/oss-quickstart.md b/docs/using-airbyte/getting-started/oss-quickstart.md index 545881fdd136..9df855f7334d 100644 --- a/docs/using-airbyte/getting-started/oss-quickstart.md +++ b/docs/using-airbyte/getting-started/oss-quickstart.md @@ -109,7 +109,7 @@ sudo mv abctl /usr/local/bin **5: Verify the installation** ```bash -abctl --version +abctl version ``` If this command prints the installed version of the Airbyte Command Line Tool, it confirms that you are now ready to manage a local Airbyte instance using `abctl`. 
From 75cee334ceda9f2b70e168db883abf4f52ecea21 Mon Sep 17 00:00:00 2001 From: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Date: Wed, 4 Sep 2024 09:18:11 -0400 Subject: [PATCH 21/51] Source Orbit: archive connector (#44788) --- .../connectors/source-orbit/README.md | 104 -- .../connectors/source-orbit/__init__.py | 3 - .../source-orbit/acceptance-test-config.yml | 26 - .../integration_tests/__init__.py | 3 - .../integration_tests/abnormal_state.json | 5 - .../integration_tests/acceptance.py | 16 - .../integration_tests/configured_catalog.json | 44 - .../integration_tests/invalid_config.json | 5 - .../integration_tests/sample_config.json | 5 - .../integration_tests/sample_state.json | 5 - .../connectors/source-orbit/main.py | 8 - .../connectors/source-orbit/metadata.yaml | 8 +- .../connectors/source-orbit/poetry.lock | 1046 ----------------- .../connectors/source-orbit/pyproject.toml | 28 - .../source-orbit/source_orbit/__init__.py | 8 - .../source-orbit/source_orbit/manifest.yaml | 473 -------- .../source-orbit/source_orbit/run.py | 14 - .../source-orbit/source_orbit/source.py | 18 - 18 files changed, 4 insertions(+), 1815 deletions(-) delete mode 100644 airbyte-integrations/connectors/source-orbit/README.md delete mode 100644 airbyte-integrations/connectors/source-orbit/__init__.py delete mode 100644 airbyte-integrations/connectors/source-orbit/acceptance-test-config.yml delete mode 100644 airbyte-integrations/connectors/source-orbit/integration_tests/__init__.py delete mode 100644 airbyte-integrations/connectors/source-orbit/integration_tests/abnormal_state.json delete mode 100644 airbyte-integrations/connectors/source-orbit/integration_tests/acceptance.py delete mode 100644 airbyte-integrations/connectors/source-orbit/integration_tests/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/source-orbit/integration_tests/invalid_config.json delete mode 100644 
airbyte-integrations/connectors/source-orbit/integration_tests/sample_config.json delete mode 100644 airbyte-integrations/connectors/source-orbit/integration_tests/sample_state.json delete mode 100644 airbyte-integrations/connectors/source-orbit/main.py delete mode 100644 airbyte-integrations/connectors/source-orbit/poetry.lock delete mode 100644 airbyte-integrations/connectors/source-orbit/pyproject.toml delete mode 100644 airbyte-integrations/connectors/source-orbit/source_orbit/__init__.py delete mode 100644 airbyte-integrations/connectors/source-orbit/source_orbit/manifest.yaml delete mode 100644 airbyte-integrations/connectors/source-orbit/source_orbit/run.py delete mode 100644 airbyte-integrations/connectors/source-orbit/source_orbit/source.py diff --git a/airbyte-integrations/connectors/source-orbit/README.md b/airbyte-integrations/connectors/source-orbit/README.md deleted file mode 100644 index bd7cdd17bb3e..000000000000 --- a/airbyte-integrations/connectors/source-orbit/README.md +++ /dev/null @@ -1,104 +0,0 @@ -# Orbit source connector - -This is the repository for the Orbit source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/orbit). - -## Local development - -### Prerequisites - -- Python (~=3.9) -- Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) - -### Installing the connector - -From this connector directory, run: - -```bash -poetry install --with dev -``` - -### Create credentials - -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/orbit) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_orbit/spec.yaml` file. 
-Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `sample_files/sample_config.json` for a sample config file. - -### Locally running the connector - -``` -poetry run source-orbit spec -poetry run source-orbit check --config secrets/config.json -poetry run source-orbit discover --config secrets/config.json -poetry run source-orbit read --config secrets/config.json --catalog sample_files/configured_catalog.json -``` - -### Running unit tests - -To run unit tests locally, from the connector directory run: - -``` -poetry run pytest unit_tests -``` - -### Building the docker image - -1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) -2. Run the following command to build the docker image: - -```bash -airbyte-ci connectors --name=source-orbit build -``` - -An image will be available on your host with the tag `airbyte/source-orbit:dev`. - -### Running as a docker container - -Then run any of the connector commands as follows: - -``` -docker run --rm airbyte/source-orbit:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-orbit:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-orbit:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-orbit:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -### Running our CI test suite - -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): - -```bash -airbyte-ci connectors --name=source-orbit test -``` - -### Customizing acceptance Tests - -Customize `acceptance-test-config.yml` file to configure acceptance tests. 
See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. - -### Dependency Management - -All of your dependencies should be managed via Poetry. -To add a new dependency, run: - -```bash -poetry add -``` - -Please commit the changes to `pyproject.toml` and `poetry.lock` files. - -## Publishing a new version of the connector - -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? - -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-orbit test` -2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): - - bump the `dockerImageTag` value in in `metadata.yaml` - - bump the `version` value in `pyproject.toml` -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/orbit.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. -8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. 
diff --git a/airbyte-integrations/connectors/source-orbit/__init__.py b/airbyte-integrations/connectors/source-orbit/__init__.py deleted file mode 100644 index c941b3045795..000000000000 --- a/airbyte-integrations/connectors/source-orbit/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# diff --git a/airbyte-integrations/connectors/source-orbit/acceptance-test-config.yml b/airbyte-integrations/connectors/source-orbit/acceptance-test-config.yml deleted file mode 100644 index feeebbfa13ee..000000000000 --- a/airbyte-integrations/connectors/source-orbit/acceptance-test-config.yml +++ /dev/null @@ -1,26 +0,0 @@ -# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) -# for more information about how to configure these tests -connector_image: airbyte/source-orbit:dev -test_strictness_level: low -acceptance_tests: - spec: - tests: - - spec_path: "source_orbit/spec.yaml" - connection: - tests: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" - discovery: - tests: - - config_path: "secrets/config.json" - basic_read: - tests: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - empty_streams: [] - full_refresh: - tests: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-orbit/integration_tests/__init__.py b/airbyte-integrations/connectors/source-orbit/integration_tests/__init__.py deleted file mode 100644 index c941b3045795..000000000000 --- a/airbyte-integrations/connectors/source-orbit/integration_tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# diff --git a/airbyte-integrations/connectors/source-orbit/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-orbit/integration_tests/abnormal_state.json deleted file mode 100644 index 52b0f2c2118f..000000000000 --- a/airbyte-integrations/connectors/source-orbit/integration_tests/abnormal_state.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "todo-stream-name": { - "todo-field-name": "todo-abnormal-value" - } -} diff --git a/airbyte-integrations/connectors/source-orbit/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-orbit/integration_tests/acceptance.py deleted file mode 100644 index 9e6409236281..000000000000 --- a/airbyte-integrations/connectors/source-orbit/integration_tests/acceptance.py +++ /dev/null @@ -1,16 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import pytest - -pytest_plugins = ("connector_acceptance_test.plugin",) - - -@pytest.fixture(scope="session", autouse=True) -def connector_setup(): - """This fixture is a placeholder for external resources that acceptance test might require.""" - # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments - yield - # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-orbit/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-orbit/integration_tests/configured_catalog.json deleted file mode 100644 index a0d0091af9f8..000000000000 --- a/airbyte-integrations/connectors/source-orbit/integration_tests/configured_catalog.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "members", - "json_schema": { - "$schema": "http://json-schema.org/draft-04/schema#", - "type": "object", - "properties": { - "api_token": { - "type": "string" - }, - "workspace": { - "type": "string" - } - } - }, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "workspace", - "json_schema": { - "$schema": "http://json-schema.org/draft-04/schema#", - "type": "object", - "properties": { - "api_token": { - "type": "string" - }, - "workspace": { - "type": "string" - } - } - }, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - } - ] -} diff --git a/airbyte-integrations/connectors/source-orbit/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-orbit/integration_tests/invalid_config.json deleted file mode 100644 index f9ccb6888e3f..000000000000 --- a/airbyte-integrations/connectors/source-orbit/integration_tests/invalid_config.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "api_token": "invalid_api_key", - "workspace": "airbyte", - "start_date": "2022-06-26" -} diff --git a/airbyte-integrations/connectors/source-orbit/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-orbit/integration_tests/sample_config.json deleted file mode 100644 index c9a6730146d3..000000000000 --- a/airbyte-integrations/connectors/source-orbit/integration_tests/sample_config.json +++ /dev/null 
@@ -1,5 +0,0 @@ -{ - "api_token": "api_key", - "workspace": "airbyte", - "start_date": "2022-06-26" -} diff --git a/airbyte-integrations/connectors/source-orbit/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-orbit/integration_tests/sample_state.json deleted file mode 100644 index 3587e579822d..000000000000 --- a/airbyte-integrations/connectors/source-orbit/integration_tests/sample_state.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "todo-stream-name": { - "todo-field-name": "value" - } -} diff --git a/airbyte-integrations/connectors/source-orbit/main.py b/airbyte-integrations/connectors/source-orbit/main.py deleted file mode 100644 index 16b6891a52b6..000000000000 --- a/airbyte-integrations/connectors/source-orbit/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from source_orbit.run import run - -if __name__ == "__main__": - run() diff --git a/airbyte-integrations/connectors/source-orbit/metadata.yaml b/airbyte-integrations/connectors/source-orbit/metadata.yaml index 96fa58e65e37..ea9a409d698e 100644 --- a/airbyte-integrations/connectors/source-orbit/metadata.yaml +++ b/airbyte-integrations/connectors/source-orbit/metadata.yaml @@ -19,16 +19,16 @@ data: name: Orbit registryOverrides: cloud: - enabled: true + enabled: false oss: - enabled: true + enabled: false releaseDate: "2022-06-27" releaseStage: alpha remoteRegistries: pypi: - enabled: true + enabled: false packageName: airbyte-source-orbit - supportLevel: community + supportLevel: archived tags: - language:python - cdk:low-code diff --git a/airbyte-integrations/connectors/source-orbit/poetry.lock b/airbyte-integrations/connectors/source-orbit/poetry.lock deleted file mode 100644 index 66297feaa9ca..000000000000 --- a/airbyte-integrations/connectors/source-orbit/poetry.lock +++ /dev/null @@ -1,1046 +0,0 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
- -[[package]] -name = "airbyte-cdk" -version = "0.80.0" -description = "A framework for writing Airbyte Connectors." -optional = false -python-versions = "<4.0,>=3.9" -files = [ - {file = "airbyte_cdk-0.80.0-py3-none-any.whl", hash = "sha256:060e92323a73674fa4e9e2e4a1eb312b9b9d072c9bbe5fa28f54ef21cb4974f3"}, - {file = "airbyte_cdk-0.80.0.tar.gz", hash = "sha256:1383512a83917fecca5b24cea4c72aa5c561cf96dd464485fbcefda48fe574c5"}, -] - -[package.dependencies] -airbyte-protocol-models = "0.5.1" -backoff = "*" -cachetools = "*" -Deprecated = ">=1.2,<1.3" -dpath = ">=2.0.1,<2.1.0" -genson = "1.2.2" -isodate = ">=0.6.1,<0.7.0" -Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<0.3" -jsonschema = ">=3.2.0,<3.3.0" -pendulum = "<3.0.0" -pydantic = ">=1.10.8,<2.0.0" -pyrate-limiter = ">=3.1.0,<3.2.0" -python-dateutil = "*" -PyYAML = ">=6.0.1,<7.0.0" -requests = "*" -requests_cache = "*" -wcmatch = "8.4" - -[package.extras] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] -vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] - -[[package]] -name = "airbyte-protocol-models" -version = "0.5.1" -description = "Declares the Airbyte Protocol." -optional = false -python-versions = ">=3.8" -files = [ - {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, - {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, -] - -[package.dependencies] -pydantic = ">=1.9.2,<2.0.0" - -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] - -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - -[[package]] -name = "backoff" -version = "2.2.1" -description = "Function decoration for backoff and retry" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, - {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, -] - -[[package]] -name = "bracex" -version = "2.5" -description = "Bash style brace expander." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "bracex-2.5-py3-none-any.whl", hash = "sha256:d2fcf4b606a82ac325471affe1706dd9bbaa3536c91ef86a31f6b766f3dad1d0"}, - {file = "bracex-2.5.tar.gz", hash = "sha256:0725da5045e8d37ea9592ab3614d8b561e22c3c5fde3964699be672e072ab611"}, -] - -[[package]] -name = "cachetools" -version = "5.5.0" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, - {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, -] - -[[package]] -name = "cattrs" -version = "24.1.0" -description = "Composable complex class support for attrs and dataclasses." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cattrs-24.1.0-py3-none-any.whl", hash = "sha256:043bb8af72596432a7df63abcff0055ac0f198a4d2e95af8db5a936a7074a761"}, - {file = "cattrs-24.1.0.tar.gz", hash = "sha256:8274f18b253bf7674a43da851e3096370d67088165d23138b04a1c04c8eaf48e"}, -] - -[package.dependencies] -attrs = ">=23.1.0" -exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} - -[package.extras] -bson = ["pymongo (>=4.4.0)"] -cbor2 = ["cbor2 (>=5.4.6)"] -msgpack = ["msgpack (>=1.0.5)"] -msgspec = ["msgspec (>=0.18.5)"] -orjson = ["orjson (>=3.9.2)"] -pyyaml = ["pyyaml (>=6.0)"] -tomlkit = ["tomlkit (>=0.11.8)"] -ujson = ["ujson (>=5.7.0)"] - -[[package]] -name = "certifi" -version = "2024.8.30" -description = "Python package for providing Mozilla's CA Bundle." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = 
"charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - -[[package]] -name = "dpath" -version = "2.0.8" -description = "Filesystem-like pathing and searching for dictionaries" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, - {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "genson" -version = "1.2.2" -description = "GenSON is a 
powerful, user-friendly JSON Schema generator." -optional = false -python-versions = "*" -files = [ - {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, -] - -[[package]] -name = "idna" -version = "3.8" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.6" -files = [ - {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, - {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "isodate" -version = "0.6.1" -description = "An ISO 8601 date/time/duration parser and formatter" -optional = false -python-versions = "*" -files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jsonref" -version = "0.2" -description = "An implementation of JSON Reference for Python" -optional = false -python-versions = "*" -files = [ - {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, - {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, -] - -[[package]] -name = "jsonschema" -version = "3.2.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = "*" -files = [ - {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, - {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, -] - -[package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0" -setuptools = "*" -six = ">=1.11.0" - -[package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "packaging" -version = "24.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = 
"sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, -] - -[[package]] -name = "pendulum" -version = "2.1.2" -description = "Python datetimes made easy" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, - {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, - {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, - {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, - {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, - {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, - {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, - {file = 
"pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, - {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, - {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, - {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, - {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, -] - -[package.dependencies] -python-dateutil = ">=2.6,<3.0" -pytzdata = ">=2020.1" - -[[package]] -name = "platformdirs" -version = "4.2.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - -[[package]] -name = "pydantic" -version = "1.10.18" -description = "Data validation and settings management using python type hints" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydantic-1.10.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e405ffcc1254d76bb0e760db101ee8916b620893e6edfbfee563b3c6f7a67c02"}, - {file = "pydantic-1.10.18-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:e306e280ebebc65040034bff1a0a81fd86b2f4f05daac0131f29541cafd80b80"}, - {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11d9d9b87b50338b1b7de4ebf34fd29fdb0d219dc07ade29effc74d3d2609c62"}, - {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b661ce52c7b5e5f600c0c3c5839e71918346af2ef20062705ae76b5c16914cab"}, - {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c20f682defc9ef81cd7eaa485879ab29a86a0ba58acf669a78ed868e72bb89e0"}, - {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5ae6b7c8483b1e0bf59e5f1843e4fd8fd405e11df7de217ee65b98eb5462861"}, - {file = "pydantic-1.10.18-cp310-cp310-win_amd64.whl", hash = "sha256:74fe19dda960b193b0eb82c1f4d2c8e5e26918d9cda858cbf3f41dd28549cb70"}, - {file = "pydantic-1.10.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72fa46abace0a7743cc697dbb830a41ee84c9db8456e8d77a46d79b537efd7ec"}, - {file = "pydantic-1.10.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef0fe7ad7cbdb5f372463d42e6ed4ca9c443a52ce544472d8842a0576d830da5"}, - {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00e63104346145389b8e8f500bc6a241e729feaf0559b88b8aa513dd2065481"}, - {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae6fa2008e1443c46b7b3a5eb03800121868d5ab6bc7cda20b5df3e133cde8b3"}, - {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9f463abafdc92635da4b38807f5b9972276be7c8c5121989768549fceb8d2588"}, - {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3445426da503c7e40baccefb2b2989a0c5ce6b163679dd75f55493b460f05a8f"}, - {file = "pydantic-1.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:467a14ee2183bc9c902579bb2f04c3d3dac00eff52e252850509a562255b2a33"}, - 
{file = "pydantic-1.10.18-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:efbc8a7f9cb5fe26122acba1852d8dcd1e125e723727c59dcd244da7bdaa54f2"}, - {file = "pydantic-1.10.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:24a4a159d0f7a8e26bf6463b0d3d60871d6a52eac5bb6a07a7df85c806f4c048"}, - {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b74be007703547dc52e3c37344d130a7bfacca7df112a9e5ceeb840a9ce195c7"}, - {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcb20d4cb355195c75000a49bb4a31d75e4295200df620f454bbc6bdf60ca890"}, - {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46f379b8cb8a3585e3f61bf9ae7d606c70d133943f339d38b76e041ec234953f"}, - {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbfbca662ed3729204090c4d09ee4beeecc1a7ecba5a159a94b5a4eb24e3759a"}, - {file = "pydantic-1.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:c6d0a9f9eccaf7f438671a64acf654ef0d045466e63f9f68a579e2383b63f357"}, - {file = "pydantic-1.10.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d5492dbf953d7d849751917e3b2433fb26010d977aa7a0765c37425a4026ff1"}, - {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe734914977eed33033b70bfc097e1baaffb589517863955430bf2e0846ac30f"}, - {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15fdbe568beaca9aacfccd5ceadfb5f1a235087a127e8af5e48df9d8a45ae85c"}, - {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c3e742f62198c9eb9201781fbebe64533a3bbf6a76a91b8d438d62b813079dbc"}, - {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19a3bd00b9dafc2cd7250d94d5b578edf7a0bd7daf102617153ff9a8fa37871c"}, - {file = "pydantic-1.10.18-cp37-cp37m-win_amd64.whl", hash = 
"sha256:2ce3fcf75b2bae99aa31bd4968de0474ebe8c8258a0110903478bd83dfee4e3b"}, - {file = "pydantic-1.10.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:335a32d72c51a313b33fa3a9b0fe283503272ef6467910338e123f90925f0f03"}, - {file = "pydantic-1.10.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:34a3613c7edb8c6fa578e58e9abe3c0f5e7430e0fc34a65a415a1683b9c32d9a"}, - {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9ee4e6ca1d9616797fa2e9c0bfb8815912c7d67aca96f77428e316741082a1b"}, - {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23e8ec1ce4e57b4f441fc91e3c12adba023fedd06868445a5b5f1d48f0ab3682"}, - {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:44ae8a3e35a54d2e8fa88ed65e1b08967a9ef8c320819a969bfa09ce5528fafe"}, - {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5389eb3b48a72da28c6e061a247ab224381435256eb541e175798483368fdd3"}, - {file = "pydantic-1.10.18-cp38-cp38-win_amd64.whl", hash = "sha256:069b9c9fc645474d5ea3653788b544a9e0ccd3dca3ad8c900c4c6eac844b4620"}, - {file = "pydantic-1.10.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:80b982d42515632eb51f60fa1d217dfe0729f008e81a82d1544cc392e0a50ddf"}, - {file = "pydantic-1.10.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:aad8771ec8dbf9139b01b56f66386537c6fe4e76c8f7a47c10261b69ad25c2c9"}, - {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941a2eb0a1509bd7f31e355912eb33b698eb0051730b2eaf9e70e2e1589cae1d"}, - {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65f7361a09b07915a98efd17fdec23103307a54db2000bb92095457ca758d485"}, - {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6951f3f47cb5ca4da536ab161ac0163cab31417d20c54c6de5ddcab8bc813c3f"}, - {file = 
"pydantic-1.10.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a4c5eec138a9b52c67f664c7d51d4c7234c5ad65dd8aacd919fb47445a62c86"}, - {file = "pydantic-1.10.18-cp39-cp39-win_amd64.whl", hash = "sha256:49e26c51ca854286bffc22b69787a8d4063a62bf7d83dc21d44d2ff426108518"}, - {file = "pydantic-1.10.18-py3-none-any.whl", hash = "sha256:06a189b81ffc52746ec9c8c007f16e5167c8b0a696e1a726369327e3db7b2a82"}, - {file = "pydantic-1.10.18.tar.gz", hash = "sha256:baebdff1907d1d96a139c25136a9bb7d17e118f133a76a2ef3b845e831e3403a"}, -] - -[package.dependencies] -typing-extensions = ">=4.2.0" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] - -[[package]] -name = "pyrate-limiter" -version = "3.1.1" -description = "Python Rate-Limiter using Leaky-Bucket Algorithm" -optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, - {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, -] - -[package.extras] -all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] -docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] - -[[package]] -name = "pyrsistent" -version = "0.20.0" -description = "Persistent/Functional/Immutable data structures" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, - {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, - {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, - {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, - {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, - {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, - {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, - {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, - {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, - {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, - {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, - {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, - {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, - {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, - {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, - {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, - {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, -] - -[[package]] -name = "pytest" -version = "6.2.5" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] - -[package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] - -[[package]] -name = "pytest-mock" -version = "3.14.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, - {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = 
"sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, -] - -[package.dependencies] -pytest = ">=6.2.5" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytzdata" -version = "2020.1" -description = "The Olson timezone database for Python." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, - {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = 
"PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file 
= "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-cache" -version = "1.2.1" -description = "A persistent cache for python requests" -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests_cache-1.2.1-py3-none-any.whl", hash = "sha256:1285151cddf5331067baa82598afe2d47c7495a1334bfe7a7d329b43e9fd3603"}, - {file = "requests_cache-1.2.1.tar.gz", hash = "sha256:68abc986fdc5b8d0911318fbb5f7c80eebcd4d01bfacc6685ecf8876052511d1"}, -] - -[package.dependencies] -attrs = ">=21.2" -cattrs = ">=22.2" -platformdirs = ">=2.5" -requests = ">=2.22" -url-normalize = ">=1.4" -urllib3 = ">=1.25.5" - -[package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] -bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] -dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] -json = ["ujson (>=5.4)"] -mongodb = ["pymongo (>=3)"] -redis = ["redis (>=3)"] -security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=6.0.1)"] - -[[package]] -name = "requests-mock" -version = "1.12.1" -description = "Mock out responses from the requests package" -optional = false 
-python-versions = ">=3.5" -files = [ - {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, - {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, -] - -[package.dependencies] -requests = ">=2.22,<3" - -[package.extras] -fixture = ["fixtures"] - -[[package]] -name = "setuptools" -version = "74.0.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"}, - {file = "setuptools-74.0.0.tar.gz", hash = "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = 
["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "url-normalize" -version = "1.4.3" -description = "URL normalization for Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, - {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "urllib3" -version = 
"2.2.2" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "wcmatch" -version = "8.4" -description = "Wildcard/glob file name matcher." -optional = false -python-versions = ">=3.7" -files = [ - {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, - {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, -] - -[package.dependencies] -bracex = ">=2.1.1" - -[[package]] -name = "wrapt" -version = "1.16.0" -description = "Module for decorators, wrappers and monkey patching." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = 
"sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, -] - -[metadata] -lock-version = "2.0" -python-versions = "^3.9,<3.12" -content-hash = "990042bd8aff2361370f7cea38b2dffbadb5bd28397a241166061ec2619f6426" diff --git a/airbyte-integrations/connectors/source-orbit/pyproject.toml b/airbyte-integrations/connectors/source-orbit/pyproject.toml deleted file mode 100644 index 043e81ab9926..000000000000 --- a/airbyte-integrations/connectors/source-orbit/pyproject.toml +++ /dev/null @@ -1,28 +0,0 @@ -[build-system] -requires = [ "poetry-core>=1.0.0",] -build-backend = "poetry.core.masonry.api" - -[tool.poetry] -version = "0.3.18" -name = "source-orbit" -description = "Source implementation for Orbit." 
-authors = [ "Airbyte ",] -license = "MIT" -readme = "README.md" -documentation = "https://docs.airbyte.com/integrations/sources/orbit" -homepage = "https://airbyte.com" -repository = "https://github.com/airbytehq/airbyte" -[[tool.poetry.packages]] -include = "source_orbit" - -[tool.poetry.dependencies] -python = "^3.9,<3.12" -airbyte-cdk = "0.80.0" - -[tool.poetry.scripts] -source-orbit = "source_orbit.run:run" - -[tool.poetry.group.dev.dependencies] -pytest-mock = "^3.6.1" -pytest = "^6.2" -requests-mock = "^1.9.3" diff --git a/airbyte-integrations/connectors/source-orbit/source_orbit/__init__.py b/airbyte-integrations/connectors/source-orbit/source_orbit/__init__.py deleted file mode 100644 index 4e4286af3993..000000000000 --- a/airbyte-integrations/connectors/source-orbit/source_orbit/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from .source import SourceOrbit - -__all__ = ["SourceOrbit"] diff --git a/airbyte-integrations/connectors/source-orbit/source_orbit/manifest.yaml b/airbyte-integrations/connectors/source-orbit/source_orbit/manifest.yaml deleted file mode 100644 index 12fdfb40bb7b..000000000000 --- a/airbyte-integrations/connectors/source-orbit/source_orbit/manifest.yaml +++ /dev/null @@ -1,473 +0,0 @@ -version: 0.79.1 - -type: DeclarativeSource - -check: - type: CheckStream - stream_names: - - workspace - -definitions: - streams: - workspace: - type: DeclarativeStream - name: workspace - primary_key: - - id - retriever: - type: SimpleRetriever - requester: - $ref: "#/definitions/base_requester" - path: workspaces/{{config['workspace']}} - http_method: GET - request_parameters: - start_date: "{{ config.get('start_date') }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - data - schema_loader: - type: InlineSchemaLoader - schema: - $ref: "#/schemas/workspace" - members: - type: DeclarativeStream - name: members - primary_key: - - id - 
retriever: - type: SimpleRetriever - requester: - $ref: "#/definitions/base_requester" - path: "{{config['workspace']}}/members" - http_method: GET - request_parameters: - start_date: "{{ config.get('start_date') }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - data - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: page - page_size_option: - type: RequestOption - inject_into: request_parameter - field_name: items - pagination_strategy: - type: PageIncrement - start_from_page: 1 - page_size: 100 - schema_loader: - type: InlineSchemaLoader - schema: - $ref: "#/schemas/members" - base_requester: - type: HttpRequester - url_base: https://app.orbit.love/api/v1/ - authenticator: - type: BearerAuthenticator - api_token: "{{ config['api_token'] }}" - -streams: - - $ref: "#/definitions/streams/workspace" - - $ref: "#/definitions/streams/members" - -spec: - type: Spec - connection_specification: - type: object - $schema: http://json-schema.org/draft-07/schema# - required: - - api_token - - workspace - properties: - api_token: - type: string - airbyte_secret: true - title: API Token - description: >- - Authorizes you to work with Orbit workspaces associated with the - token. - order: 0 - workspace: - type: string - title: Workspace - description: >- - The unique name of the workspace that your API token is associated - with. - order: 1 - start_date: - type: string - title: Start Date - description: >- - Date in the format 2022-06-26. Only load members whose last activities - are after this date. 
- pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ - order: 2 - additionalProperties: true - -metadata: - autoImportSchema: - workspace: false - members: false - -schemas: - workspace: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - type: - type: - - "null" - - string - description: The type of entity, in this case, it's 'workspace' - attributes: - type: - - "null" - - object - additionalProperties: true - description: Attributes related to the workspace - properties: - activities_count: - type: - - "null" - - integer - description: The total number of activities in the workspace - created_at: - type: - - "null" - - string - description: Date and time when the workspace was created - members_count: - type: - - "null" - - integer - description: The total number of members in the workspace - name: - type: - - "null" - - string - description: The name of the workspace - slug: - type: - - "null" - - string - description: A unique identifier for the workspace - tags: - type: - - "null" - - object - additionalProperties: true - description: Tags associated with the workspace - properties: {} - updated_at: - type: - - "null" - - string - description: Date and time when the workspace was last updated - id: - type: - - "null" - - string - description: The unique identifier for the workspace - relationships: - type: object - description: Relationships of the workspace with other entities - properties: - last_activity: - type: - - "null" - - object - description: Information about the last activity in the workspace - properties: - data: - type: - - "null" - - object - description: Details of the last activity - last_member: - type: - - "null" - - object - description: Information about the last member who joined the workspace - properties: - data: - type: - - "null" - - object - description: Details of the last member - repositories: - type: - - "null" - - object - description: List of repositories associated with the 
workspace - properties: - data: - type: - - "null" - - array - description: Details of the repositories - members: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - type: - type: - - "null" - - string - description: Type of entity, in this case, the member - attributes: - type: - - "null" - - object - additionalProperties: true - description: Contains the main attributes data of the member - properties: - activities_count: - type: - - "null" - - integer - description: Total number of activities performed by the member - activities_score: - type: - - "null" - - number - description: Score calculated based on the member's activities - avatar_url: - type: - - "null" - - string - description: URL of the member's avatar image - bio: - type: - - "null" - - string - description: Member's biography or description - birthday: - type: - - "null" - - string - description: Date of birth of the member - company: - type: - - "null" - - string - description: Name of the company the member is associated with - created: - type: - - "null" - - boolean - description: Creation timestamp of the member - created_at: - type: - - "null" - - string - description: Timestamp when the member was created - deleted_at: - type: - - "null" - - string - description: Timestamp when the member was deleted, if applicable - devto: - type: - - "null" - - string - description: DEV.TO profile link of the member - discord: - type: - - "null" - - string - description: Discord profile link of the member - discourse: - type: - - "null" - - string - description: Discourse profile link of the member - email: - type: - - "null" - - string - description: Email address of the member - first_activity_occurred_at: - type: - - "null" - - string - description: Timestamp of the first activity performed by the member - github: - type: - - "null" - - string - description: GitHub profile link of the member - github_followers: - type: - - "null" - - integer - 
description: Number of followers on GitHub - id: - type: - - "null" - - string - description: Unique identifier for the member - languages: - type: - - "null" - - array - description: List of programming languages known by the member - items: - type: - - "null" - - string - last_activity_occurred_at: - type: - - "null" - - string - description: Timestamp of the last activity performed by the member - linkedin: - type: - - "null" - - string - description: LinkedIn profile link of the member - location: - type: - - "null" - - string - description: Current location of the member - love: - type: - - "null" - - string - description: >- - Measure of appreciation or positive feedback received by the - member - merged_at: - type: - - "null" - - string - description: Timestamp when the member's data was merged - name: - type: - - "null" - - string - description: Full name of the member - orbit_level: - type: - - "null" - - integer - description: Level of engagement with Orbit platform - orbit_url: - type: - - "null" - - string - description: URL to access the member's Orbit profile - pronouns: - type: - - "null" - - string - description: Preferred pronouns of the member - reach: - type: - - "null" - - integer - description: Extent of influence or reach the member has - shipping_address: - type: - - "null" - - string - description: Shipping address details of the member - slug: - type: - - "null" - - string - description: Unique string used in URLs to identify the member - source: - type: - - "null" - - string - description: Source or platform from which the member originated - tag_list: - type: - - "null" - - array - description: List of tags associated with the member - items: - type: - - "null" - - string - tags: - type: - - "null" - - array - description: Additional tags associated with the member for categorization - items: - type: - - "null" - - string - teammate: - type: boolean - description: Indicates if the member is a teammate within an organization - title: - 
type: - - "null" - - string - description: Job title or role of the member - topics: - type: - - "null" - - array - description: List of topics of interest to the member - items: - type: - - "null" - - string - tshirt: - type: - - "null" - - string - description: T-shirt size preference of the member - twitter: - type: - - "null" - - string - description: Twitter profile link of the member - twitter_followers: - type: - - "null" - - integer - description: Number of followers on Twitter - updated_at: - type: - - "null" - - string - description: Timestamp of the last update to the member profile - url: - type: - - "null" - - string - description: URL of the member profile - fake: - type: - - "null" - - string - description: Indicates if the member data is fake or real - id: - type: - - "null" - - string - description: Unique identifier for the member - relationships: - type: - - "null" - - object - additionalProperties: true - description: Contains the relationship data of the member - properties: {} diff --git a/airbyte-integrations/connectors/source-orbit/source_orbit/run.py b/airbyte-integrations/connectors/source-orbit/source_orbit/run.py deleted file mode 100644 index 593cfb9a326a..000000000000 --- a/airbyte-integrations/connectors/source-orbit/source_orbit/run.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from airbyte_cdk.entrypoint import launch -from source_orbit import SourceOrbit - - -def run(): - source = SourceOrbit() - launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-orbit/source_orbit/source.py b/airbyte-integrations/connectors/source-orbit/source_orbit/source.py deleted file mode 100644 index 1fcba693e608..000000000000 --- a/airbyte-integrations/connectors/source-orbit/source_orbit/source.py +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource - -""" -This file provides the necessary constructs to interpret a provided declarative YAML configuration file into -source connector. - -WARNING: Do not modify this file. -""" - - -# Declarative Source -class SourceOrbit(YamlDeclarativeSource): - def __init__(self): - super().__init__(**{"path_to_yaml": "manifest.yaml"}) From ee698d623677633b0e31b21db688d968dd96c73c Mon Sep 17 00:00:00 2001 From: Augustin Date: Wed, 4 Sep 2024 17:03:45 +0200 Subject: [PATCH 22/51] metadata-service[orchestrator]: generate connector registry with release candidates (#44588) --- .../generated/ConnectorBreakingChanges.py | 49 +++ .../ConnectorMetadataDefinitionV0.py | 4 +- .../ConnectorRegistryDestinationDefinition.py | 104 +++++- .../generated/ConnectorRegistryReleases.py | 309 ++++++++++++++++++ .../ConnectorRegistrySourceDefinition.py | 109 +++++- .../models/generated/ConnectorRegistryV0.py | 125 ++++--- .../models/generated/ConnectorReleases.py | 4 +- .../models/generated/__init__.py | 2 + .../models/src/ConnectorBreakingChanges.yaml | 59 ++++ ...onnectorRegistryDestinationDefinition.yaml | 2 +- .../models/src/ConnectorRegistryReleases.yaml | 35 ++ .../ConnectorRegistrySourceDefinition.yaml | 2 +- .../models/src/ConnectorReleases.yaml | 58 +--- .../metadata_service/lib/pyproject.toml | 2 +- .../orchestrator/orchestrator/__init__.py | 24 +- .../orchestrator/assets/connector_metrics.py | 2 +- .../orchestrator/assets/github.py | 45 ++- .../orchestrator/assets/metadata.py | 21 +- .../orchestrator/assets/registry.py | 119 +++++-- .../orchestrator/assets/registry_entry.py | 130 ++++++-- .../orchestrator/jobs/registry.py | 3 +- .../orchestrator/models/metadata.py | 8 + .../orchestrator/orchestrator/sensors/gcs.py | 9 +- .../metadata_service/orchestrator/poetry.lock | 14 +- .../orchestrator/pyproject.toml | 2 +- 25 files changed, 1013 insertions(+), 228 deletions(-) create mode 100644 
airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorBreakingChanges.py create mode 100644 airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryReleases.py create mode 100644 airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorBreakingChanges.yaml create mode 100644 airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistryReleases.yaml diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorBreakingChanges.py b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorBreakingChanges.py new file mode 100644 index 000000000000..adaa55b5f4f4 --- /dev/null +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorBreakingChanges.py @@ -0,0 +1,49 @@ +# generated by datamodel-codegen: +# filename: ConnectorBreakingChanges.yaml + +from __future__ import annotations + +from datetime import date +from typing import Any, Dict, List, Optional + +from pydantic import AnyUrl, BaseModel, Extra, Field, constr + + +class StreamBreakingChangeScope(BaseModel): + class Config: + extra = Extra.forbid + + scopeType: Any = Field("stream", const=True) + impactedScopes: List[str] = Field(..., description="List of streams that are impacted by the breaking change.", min_items=1) + + +class BreakingChangeScope(BaseModel): + __root__: StreamBreakingChangeScope = Field(..., description="A scope that can be used to limit the impact of a breaking change.") + + +class VersionBreakingChange(BaseModel): + class Config: + extra = Extra.forbid + + upgradeDeadline: date = Field(..., description="The deadline by which to upgrade before the breaking change takes effect.") + message: str = Field(..., description="Descriptive message detailing the breaking change.") + migrationDocumentationUrl: Optional[AnyUrl] = Field( + None, + description="URL to documentation on how 
to migrate to the current version. Defaults to ${documentationUrl}-migrations#${version}", + ) + scopedImpact: Optional[List[BreakingChangeScope]] = Field( + None, + description="List of scopes that are impacted by the breaking change. If not specified, the breaking change cannot be scoped to reduce impact via the supported scope types.", + min_items=1, + ) + + +class ConnectorBreakingChanges(BaseModel): + class Config: + extra = Extra.forbid + + __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionBreakingChange] = Field( + ..., + description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade.", + title="ConnectorBreakingChanges", + ) diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorMetadataDefinitionV0.py b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorMetadataDefinitionV0.py index 05c366aed62e..2e0af418fe6c 100644 --- a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorMetadataDefinitionV0.py +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorMetadataDefinitionV0.py @@ -267,7 +267,9 @@ class Config: extra = Extra.forbid __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionBreakingChange] = Field( - ..., description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade." 
+ ..., + description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade.", + title="ConnectorBreakingChanges", ) diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryDestinationDefinition.py b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryDestinationDefinition.py index c10a1e246d07..92e64e1c11c4 100644 --- a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryDestinationDefinition.py +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryDestinationDefinition.py @@ -86,6 +86,16 @@ class Config: impactedScopes: List[str] = Field(..., description="List of streams that are impacted by the breaking change.", min_items=1) +class SuggestedStreams(BaseModel): + class Config: + extra = Extra.allow + + streams: Optional[List[str]] = Field( + None, + description="An array of streams that this connector suggests the average user will want. SuggestedStreams not being present for the source means that all streams are suggested. An empty list here means that no streams are suggested.", + ) + + class AirbyteInternal(BaseModel): class Config: extra = Extra.allow @@ -182,20 +192,9 @@ class Config: extra = Extra.forbid __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionBreakingChange] = Field( - ..., description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade." 
- ) - - -class ConnectorReleases(BaseModel): - class Config: - extra = Extra.forbid - - isReleaseCandidate: Optional[bool] = Field(False, description="Whether the release is eligible to be a release candidate.") - rolloutConfiguration: Optional[RolloutConfiguration] = None - breakingChanges: ConnectorBreakingChanges - migrationDocumentationUrl: Optional[AnyUrl] = Field( - None, - description="URL to documentation on how to migrate from the previous version to the current version. Defaults to ${documentationUrl}-migrations", + ..., + description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade.", + title="ConnectorBreakingChanges", ) @@ -230,11 +229,82 @@ class Config: description="an optional flag indicating whether DBT is used in the normalization. If the flag value is NULL - DBT is not used.", ) allowedHosts: Optional[AllowedHosts] = None - releases: Optional[ConnectorReleases] = None + releases: Optional[ConnectorRegistryReleases] = None ab_internal: Optional[AirbyteInternal] = None supportsRefreshes: Optional[bool] = False generated: Optional[GeneratedFields] = None packageInfo: Optional[ConnectorPackageInfo] = None - language: Optional[str] = Field( - None, description="The language the connector is written in" + language: Optional[str] = Field(None, description="The language the connector is written in") + + +class ConnectorRegistryReleases(BaseModel): + class Config: + extra = Extra.forbid + + releaseCandidates: Optional[ConnectorReleaseCandidates] = None + rolloutConfiguration: Optional[RolloutConfiguration] = None + breakingChanges: Optional[ConnectorBreakingChanges] = None + migrationDocumentationUrl: Optional[AnyUrl] = Field( + None, + description="URL to documentation on how to migrate from the previous version to the current version. 
Defaults to ${documentationUrl}-migrations", + ) + + +class ConnectorReleaseCandidates(BaseModel): + class Config: + extra = Extra.forbid + + __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionReleaseCandidate] = Field( + ..., description="Each entry denotes a release candidate version of a connector." + ) + + +class VersionReleaseCandidate(BaseModel): + class Config: + extra = Extra.forbid + + __root__: Union[ConnectorRegistrySourceDefinition, ConnectorRegistryDestinationDefinition] = Field( + ..., description="Contains information about a release candidate version of a connector." + ) + + +class ConnectorRegistrySourceDefinition(BaseModel): + class Config: + extra = Extra.allow + + sourceDefinitionId: UUID + name: str + dockerRepository: str + dockerImageTag: str + documentationUrl: str + icon: Optional[str] = None + iconUrl: Optional[str] = None + sourceType: Optional[Literal["api", "file", "database", "custom"]] = None + spec: Dict[str, Any] + tombstone: Optional[bool] = Field( + False, description="if false, the configuration is active. if true, then this configuration is permanently off." ) + public: Optional[bool] = Field(False, description="true if this connector definition is available to all workspaces") + custom: Optional[bool] = Field(False, description="whether this is a custom connector definition") + releaseStage: Optional[ReleaseStage] = None + supportLevel: Optional[SupportLevel] = None + releaseDate: Optional[date] = Field(None, description="The date when this connector was first released, in yyyy-mm-dd format.") + resourceRequirements: Optional[ActorDefinitionResourceRequirements] = None + protocolVersion: Optional[str] = Field(None, description="the Airbyte Protocol version supported by the connector") + allowedHosts: Optional[AllowedHosts] = None + suggestedStreams: Optional[SuggestedStreams] = None + maxSecondsBetweenMessages: Optional[int] = Field( + None, description="Number of seconds allowed between 2 airbyte protocol messages. 
The source will timeout if this delay is reach" + ) + erdUrl: Optional[str] = Field(None, description="The URL where you can visualize the ERD") + releases: Optional[ConnectorRegistryReleases] = None + ab_internal: Optional[AirbyteInternal] = None + generated: Optional[GeneratedFields] = None + packageInfo: Optional[ConnectorPackageInfo] = None + language: Optional[str] = Field(None, description="The language the connector is written in") + + +ConnectorRegistryDestinationDefinition.update_forward_refs() +ConnectorRegistryReleases.update_forward_refs() +ConnectorReleaseCandidates.update_forward_refs() +VersionReleaseCandidate.update_forward_refs() diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryReleases.py b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryReleases.py new file mode 100644 index 000000000000..80c930e04109 --- /dev/null +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryReleases.py @@ -0,0 +1,309 @@ +# generated by datamodel-codegen: +# filename: ConnectorRegistryReleases.yaml + +from __future__ import annotations + +from datetime import date, datetime +from typing import Any, Dict, List, Optional, Union +from uuid import UUID + +from pydantic import AnyUrl, BaseModel, Extra, Field, conint, constr +from typing_extensions import Literal + + +class RolloutConfiguration(BaseModel): + class Config: + extra = Extra.forbid + + initialPercentage: Optional[conint(ge=0, le=100)] = Field( + 0, description="The percentage of users that should receive the new version initially." + ) + maxPercentage: Optional[conint(ge=0, le=100)] = Field( + 50, description="The percentage of users who should receive the release candidate during the test phase before full rollout." + ) + advanceDelayMinutes: Optional[conint(ge=10)] = Field( + 10, description="The number of minutes to wait before advancing the rollout percentage." 
+ ) + + +class StreamBreakingChangeScope(BaseModel): + class Config: + extra = Extra.forbid + + scopeType: Any = Field("stream", const=True) + impactedScopes: List[str] = Field(..., description="List of streams that are impacted by the breaking change.", min_items=1) + + +class ReleaseStage(BaseModel): + __root__: Literal["alpha", "beta", "generally_available", "custom"] = Field( + ..., description="enum that describes a connector's release stage", title="ReleaseStage" + ) + + +class SupportLevel(BaseModel): + __root__: Literal["community", "certified", "archived"] = Field( + ..., description="enum that describes a connector's release stage", title="SupportLevel" + ) + + +class ResourceRequirements(BaseModel): + class Config: + extra = Extra.forbid + + cpu_request: Optional[str] = None + cpu_limit: Optional[str] = None + memory_request: Optional[str] = None + memory_limit: Optional[str] = None + + +class JobType(BaseModel): + __root__: Literal["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"] = ( + Field(..., description="enum that describes the different types of jobs that the platform runs.", title="JobType") + ) + + +class AllowedHosts(BaseModel): + class Config: + extra = Extra.allow + + hosts: Optional[List[str]] = Field( + None, + description="An array of hosts that this connector can connect to. AllowedHosts not being present for the source or destination means that access to all hosts is allowed. An empty list here means that no network access is granted.", + ) + + +class SuggestedStreams(BaseModel): + class Config: + extra = Extra.allow + + streams: Optional[List[str]] = Field( + None, + description="An array of streams that this connector suggests the average user will want. SuggestedStreams not being present for the source means that all streams are suggested. 
An empty list here means that no streams are suggested.", + ) + + +class AirbyteInternal(BaseModel): + class Config: + extra = Extra.allow + + sl: Optional[Literal[100, 200, 300]] = None + ql: Optional[Literal[100, 200, 300, 400, 500, 600]] = None + + +class GitInfo(BaseModel): + class Config: + extra = Extra.forbid + + commit_sha: Optional[str] = Field(None, description="The git commit sha of the last commit that modified this file.") + commit_timestamp: Optional[datetime] = Field(None, description="The git commit timestamp of the last commit that modified this file.") + commit_author: Optional[str] = Field(None, description="The git commit author of the last commit that modified this file.") + commit_author_email: Optional[str] = Field(None, description="The git commit author email of the last commit that modified this file.") + + +class SourceFileInfo(BaseModel): + metadata_etag: Optional[str] = None + metadata_file_path: Optional[str] = None + metadata_bucket_name: Optional[str] = None + metadata_last_modified: Optional[str] = None + registry_entry_generated_at: Optional[str] = None + + +class ConnectorMetrics(BaseModel): + all: Optional[Any] = None + cloud: Optional[Any] = None + oss: Optional[Any] = None + + +class ConnectorMetric(BaseModel): + class Config: + extra = Extra.allow + + usage: Optional[Union[str, Literal["low", "medium", "high"]]] = None + sync_success_rate: Optional[Union[str, Literal["low", "medium", "high"]]] = None + connector_version: Optional[str] = None + + +class ConnectorPackageInfo(BaseModel): + cdk_version: Optional[str] = None + + +class NormalizationDestinationDefinitionConfig(BaseModel): + class Config: + extra = Extra.allow + + normalizationRepository: str = Field( + ..., + description="a field indicating the name of the repository to be used for normalization. 
If the value of the flag is NULL - normalization is not used.", + ) + normalizationTag: str = Field(..., description="a field indicating the tag of the docker repository to be used for normalization.") + normalizationIntegrationType: str = Field( + ..., description="a field indicating the type of integration dialect to use for normalization." + ) + + +class BreakingChangeScope(BaseModel): + __root__: StreamBreakingChangeScope = Field(..., description="A scope that can be used to limit the impact of a breaking change.") + + +class JobTypeResourceLimit(BaseModel): + class Config: + extra = Extra.forbid + + jobType: JobType + resourceRequirements: ResourceRequirements + + +class GeneratedFields(BaseModel): + git: Optional[GitInfo] = None + source_file_info: Optional[SourceFileInfo] = None + metrics: Optional[ConnectorMetrics] = None + sbomUrl: Optional[str] = Field(None, description="URL to the SBOM file") + + +class VersionBreakingChange(BaseModel): + class Config: + extra = Extra.forbid + + upgradeDeadline: date = Field(..., description="The deadline by which to upgrade before the breaking change takes effect.") + message: str = Field(..., description="Descriptive message detailing the breaking change.") + migrationDocumentationUrl: Optional[AnyUrl] = Field( + None, + description="URL to documentation on how to migrate to the current version. Defaults to ${documentationUrl}-migrations#${version}", + ) + scopedImpact: Optional[List[BreakingChangeScope]] = Field( + None, + description="List of scopes that are impacted by the breaking change. If not specified, the breaking change cannot be scoped to reduce impact via the supported scope types.", + min_items=1, + ) + + +class ActorDefinitionResourceRequirements(BaseModel): + class Config: + extra = Extra.forbid + + default: Optional[ResourceRequirements] = Field( + None, description="if set, these are the requirements that should be set for ALL jobs run for this actor definition." 
+ ) + jobSpecific: Optional[List[JobTypeResourceLimit]] = None + + +class ConnectorBreakingChanges(BaseModel): + class Config: + extra = Extra.forbid + + __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionBreakingChange] = Field( + ..., + description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade.", + title="ConnectorBreakingChanges", + ) + + +class ConnectorRegistryReleases(BaseModel): + class Config: + extra = Extra.forbid + + releaseCandidates: Optional[ConnectorReleaseCandidates] = None + rolloutConfiguration: Optional[RolloutConfiguration] = None + breakingChanges: Optional[ConnectorBreakingChanges] = None + migrationDocumentationUrl: Optional[AnyUrl] = Field( + None, + description="URL to documentation on how to migrate from the previous version to the current version. Defaults to ${documentationUrl}-migrations", + ) + + +class ConnectorReleaseCandidates(BaseModel): + class Config: + extra = Extra.forbid + + __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionReleaseCandidate] = Field( + ..., description="Each entry denotes a release candidate version of a connector." + ) + + +class VersionReleaseCandidate(BaseModel): + class Config: + extra = Extra.forbid + + __root__: Union[ConnectorRegistrySourceDefinition, ConnectorRegistryDestinationDefinition] = Field( + ..., description="Contains information about a release candidate version of a connector." + ) + + +class ConnectorRegistrySourceDefinition(BaseModel): + class Config: + extra = Extra.allow + + sourceDefinitionId: UUID + name: str + dockerRepository: str + dockerImageTag: str + documentationUrl: str + icon: Optional[str] = None + iconUrl: Optional[str] = None + sourceType: Optional[Literal["api", "file", "database", "custom"]] = None + spec: Dict[str, Any] + tombstone: Optional[bool] = Field( + False, description="if false, the configuration is active. if true, then this configuration is permanently off." 
+ ) + public: Optional[bool] = Field(False, description="true if this connector definition is available to all workspaces") + custom: Optional[bool] = Field(False, description="whether this is a custom connector definition") + releaseStage: Optional[ReleaseStage] = None + supportLevel: Optional[SupportLevel] = None + releaseDate: Optional[date] = Field(None, description="The date when this connector was first released, in yyyy-mm-dd format.") + resourceRequirements: Optional[ActorDefinitionResourceRequirements] = None + protocolVersion: Optional[str] = Field(None, description="the Airbyte Protocol version supported by the connector") + allowedHosts: Optional[AllowedHosts] = None + suggestedStreams: Optional[SuggestedStreams] = None + maxSecondsBetweenMessages: Optional[int] = Field( + None, description="Number of seconds allowed between 2 airbyte protocol messages. The source will timeout if this delay is reach" + ) + erdUrl: Optional[str] = Field(None, description="The URL where you can visualize the ERD") + releases: Optional[ConnectorRegistryReleases] = None + ab_internal: Optional[AirbyteInternal] = None + generated: Optional[GeneratedFields] = None + packageInfo: Optional[ConnectorPackageInfo] = None + language: Optional[str] = Field(None, description="The language the connector is written in") + + +class ConnectorRegistryDestinationDefinition(BaseModel): + class Config: + extra = Extra.allow + + destinationDefinitionId: UUID + name: str + dockerRepository: str + dockerImageTag: str + documentationUrl: str + icon: Optional[str] = None + iconUrl: Optional[str] = None + spec: Dict[str, Any] + tombstone: Optional[bool] = Field( + False, description="if false, the configuration is active. if true, then this configuration is permanently off." 
+ ) + public: Optional[bool] = Field(False, description="true if this connector definition is available to all workspaces") + custom: Optional[bool] = Field(False, description="whether this is a custom connector definition") + releaseStage: Optional[ReleaseStage] = None + supportLevel: Optional[SupportLevel] = None + releaseDate: Optional[date] = Field(None, description="The date when this connector was first released, in yyyy-mm-dd format.") + tags: Optional[List[str]] = Field( + None, description="An array of tags that describe the connector. E.g: language:python, keyword:rds, etc." + ) + resourceRequirements: Optional[ActorDefinitionResourceRequirements] = None + protocolVersion: Optional[str] = Field(None, description="the Airbyte Protocol version supported by the connector") + normalizationConfig: Optional[NormalizationDestinationDefinitionConfig] = None + supportsDbt: Optional[bool] = Field( + None, + description="an optional flag indicating whether DBT is used in the normalization. 
If the flag value is NULL - DBT is not used.", + ) + allowedHosts: Optional[AllowedHosts] = None + releases: Optional[ConnectorRegistryReleases] = None + ab_internal: Optional[AirbyteInternal] = None + supportsRefreshes: Optional[bool] = False + generated: Optional[GeneratedFields] = None + packageInfo: Optional[ConnectorPackageInfo] = None + language: Optional[str] = Field(None, description="The language the connector is written in") + + +ConnectorRegistryReleases.update_forward_refs() +ConnectorReleaseCandidates.update_forward_refs() +VersionReleaseCandidate.update_forward_refs() diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistrySourceDefinition.py b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistrySourceDefinition.py index 36e8c1f1e856..be46a26c7213 100644 --- a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistrySourceDefinition.py +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistrySourceDefinition.py @@ -82,6 +82,20 @@ class Config: impactedScopes: List[str] = Field(..., description="List of streams that are impacted by the breaking change.", min_items=1) +class NormalizationDestinationDefinitionConfig(BaseModel): + class Config: + extra = Extra.allow + + normalizationRepository: str = Field( + ..., + description="a field indicating the name of the repository to be used for normalization. If the value of the flag is NULL - normalization is not used.", + ) + normalizationTag: str = Field(..., description="a field indicating the tag of the docker repository to be used for normalization.") + normalizationIntegrationType: str = Field( + ..., description="a field indicating the type of integration dialect to use for normalization." 
+ ) + + class AirbyteInternal(BaseModel): class Config: extra = Extra.allow @@ -178,35 +192,90 @@ class Config: extra = Extra.forbid __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionBreakingChange] = Field( - ..., description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade." + ..., + description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade.", + title="ConnectorBreakingChanges", ) -class ConnectorReleases(BaseModel): +class ConnectorRegistrySourceDefinition(BaseModel): + class Config: + extra = Extra.allow + + sourceDefinitionId: UUID + name: str + dockerRepository: str + dockerImageTag: str + documentationUrl: str + icon: Optional[str] = None + iconUrl: Optional[str] = None + sourceType: Optional[Literal["api", "file", "database", "custom"]] = None + spec: Dict[str, Any] + tombstone: Optional[bool] = Field( + False, description="if false, the configuration is active. if true, then this configuration is permanently off." + ) + public: Optional[bool] = Field(False, description="true if this connector definition is available to all workspaces") + custom: Optional[bool] = Field(False, description="whether this is a custom connector definition") + releaseStage: Optional[ReleaseStage] = None + supportLevel: Optional[SupportLevel] = None + releaseDate: Optional[date] = Field(None, description="The date when this connector was first released, in yyyy-mm-dd format.") + resourceRequirements: Optional[ActorDefinitionResourceRequirements] = None + protocolVersion: Optional[str] = Field(None, description="the Airbyte Protocol version supported by the connector") + allowedHosts: Optional[AllowedHosts] = None + suggestedStreams: Optional[SuggestedStreams] = None + maxSecondsBetweenMessages: Optional[int] = Field( + None, description="Number of seconds allowed between 2 airbyte protocol messages. 
The source will timeout if this delay is reach" + ) + erdUrl: Optional[str] = Field(None, description="The URL where you can visualize the ERD") + releases: Optional[ConnectorRegistryReleases] = None + ab_internal: Optional[AirbyteInternal] = None + generated: Optional[GeneratedFields] = None + packageInfo: Optional[ConnectorPackageInfo] = None + language: Optional[str] = Field(None, description="The language the connector is written in") + + +class ConnectorRegistryReleases(BaseModel): class Config: extra = Extra.forbid - isReleaseCandidate: Optional[bool] = Field(False, description="Whether the release is eligible to be a release candidate.") + releaseCandidates: Optional[ConnectorReleaseCandidates] = None rolloutConfiguration: Optional[RolloutConfiguration] = None - breakingChanges: ConnectorBreakingChanges + breakingChanges: Optional[ConnectorBreakingChanges] = None migrationDocumentationUrl: Optional[AnyUrl] = Field( None, description="URL to documentation on how to migrate from the previous version to the current version. Defaults to ${documentationUrl}-migrations", ) -class ConnectorRegistrySourceDefinition(BaseModel): +class ConnectorReleaseCandidates(BaseModel): + class Config: + extra = Extra.forbid + + __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionReleaseCandidate] = Field( + ..., description="Each entry denotes a release candidate version of a connector." + ) + + +class VersionReleaseCandidate(BaseModel): + class Config: + extra = Extra.forbid + + __root__: Union[ConnectorRegistrySourceDefinition, ConnectorRegistryDestinationDefinition] = Field( + ..., description="Contains information about a release candidate version of a connector." 
+ ) + + +class ConnectorRegistryDestinationDefinition(BaseModel): class Config: extra = Extra.allow - sourceDefinitionId: UUID + destinationDefinitionId: UUID name: str dockerRepository: str dockerImageTag: str documentationUrl: str icon: Optional[str] = None iconUrl: Optional[str] = None - sourceType: Optional[Literal["api", "file", "database", "custom"]] = None spec: Dict[str, Any] tombstone: Optional[bool] = Field( False, description="if false, the configuration is active. if true, then this configuration is permanently off." @@ -216,18 +285,26 @@ class Config: releaseStage: Optional[ReleaseStage] = None supportLevel: Optional[SupportLevel] = None releaseDate: Optional[date] = Field(None, description="The date when this connector was first released, in yyyy-mm-dd format.") + tags: Optional[List[str]] = Field( + None, description="An array of tags that describe the connector. E.g: language:python, keyword:rds, etc." + ) resourceRequirements: Optional[ActorDefinitionResourceRequirements] = None protocolVersion: Optional[str] = Field(None, description="the Airbyte Protocol version supported by the connector") - allowedHosts: Optional[AllowedHosts] = None - suggestedStreams: Optional[SuggestedStreams] = None - maxSecondsBetweenMessages: Optional[int] = Field( - None, description="Number of seconds allowed between 2 airbyte protocol messages. The source will timeout if this delay is reach" + normalizationConfig: Optional[NormalizationDestinationDefinitionConfig] = None + supportsDbt: Optional[bool] = Field( + None, + description="an optional flag indicating whether DBT is used in the normalization. 
If the flag value is NULL - DBT is not used.", ) - erdUrl: Optional[str] = Field(None, description="The URL where you can visualize the ERD") - releases: Optional[ConnectorReleases] = None + allowedHosts: Optional[AllowedHosts] = None + releases: Optional[ConnectorRegistryReleases] = None ab_internal: Optional[AirbyteInternal] = None + supportsRefreshes: Optional[bool] = False generated: Optional[GeneratedFields] = None packageInfo: Optional[ConnectorPackageInfo] = None - language: Optional[str] = Field( - None, description="The language the connector is written in" - ) + language: Optional[str] = Field(None, description="The language the connector is written in") + + +ConnectorRegistrySourceDefinition.update_forward_refs() +ConnectorRegistryReleases.update_forward_refs() +ConnectorReleaseCandidates.update_forward_refs() +VersionReleaseCandidate.update_forward_refs() diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryV0.py b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryV0.py index a14e04684aa6..739f395e33d2 100644 --- a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryV0.py +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryV0.py @@ -86,6 +86,16 @@ class Config: impactedScopes: List[str] = Field(..., description="List of streams that are impacted by the breaking change.", min_items=1) +class SuggestedStreams(BaseModel): + class Config: + extra = Extra.allow + + streams: Optional[List[str]] = Field( + None, + description="An array of streams that this connector suggests the average user will want. SuggestedStreams not being present for the source means that all streams are suggested. 
An empty list here means that no streams are suggested.", + ) + + class AirbyteInternal(BaseModel): class Config: extra = Extra.allow @@ -131,16 +141,6 @@ class ConnectorPackageInfo(BaseModel): cdk_version: Optional[str] = None -class SuggestedStreams(BaseModel): - class Config: - extra = Extra.allow - - streams: Optional[List[str]] = Field( - None, - description="An array of streams that this connector suggests the average user will want. SuggestedStreams not being present for the source means that all streams are suggested. An empty list here means that no streams are suggested.", - ) - - class JobTypeResourceLimit(BaseModel): class Config: extra = Extra.forbid @@ -192,35 +192,28 @@ class Config: extra = Extra.forbid __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionBreakingChange] = Field( - ..., description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade." + ..., + description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade.", + title="ConnectorBreakingChanges", ) -class ConnectorReleases(BaseModel): - class Config: - extra = Extra.forbid - - isReleaseCandidate: Optional[bool] = Field(False, description="Whether the release is eligible to be a release candidate.") - rolloutConfiguration: Optional[RolloutConfiguration] = None - breakingChanges: ConnectorBreakingChanges - migrationDocumentationUrl: Optional[AnyUrl] = Field( - None, - description="URL to documentation on how to migrate from the previous version to the current version. 
Defaults to ${documentationUrl}-migrations", - ) +class ConnectorRegistryV0(BaseModel): + destinations: List[ConnectorRegistryDestinationDefinition] + sources: List[ConnectorRegistrySourceDefinition] -class ConnectorRegistrySourceDefinition(BaseModel): +class ConnectorRegistryDestinationDefinition(BaseModel): class Config: extra = Extra.allow - sourceDefinitionId: UUID + destinationDefinitionId: UUID name: str dockerRepository: str dockerImageTag: str documentationUrl: str icon: Optional[str] = None iconUrl: Optional[str] = None - sourceType: Optional[Literal["api", "file", "database", "custom"]] = None spec: Dict[str, Any] tombstone: Optional[bool] = Field( False, description="if false, the configuration is active. if true, then this configuration is permanently off." @@ -230,34 +223,68 @@ class Config: releaseStage: Optional[ReleaseStage] = None supportLevel: Optional[SupportLevel] = None releaseDate: Optional[date] = Field(None, description="The date when this connector was first released, in yyyy-mm-dd format.") + tags: Optional[List[str]] = Field( + None, description="An array of tags that describe the connector. E.g: language:python, keyword:rds, etc." + ) resourceRequirements: Optional[ActorDefinitionResourceRequirements] = None protocolVersion: Optional[str] = Field(None, description="the Airbyte Protocol version supported by the connector") - allowedHosts: Optional[AllowedHosts] = None - suggestedStreams: Optional[SuggestedStreams] = None - maxSecondsBetweenMessages: Optional[int] = Field( - None, description="Number of seconds allowed between 2 airbyte protocol messages. The source will timeout if this delay is reach" + normalizationConfig: Optional[NormalizationDestinationDefinitionConfig] = None + supportsDbt: Optional[bool] = Field( + None, + description="an optional flag indicating whether DBT is used in the normalization. 
If the flag value is NULL - DBT is not used.", ) - erdUrl: Optional[str] = Field(None, description="The URL where you can visualize the ERD") - releases: Optional[ConnectorReleases] = None + allowedHosts: Optional[AllowedHosts] = None + releases: Optional[ConnectorRegistryReleases] = None ab_internal: Optional[AirbyteInternal] = None + supportsRefreshes: Optional[bool] = False generated: Optional[GeneratedFields] = None packageInfo: Optional[ConnectorPackageInfo] = None - language: Optional[str] = Field( - None, description="The language the connector is written in" + language: Optional[str] = Field(None, description="The language the connector is written in") + + +class ConnectorRegistryReleases(BaseModel): + class Config: + extra = Extra.forbid + + releaseCandidates: Optional[ConnectorReleaseCandidates] = None + rolloutConfiguration: Optional[RolloutConfiguration] = None + breakingChanges: Optional[ConnectorBreakingChanges] = None + migrationDocumentationUrl: Optional[AnyUrl] = Field( + None, + description="URL to documentation on how to migrate from the previous version to the current version. Defaults to ${documentationUrl}-migrations", ) -class ConnectorRegistryDestinationDefinition(BaseModel): +class ConnectorReleaseCandidates(BaseModel): + class Config: + extra = Extra.forbid + + __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionReleaseCandidate] = Field( + ..., description="Each entry denotes a release candidate version of a connector." + ) + + +class VersionReleaseCandidate(BaseModel): + class Config: + extra = Extra.forbid + + __root__: Union[ConnectorRegistrySourceDefinition, ConnectorRegistryDestinationDefinition] = Field( + ..., description="Contains information about a release candidate version of a connector." 
+ ) + + +class ConnectorRegistrySourceDefinition(BaseModel): class Config: extra = Extra.allow - destinationDefinitionId: UUID + sourceDefinitionId: UUID name: str dockerRepository: str dockerImageTag: str documentationUrl: str icon: Optional[str] = None iconUrl: Optional[str] = None + sourceType: Optional[Literal["api", "file", "database", "custom"]] = None spec: Dict[str, Any] tombstone: Optional[bool] = Field( False, description="if false, the configuration is active. if true, then this configuration is permanently off." @@ -267,27 +294,23 @@ class Config: releaseStage: Optional[ReleaseStage] = None supportLevel: Optional[SupportLevel] = None releaseDate: Optional[date] = Field(None, description="The date when this connector was first released, in yyyy-mm-dd format.") - tags: Optional[List[str]] = Field( - None, description="An array of tags that describe the connector. E.g: language:python, keyword:rds, etc." - ) resourceRequirements: Optional[ActorDefinitionResourceRequirements] = None protocolVersion: Optional[str] = Field(None, description="the Airbyte Protocol version supported by the connector") - normalizationConfig: Optional[NormalizationDestinationDefinitionConfig] = None - supportsDbt: Optional[bool] = Field( - None, - description="an optional flag indicating whether DBT is used in the normalization. If the flag value is NULL - DBT is not used.", - ) allowedHosts: Optional[AllowedHosts] = None - releases: Optional[ConnectorReleases] = None + suggestedStreams: Optional[SuggestedStreams] = None + maxSecondsBetweenMessages: Optional[int] = Field( + None, description="Number of seconds allowed between 2 airbyte protocol messages. 
The source will timeout if this delay is reach" + ) + erdUrl: Optional[str] = Field(None, description="The URL where you can visualize the ERD") + releases: Optional[ConnectorRegistryReleases] = None ab_internal: Optional[AirbyteInternal] = None - supportsRefreshes: Optional[bool] = False generated: Optional[GeneratedFields] = None packageInfo: Optional[ConnectorPackageInfo] = None - language: Optional[str] = Field( - None, description="The language the connector is written in" - ) + language: Optional[str] = Field(None, description="The language the connector is written in") -class ConnectorRegistryV0(BaseModel): - destinations: List[ConnectorRegistryDestinationDefinition] - sources: List[ConnectorRegistrySourceDefinition] +ConnectorRegistryV0.update_forward_refs() +ConnectorRegistryDestinationDefinition.update_forward_refs() +ConnectorRegistryReleases.update_forward_refs() +ConnectorReleaseCandidates.update_forward_refs() +VersionReleaseCandidate.update_forward_refs() diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorReleases.py b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorReleases.py index 4e33fa7f6358..ab3399094a93 100644 --- a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorReleases.py +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorReleases.py @@ -58,7 +58,9 @@ class Config: extra = Extra.forbid __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionBreakingChange] = Field( - ..., description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade." 
+ ..., + description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade.", + title="ConnectorBreakingChanges", ) diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/__init__.py b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/__init__.py index 00c75f10006c..fe744caace4b 100644 --- a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/__init__.py +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/__init__.py @@ -2,11 +2,13 @@ from .ActorDefinitionResourceRequirements import * from .AirbyteInternal import * from .AllowedHosts import * +from .ConnectorBreakingChanges import * from .ConnectorBuildOptions import * from .ConnectorMetadataDefinitionV0 import * from .ConnectorMetrics import * from .ConnectorPackageInfo import * from .ConnectorRegistryDestinationDefinition import * +from .ConnectorRegistryReleases import * from .ConnectorRegistrySourceDefinition import * from .ConnectorRegistryV0 import * from .ConnectorReleases import * diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorBreakingChanges.yaml b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorBreakingChanges.yaml new file mode 100644 index 000000000000..584d29ae8c8c --- /dev/null +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorBreakingChanges.yaml @@ -0,0 +1,59 @@ +--- +"$schema": http://json-schema.org/draft-07/schema# +"$id": https://github.com/airbytehq/airbyte/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorBreakingChanges.yaml +title: ConnectorBreakingChanges +description: Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade. 
+type: object +additionalProperties: false +minProperties: 1 +patternProperties: + "^\\d+\\.\\d+\\.\\d+$": + $ref: "#/definitions/VersionBreakingChange" +definitions: + VersionBreakingChange: + description: Contains information about a breaking change, including the deadline to upgrade and a message detailing the change. + type: object + additionalProperties: false + required: + - upgradeDeadline + - message + properties: + upgradeDeadline: + description: The deadline by which to upgrade before the breaking change takes effect. + type: string + format: date + message: + description: Descriptive message detailing the breaking change. + type: string + migrationDocumentationUrl: + description: URL to documentation on how to migrate to the current version. Defaults to ${documentationUrl}-migrations#${version} + type: string + format: uri + scopedImpact: + description: List of scopes that are impacted by the breaking change. If not specified, the breaking change cannot be scoped to reduce impact via the supported scope types. + type: array + minItems: 1 + items: + $ref: "#/definitions/BreakingChangeScope" + BreakingChangeScope: + description: A scope that can be used to limit the impact of a breaking change. + type: object + oneOf: + - $ref: "#/definitions/StreamBreakingChangeScope" + StreamBreakingChangeScope: + description: A scope that can be used to limit the impact of a breaking change to specific streams. + type: object + additionalProperties: false + required: + - scopeType + - impactedScopes + properties: + scopeType: + type: const + const: stream + impactedScopes: + description: List of streams that are impacted by the breaking change. 
+ type: array + minItems: 1 + items: + type: string diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistryDestinationDefinition.yaml b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistryDestinationDefinition.yaml index 9ee74b57c8c9..32d4fc47ba2b 100644 --- a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistryDestinationDefinition.yaml +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistryDestinationDefinition.yaml @@ -69,7 +69,7 @@ properties: allowedHosts: "$ref": AllowedHosts.yaml releases: - "$ref": ConnectorReleases.yaml + "$ref": ConnectorRegistryReleases.yaml ab_internal: "$ref": AirbyteInternal.yaml supportsRefreshes: diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistryReleases.yaml b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistryReleases.yaml new file mode 100644 index 000000000000..c219572fb393 --- /dev/null +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistryReleases.yaml @@ -0,0 +1,35 @@ +--- +"$schema": http://json-schema.org/draft-07/schema# +"$id": https://github.com/airbytehq/airbyte/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistryReleases.yaml +title: ConnectorRegistryReleases +description: Contains information about different types of releases for a connector. +type: object +additionalProperties: false +properties: + releaseCandidates: + $ref: "#/definitions/ConnectorReleaseCandidates" + rolloutConfiguration: + $ref: RolloutConfiguration.yaml + breakingChanges: + $ref: ConnectorBreakingChanges.yaml + migrationDocumentationUrl: + description: URL to documentation on how to migrate from the previous version to the current version. 
Defaults to ${documentationUrl}-migrations + type: string + format: uri +definitions: + ConnectorReleaseCandidates: + description: Each entry denotes a release candidate version of a connector. + type: object + additionalProperties: false + minProperties: 1 + maxProperties: 1 + patternProperties: + "^\\d+\\.\\d+\\.\\d+$": + $ref: "#/definitions/VersionReleaseCandidate" + VersionReleaseCandidate: + description: Contains information about a release candidate version of a connector. + additionalProperties: false + type: object + oneOf: + - $ref: ConnectorRegistrySourceDefinition.yaml + - $ref: ConnectorRegistryDestinationDefinition.yaml diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistrySourceDefinition.yaml b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistrySourceDefinition.yaml index 9052fa841185..31b424c00ad6 100644 --- a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistrySourceDefinition.yaml +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistrySourceDefinition.yaml @@ -74,7 +74,7 @@ properties: type: string description: The URL where you can visualize the ERD releases: - "$ref": ConnectorReleases.yaml + "$ref": ConnectorRegistryReleases.yaml ab_internal: "$ref": AirbyteInternal.yaml generated: diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorReleases.yaml b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorReleases.yaml index b670415bf938..390974f6fbb8 100644 --- a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorReleases.yaml +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorReleases.yaml @@ -15,64 +15,8 @@ properties: rolloutConfiguration: $ref: RolloutConfiguration.yaml breakingChanges: - $ref: "#/definitions/ConnectorBreakingChanges" + $ref: 
ConnectorBreakingChanges.yaml migrationDocumentationUrl: description: URL to documentation on how to migrate from the previous version to the current version. Defaults to ${documentationUrl}-migrations type: string format: uri -definitions: - ConnectorBreakingChanges: - description: Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade. - type: object - additionalProperties: false - minProperties: 1 - patternProperties: - "^\\d+\\.\\d+\\.\\d+$": - $ref: "#/definitions/VersionBreakingChange" - VersionBreakingChange: - description: Contains information about a breaking change, including the deadline to upgrade and a message detailing the change. - type: object - additionalProperties: false - required: - - upgradeDeadline - - message - properties: - upgradeDeadline: - description: The deadline by which to upgrade before the breaking change takes effect. - type: string - format: date - message: - description: Descriptive message detailing the breaking change. - type: string - migrationDocumentationUrl: - description: URL to documentation on how to migrate to the current version. Defaults to ${documentationUrl}-migrations#${version} - type: string - format: uri - scopedImpact: - description: List of scopes that are impacted by the breaking change. If not specified, the breaking change cannot be scoped to reduce impact via the supported scope types. - type: array - minItems: 1 - items: - $ref: "#/definitions/BreakingChangeScope" - BreakingChangeScope: - description: A scope that can be used to limit the impact of a breaking change. - type: object - oneOf: - - $ref: "#/definitions/StreamBreakingChangeScope" - StreamBreakingChangeScope: - description: A scope that can be used to limit the impact of a breaking change to specific streams. 
- type: object - additionalProperties: false - required: - - scopeType - - impactedScopes - properties: - scopeType: - type: const - const: stream - impactedScopes: - description: List of streams that are impacted by the breaking change. - type: array - minItems: 1 - items: - type: string diff --git a/airbyte-ci/connectors/metadata_service/lib/pyproject.toml b/airbyte-ci/connectors/metadata_service/lib/pyproject.toml index d97f8edd7a1e..bbdeadf96d2d 100644 --- a/airbyte-ci/connectors/metadata_service/lib/pyproject.toml +++ b/airbyte-ci/connectors/metadata_service/lib/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metadata-service" -version = "0.13.1" +version = "0.14.0" description = "" authors = ["Ben Church "] readme = "README.md" diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/__init__.py b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/__init__.py index d4866dd5f878..b919db828162 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/__init__.py +++ b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/__init__.py @@ -94,7 +94,7 @@ ), "latest_metadata_file_blobs": gcs_directory_blobs.configured( {"gcs_bucket": {"env": "METADATA_BUCKET"}, "prefix": METADATA_FOLDER, "match_regex": f".*latest/{METADATA_FILE_NAME}$"} - ), + ) } DATA_WAREHOUSE_RESOURCE_TREE = { @@ -125,6 +125,12 @@ "latest_oss_registry_entries_file_blobs": gcs_directory_blobs.configured( {"gcs_bucket": {"env": "METADATA_BUCKET"}, "prefix": METADATA_FOLDER, "match_regex": f".*latest/oss.json$"} ), + "release_candidate_cloud_registry_entries_file_blobs": gcs_directory_blobs.configured( + {"gcs_bucket": {"env": "METADATA_BUCKET"}, "prefix": METADATA_FOLDER, "match_regex": f".*release_candidate/cloud.json$"} + ), + "release_candidate_oss_registry_entries_file_blobs": gcs_directory_blobs.configured( + {"gcs_bucket": {"env": "METADATA_BUCKET"}, "prefix": METADATA_FOLDER, "match_regex": 
f".*release_candidate/oss.json$"} + ), } CONNECTOR_TEST_REPORT_SENSOR_RESOURCE_TREE = { @@ -167,12 +173,26 @@ gcs_blobs_resource_key="latest_oss_registry_entries_file_blobs", interval=60, ), + new_gcs_blobs_sensor( + job=generate_oss_registry, + resources_def=REGISTRY_ENTRY_RESOURCE_TREE, + gcs_blobs_resource_key="release_candidate_oss_registry_entries_file_blobs", + interval=60, + allow_duplicate_runs=True, + ), new_gcs_blobs_sensor( job=generate_cloud_registry, resources_def=REGISTRY_ENTRY_RESOURCE_TREE, gcs_blobs_resource_key="latest_cloud_registry_entries_file_blobs", interval=60, ), + new_gcs_blobs_sensor( + job=generate_cloud_registry, + resources_def=REGISTRY_ENTRY_RESOURCE_TREE, + gcs_blobs_resource_key="release_candidate_cloud_registry_entries_file_blobs", + interval=60, + allow_duplicate_runs=True, + ), new_gcs_blobs_sensor( job=generate_nightly_reports, resources_def=CONNECTOR_TEST_REPORT_SENSOR_RESOURCE_TREE, @@ -184,7 +204,7 @@ SCHEDULES = [ ScheduleDefinition(job=add_new_metadata_partitions, cron_schedule="*/2 * * * *", tags={"dagster/priority": HIGH_QUEUE_PRIORITY}), ScheduleDefinition( - cron_schedule="0 1 * * *", # Daily at 1am US/Pacific + cron_schedule="*/2 * * * *", # Every 2 minutes execution_timezone="US/Pacific", job=remove_stale_metadata_partitions, ), diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/connector_metrics.py b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/connector_metrics.py index a8cda7468c20..20afa5a6b391 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/connector_metrics.py +++ b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/connector_metrics.py @@ -59,7 +59,7 @@ def _convert_json_to_metrics_dict(jsonl_string: str) -> dict: @asset(required_resource_keys={"latest_metrics_gcs_blob"}, group_name=GROUP_NAME) @sentry.instrument_asset_op -def latest_connnector_metrics(context: OpExecutionContext) -> dict: +def 
latest_connector_metrics(context: OpExecutionContext) -> dict: latest_metrics_gcs_blob = context.resources.latest_metrics_gcs_blob latest_metrics_jsonl = _safe_read_gcs_file(latest_metrics_gcs_blob) diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/github.py b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/github.py index a6c25648ffbf..137a7d7a7222 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/github.py +++ b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/github.py @@ -13,7 +13,7 @@ from dagster import OpExecutionContext, Output, asset from github import Repository from orchestrator.logging import sentry -from orchestrator.models.metadata import LatestMetadataEntry, MetadataDefinition, PartialMetadataDefinition +from orchestrator.models.metadata import LatestMetadataEntry, MetadataDefinition from orchestrator.ops.slack import send_slack_message from orchestrator.utils.dagster_helpers import OutputDataFrame, output_dataframe @@ -102,27 +102,56 @@ def github_metadata_definitions(context): return Output(metadata_definitions, metadata={"preview": [md.json() for md in metadata_definitions]}) +def entry_is_younger_than_grace_period(entry: LatestMetadataEntry) -> bool: + grace_period_marker = datetime.datetime.now(datetime.timezone.utc) - PUBLISH_GRACE_PERIOD + entry_last_modified = datetime.datetime.strptime(entry.last_modified, "%a, %d %b %Y %H:%M:%S %Z").replace(tzinfo=datetime.timezone.utc) + return entry_last_modified > grace_period_marker + + +def entry_should_be_on_gcs(metadata_entry: LatestMetadataEntry) -> bool: + """For stale metadata detection, we only want to scan latest metadata files from our master branch that are expected to be on GCS. 
+ A metadata file should be on GCS, in the latest directory, if: + - it is not archived + - not a release candidate + - has been published for more than the grace period (just to reduce false positives when publish pipeline and stale detection run concurrently). + + Args: + metadata_entry (LatestMetadataEntry): The metadata entry to check + + Returns: + bool: True if the metadata entry should be on GCS, False otherwise + """ + if metadata_entry.metadata_definition.data.supportLevel == "archived": + return False + if getattr(metadata_entry.metadata_definition.releases, "isReleaseCandidate", False): + return False + if entry_is_younger_than_grace_period(metadata_entry): + return False + return True + + @asset(required_resource_keys={"slack"}, group_name=GROUP_NAME) -def stale_gcs_latest_metadata_file(context, github_metadata_definitions: list, metadata_definitions: list) -> OutputDataFrame: +def stale_gcs_latest_metadata_file(context, github_metadata_definitions: list, latest_metadata_entries: list) -> OutputDataFrame: """ Return a list of all metadata files in the github repo and denote whether they are stale or not. Stale means that the file in the github repo is not in the latest metadata file blobs. """ + + # TODO: + # The logic here is not bulletproof. It can't find release candidate metadata which did not make their way to GCS. + # We should improve this logic to be able to detect those cases as well. 
+ latest_versions_on_gcs = { metadata_entry.metadata_definition.data.dockerRepository: metadata_entry.metadata_definition.data.dockerImageTag - for metadata_entry in metadata_definitions + for metadata_entry in latest_metadata_entries if metadata_entry.metadata_definition.data.supportLevel != "archived" } - now = datetime.datetime.now(datetime.timezone.utc) latest_versions_on_github = { metadata_entry.metadata_definition.data.dockerRepository: metadata_entry.metadata_definition.data.dockerImageTag for metadata_entry in github_metadata_definitions - if metadata_entry.metadata_definition.data.supportLevel - != "archived" # We give a 2 hour grace period for the metadata to be updated - and datetime.datetime.strptime(metadata_entry.last_modified, "%a, %d %b %Y %H:%M:%S %Z").replace(tzinfo=datetime.timezone.utc) - < now - PUBLISH_GRACE_PERIOD + if entry_should_be_on_gcs(metadata_entry) } stale_connectors = [] diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/metadata.py b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/metadata.py index 60159fe35e04..ea3d0fb54a7d 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/metadata.py +++ b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/metadata.py @@ -103,16 +103,9 @@ def compute_registry_overrides(merged_df): return registries -# ASSETS - - -@asset(required_resource_keys={"latest_metadata_file_blobs"}, group_name=GROUP_NAME) -@sentry.instrument_asset_op -def metadata_definitions(context: OpExecutionContext) -> List[LatestMetadataEntry]: - latest_metadata_file_blobs = context.resources.latest_metadata_file_blobs - +def get_metadata_entries(blob_resource) -> Output: metadata_entries = [] - for blob in latest_metadata_file_blobs: + for blob in blob_resource: yaml_string = blob.download_as_string().decode("utf-8") metadata_dict = yaml.safe_load(yaml_string) metadata_def = MetadataDefinition.parse_obj(metadata_dict) 
@@ -137,4 +130,12 @@ def metadata_definitions(context: OpExecutionContext) -> List[LatestMetadataEntr ) metadata_entries.append(metadata_entry) - return metadata_entries + return Output(metadata_entries, metadata={"preview": [m.file_path for m in metadata_entries]}) + + +# ASSETS +@asset(required_resource_keys={"latest_metadata_file_blobs"}, group_name=GROUP_NAME) +@sentry.instrument_asset_op +def latest_metadata_entries(context: OpExecutionContext) -> Output[List[LatestMetadataEntry]]: + latest_metadata_file_blobs = context.resources.latest_metadata_file_blobs + return get_metadata_entries(latest_metadata_file_blobs) diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/registry.py b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/registry.py index b81b15be1d8f..67de2c6791b0 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/registry.py +++ b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/registry.py @@ -2,21 +2,26 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# +import copy import json -from typing import List +from typing import List, Union import sentry_sdk -from dagster import MetadataValue, OpExecutionContext, Output, asset +from dagster import AutoMaterializePolicy, MetadataValue, OpExecutionContext, Output, asset from dagster_gcp.gcs.file_manager import GCSFileHandle, GCSFileManager -from google.cloud import storage +from metadata_service.models.generated.ConnectorRegistryDestinationDefinition import ConnectorRegistryDestinationDefinition +from metadata_service.models.generated.ConnectorRegistrySourceDefinition import ConnectorRegistrySourceDefinition from metadata_service.models.generated.ConnectorRegistryV0 import ConnectorRegistryV0 from metadata_service.models.transform import to_json_sanitized_dict from orchestrator.assets.registry_entry import ConnectorTypePrimaryKey, ConnectorTypes, read_registry_entry_blob from orchestrator.logging import sentry from orchestrator.logging.publish_connector_lifecycle import PublishConnectorLifecycle, PublishConnectorLifecycleStage, StageStatus +from orchestrator.models.metadata import LatestMetadataEntry, MetadataDefinition from orchestrator.utils.object_helpers import default_none_to_dict from pydash.objects import set_with +PolymorphicRegistryEntry = Union[ConnectorRegistrySourceDefinition, ConnectorRegistryDestinationDefinition] + GROUP_NAME = "registry" @@ -79,13 +84,54 @@ def persist_registry_to_json( return file_handle +@sentry_sdk.trace +def apply_release_candidates( + latest_registry_entry: dict, + release_candidate_registry_entry: PolymorphicRegistryEntry, +) -> dict: + updated_registry_entry = copy.deepcopy(latest_registry_entry) + updated_registry_entry.setdefault("releases", {}) + updated_registry_entry["releases"]["releaseCandidates"] = { + release_candidate_registry_entry.dockerImageTag: to_json_sanitized_dict(release_candidate_registry_entry) + } + return updated_registry_entry + + +def apply_release_candidate_entries(registry_entry_dict: dict, 
docker_repository_to_rc_registry_entry: dict) -> dict: + """Apply the optionally existing release candidate entries to the registry entry. + We need both the release candidate metadata entry and the release candidate registry entry because the metadata entry contains the rollout configuration, and the registry entry contains the actual RC registry entry. + + Args: + registry_entry_dict (dict): The registry entry. + docker_repository_to_rc_registry_entry (dict): Mapping of docker repository to release candidate registry entry. + + Returns: + dict: The registry entry with release candidates applied. + """ + registry_entry_dict = copy.deepcopy(registry_entry_dict) + if registry_entry_dict["dockerRepository"] in docker_repository_to_rc_registry_entry: + release_candidate_registry_entry = docker_repository_to_rc_registry_entry[registry_entry_dict["dockerRepository"]] + registry_entry_dict = apply_release_candidates(registry_entry_dict, release_candidate_registry_entry) + return registry_entry_dict + + +def get_connector_type_from_registry_entry(registry_entry: PolymorphicRegistryEntry) -> ConnectorTypes: + if hasattr(registry_entry, "sourceDefinitionId"): + return ConnectorTypes.SOURCE + elif hasattr(registry_entry, "destinationDefinitionId"): + return ConnectorTypes.DESTINATION + else: + raise ValueError("Registry entry is not a source or destination") + + @sentry_sdk.trace def generate_and_persist_registry( context: OpExecutionContext, - registry_entry_file_blobs: List[storage.Blob], + latest_registry_entries: List, + release_candidate_registry_entries: List, registry_directory_manager: GCSFileManager, registry_name: str, - latest_connnector_metrics: dict, + latest_connector_metrics: dict, ) -> Output[ConnectorRegistryV0]: """Generate the selected registry from the metadata files, and persist it to GCS. 
@@ -104,14 +150,21 @@ def generate_and_persist_registry( ) registry_dict = {"sources": [], "destinations": []} - for blob in registry_entry_file_blobs: - connector_type, registry_entry = read_registry_entry_blob(blob) - plural_connector_type = f"{connector_type}s" - # We santiize the registry entry to ensure its in a format + docker_repository_to_rc_registry_entry = { + release_candidate_registry_entries.dockerRepository: release_candidate_registry_entries + for release_candidate_registry_entries in release_candidate_registry_entries + } + + for latest_registry_entry in latest_registry_entries: + connector_type = get_connector_type_from_registry_entry(latest_registry_entry) + plural_connector_type = f"{connector_type.value}s" + + # We sanitize the registry entry to ensure its in a format # that can be parsed by pydantic. - registry_entry_dict = to_json_sanitized_dict(registry_entry) - enriched_registry_entry_dict = apply_metrics_to_registry_entry(registry_entry_dict, connector_type, latest_connnector_metrics) + registry_entry_dict = to_json_sanitized_dict(latest_registry_entry) + enriched_registry_entry_dict = apply_metrics_to_registry_entry(registry_entry_dict, connector_type, latest_connector_metrics) + enriched_registry_entry_dict = apply_release_candidate_entries(enriched_registry_entry_dict, docker_repository_to_rc_registry_entry) registry_dict[plural_connector_type].append(enriched_registry_entry_dict) @@ -137,46 +190,66 @@ def generate_and_persist_registry( @asset( - required_resource_keys={"slack", "registry_directory_manager", "latest_oss_registry_entries_file_blobs", "latest_metrics_gcs_blob"}, + required_resource_keys={ + "slack", + "registry_directory_manager", + "latest_oss_registry_entries_file_blobs", + "release_candidate_oss_registry_entries_file_blobs", + "latest_metrics_gcs_blob", + }, group_name=GROUP_NAME, ) @sentry.instrument_asset_op -def persisted_oss_registry(context: OpExecutionContext, latest_connnector_metrics: dict) -> 
Output[ConnectorRegistryV0]: +def persisted_oss_registry( + context: OpExecutionContext, + latest_connector_metrics: dict, + latest_oss_registry_entries: List, + release_candidate_oss_registry_entries: List, +) -> Output[ConnectorRegistryV0]: """ This asset is used to generate the oss registry from the registry entries. """ registry_name = "oss" registry_directory_manager = context.resources.registry_directory_manager - latest_oss_registry_entries_file_blobs = context.resources.latest_oss_registry_entries_file_blobs - return generate_and_persist_registry( context=context, - registry_entry_file_blobs=latest_oss_registry_entries_file_blobs, + latest_registry_entries=latest_oss_registry_entries, + release_candidate_registry_entries=release_candidate_oss_registry_entries, registry_directory_manager=registry_directory_manager, registry_name=registry_name, - latest_connnector_metrics=latest_connnector_metrics, + latest_connector_metrics=latest_connector_metrics, ) @asset( - required_resource_keys={"slack", "registry_directory_manager", "latest_cloud_registry_entries_file_blobs", "latest_metrics_gcs_blob"}, + required_resource_keys={ + "slack", + "registry_directory_manager", + "latest_cloud_registry_entries_file_blobs", + "release_candidate_cloud_registry_entries_file_blobs", + "latest_metrics_gcs_blob", + }, group_name=GROUP_NAME, ) @sentry.instrument_asset_op -def persisted_cloud_registry(context: OpExecutionContext, latest_connnector_metrics: dict) -> Output[ConnectorRegistryV0]: +def persisted_cloud_registry( + context: OpExecutionContext, + latest_connector_metrics: dict, + latest_cloud_registry_entries: List, + release_candidate_cloud_registry_entries: List, +) -> Output[ConnectorRegistryV0]: """ This asset is used to generate the cloud registry from the registry entries. 
""" registry_name = "cloud" registry_directory_manager = context.resources.registry_directory_manager - latest_cloud_registry_entries_file_blobs = context.resources.latest_cloud_registry_entries_file_blobs - return generate_and_persist_registry( context=context, - registry_entry_file_blobs=latest_cloud_registry_entries_file_blobs, + latest_registry_entries=latest_cloud_registry_entries, + release_candidate_registry_entries=release_candidate_cloud_registry_entries, registry_directory_manager=registry_directory_manager, registry_name=registry_name, - latest_connnector_metrics=latest_connnector_metrics, + latest_connector_metrics=latest_connector_metrics, ) diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/registry_entry.py b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/registry_entry.py index 3f143d22e574..77f821b6d3e0 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/registry_entry.py +++ b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/registry_entry.py @@ -11,6 +11,7 @@ import orchestrator.hacks as HACKS import pandas as pd +import semver import sentry_sdk from dagster import AutoMaterializePolicy, DynamicPartitionsDefinition, MetadataValue, OpExecutionContext, Output, asset from dagster_gcp.gcs.file_manager import GCSFileHandle, GCSFileManager @@ -90,25 +91,29 @@ def calculate_migration_documentation_url(releases_or_breaking_change: dict, doc @deep_copy_params -def apply_connector_release_defaults(metadata: dict) -> Optional[pd.DataFrame]: - metadata_releases = metadata.get("releases") +def apply_connector_releases(metadata: dict) -> Optional[pd.DataFrame]: documentation_url = metadata.get("documentationUrl") - if metadata_releases is None: - return None + final_registry_releases = {} - # apply defaults for connector releases - metadata_releases["migrationDocumentationUrl"] = calculate_migration_documentation_url(metadata_releases, 
documentation_url) + if metadata.get("releases", {}).get("breakingChanges"): + # apply defaults for connector releases + final_registry_releases["migrationDocumentationUrl"] = calculate_migration_documentation_url( + metadata["releases"], documentation_url + ) - # releases has a dictionary field called breakingChanges, where the key is the version and the value is the data for the breaking change - # each breaking change has a migrationDocumentationUrl field that is optional, so we need to apply defaults to it - breaking_changes = metadata_releases["breakingChanges"] - if breaking_changes is not None: - for version, breaking_change in breaking_changes.items(): - breaking_change["migrationDocumentationUrl"] = calculate_migration_documentation_url( - breaking_change, documentation_url, version - ) + # releases has a dictionary field called breakingChanges, where the key is the version and the value is the data for the breaking change + # each breaking change has a migrationDocumentationUrl field that is optional, so we need to apply defaults to it + breaking_changes = metadata["releases"]["breakingChanges"] + if breaking_changes is not None: + for version, breaking_change in breaking_changes.items(): + breaking_change["migrationDocumentationUrl"] = calculate_migration_documentation_url( + breaking_change, documentation_url, version + ) + final_registry_releases["breakingChanges"] = breaking_changes - return metadata_releases + if metadata.get("releases", {}).get("rolloutConfiguration"): + final_registry_releases["rolloutConfiguration"] = metadata["releases"]["rolloutConfiguration"] + return final_registry_releases @deep_copy_params @@ -278,8 +283,7 @@ def metadata_to_registry_entry(metadata_entry: LatestMetadataEntry, override_reg # apply generated fields overridden_metadata_data["iconUrl"] = metadata_entry.icon_url - overridden_metadata_data["releases"] = apply_connector_release_defaults(overridden_metadata_data) - + overridden_metadata_data["releases"] = 
apply_connector_releases(overridden_metadata_data) return overridden_metadata_data @@ -303,7 +307,7 @@ def get_connector_type_from_registry_entry(registry_entry: dict) -> TaggedRegist raise Exception("Could not determine connector type from registry entry") -def _get_latest_entry_write_path(metadata_path: Optional[str], registry_name: str) -> str: +def _get_directory_write_path(metadata_path: Optional[str], registry_name: str) -> str: """Get the write path for the registry entry, assuming the metadata entry is the latest version.""" if metadata_path is None: raise Exception(f"Metadata entry {metadata_entry} does not have a file path") @@ -316,9 +320,9 @@ def get_registry_entry_write_path( registry_entry: Optional[PolymorphicRegistryEntry], metadata_entry: LatestMetadataEntry, registry_name: str ) -> str: """Get the write path for the registry entry.""" - if metadata_entry.is_latest_version_path: - # if the metadata entry is the latest version, write the registry entry to the same path as the metadata entry - return _get_latest_entry_write_path(metadata_entry.file_path, registry_name) + if metadata_entry.is_latest_version_path or metadata_entry.is_release_candidate_version_path: + # if the metadata entry is the latest or RC version, write the registry entry to the same path as the metadata entry + return _get_directory_write_path(metadata_entry.file_path, registry_name) else: if registry_entry is None: raise Exception(f"Could not determine write path for registry entry {registry_entry} because it is None") @@ -353,29 +357,48 @@ def persist_registry_entry_to_json( return file_handle -@sentry_sdk.trace -def generate_and_persist_registry_entry( +def generate_registry_entry( metadata_entry: LatestMetadataEntry, spec_cache: SpecCache, - metadata_directory_manager: GCSFileManager, registry_name: str, -) -> str: - """Generate the selected registry from the metadata files, and persist it to GCS. 
+) -> PolymorphicRegistryEntry: + """Generate a registry entry given a metadata entry. + Enriches the metadata entry with spec and release candidate information. Args: - context (OpExecutionContext): The execution context. - metadata_entry (List[LatestMetadataEntry]): The metadata definitions. - cached_specs (OutputDataFrame): The cached specs. + metadata_entry (LatestMetadataEntry): The metadata entry. + spec_cache (SpecCache): The spec cache. + registry_name (str): The name of the registry_entry. One of "cloud" or "oss". Returns: - Output[ConnectorRegistryV0]: The registry. + PolymorphicRegistryEntry: The registry entry (could be a source or destination entry). """ raw_entry_dict = metadata_to_registry_entry(metadata_entry, registry_name) registry_entry_with_spec = apply_spec_to_registry_entry(raw_entry_dict, spec_cache, registry_name) _, ConnectorModel = get_connector_type_from_registry_entry(registry_entry_with_spec) - registry_model = ConnectorModel.parse_obj(registry_entry_with_spec) + return ConnectorModel.parse_obj(registry_entry_with_spec) + + +@sentry_sdk.trace +def generate_and_persist_registry_entry( + metadata_entry: LatestMetadataEntry, + spec_cache: SpecCache, + metadata_directory_manager: GCSFileManager, + registry_name: str, +) -> str: + """Generate the selected registry from the metadata files, and persist it to GCS. + + Args: + metadata_entry (List[LatestMetadataEntry]): The metadata entry. + spec_cache (SpecCache): The spec cache. + metadata_directory_manager (GCSFileManager): The metadata directory manager. + registry_name (str): The name of the registry_entry. One of "cloud" or "oss". + Returns: + str: The public url of the registry entry. 
+ """ + registry_model = generate_registry_entry(metadata_entry, spec_cache, registry_name) file_handle = persist_registry_entry_to_json(registry_model, registry_name, metadata_entry, metadata_directory_manager) @@ -585,7 +608,10 @@ def metadata_entry(context: OpExecutionContext) -> Output[Optional[LatestMetadat auto_materialize_policy=AutoMaterializePolicy.eager(max_materializations_per_minute=MAX_METADATA_PARTITION_RUN_REQUEST), ) @sentry.instrument_asset_op -def registry_entry(context: OpExecutionContext, metadata_entry: Optional[LatestMetadataEntry]) -> Output[Optional[dict]]: +def registry_entry( + context: OpExecutionContext, + metadata_entry: Optional[LatestMetadataEntry], +) -> Output[Optional[dict]]: """ Generate the registry entry files from the given metadata file, and persist it to GCS. """ @@ -613,7 +639,12 @@ def registry_entry(context: OpExecutionContext, metadata_entry: Optional[LatestM enabled_registries, disabled_registries = get_registry_status_lists(metadata_entry) persisted_registry_entries = { - registry_name: generate_and_persist_registry_entry(metadata_entry, spec_cache, root_metadata_directory_manager, registry_name) + registry_name: generate_and_persist_registry_entry( + metadata_entry, + spec_cache, + root_metadata_directory_manager, + registry_name, + ) for registry_name in enabled_registries } @@ -663,3 +694,36 @@ def registry_entry(context: OpExecutionContext, metadata_entry: Optional[LatestM ) return Output(metadata=dagster_metadata, value=persisted_registry_entries) + + +def get_registry_entries(blob_resource) -> Output[List]: + registry_entries = [] + for blob in blob_resource: + _, registry_entry = read_registry_entry_blob(blob) + registry_entries.append(registry_entry) + + return Output(registry_entries) + + +@asset(required_resource_keys={"latest_cloud_registry_entries_file_blobs"}, group_name=GROUP_NAME) +@sentry.instrument_asset_op +def latest_cloud_registry_entries(context: OpExecutionContext) -> Output[List]: + return 
get_registry_entries(context.resources.latest_cloud_registry_entries_file_blobs) + + +@asset(required_resource_keys={"latest_oss_registry_entries_file_blobs"}, group_name=GROUP_NAME) +@sentry.instrument_asset_op +def latest_oss_registry_entries(context: OpExecutionContext) -> Output[List]: + return get_registry_entries(context.resources.latest_oss_registry_entries_file_blobs) + + +@asset(required_resource_keys={"release_candidate_cloud_registry_entries_file_blobs"}, group_name=GROUP_NAME) +@sentry.instrument_asset_op +def release_candidate_cloud_registry_entries(context: OpExecutionContext) -> Output[List]: + return get_registry_entries(context.resources.release_candidate_cloud_registry_entries_file_blobs) + + +@asset(required_resource_keys={"release_candidate_oss_registry_entries_file_blobs"}, group_name=GROUP_NAME) +@sentry.instrument_asset_op +def release_candidate_oss_registry_entries(context: OpExecutionContext) -> Output[List]: + return get_registry_entries(context.resources.release_candidate_oss_registry_entries_file_blobs) diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/jobs/registry.py b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/jobs/registry.py index 166ebf4f3fb5..322c2e5d3002 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/jobs/registry.py +++ b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/jobs/registry.py @@ -3,13 +3,14 @@ # from dagster import AssetSelection, SkipReason, define_asset_job, job, op -from orchestrator.assets import registry_entry +from orchestrator.assets import metadata, registry, registry_entry, specs_secrets_mask from orchestrator.config import HIGH_QUEUE_PRIORITY, MAX_METADATA_PARTITION_RUN_REQUEST from orchestrator.logging.publish_connector_lifecycle import PublishConnectorLifecycle, PublishConnectorLifecycleStage, StageStatus oss_registry_inclusive = AssetSelection.keys("persisted_oss_registry", 
"specs_secrets_mask_yaml").upstream() generate_oss_registry = define_asset_job(name="generate_oss_registry", selection=oss_registry_inclusive) + cloud_registry_inclusive = AssetSelection.keys("persisted_cloud_registry", "specs_secrets_mask_yaml").upstream() generate_cloud_registry = define_asset_job(name="generate_cloud_registry", selection=cloud_registry_inclusive) diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/models/metadata.py b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/models/metadata.py index 4445aa1356aa..9a5f074c0954 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/models/metadata.py +++ b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/models/metadata.py @@ -65,6 +65,14 @@ def is_latest_version_path(self) -> bool: ending_path = f"latest/{METADATA_FILE_NAME}" return self.file_path.endswith(ending_path) + @property + def is_release_candidate_version_path(self) -> bool: + """ + Path is considered a latest version path if the subfolder containing METADATA_FILE_NAME is "latest" + """ + ending_path = f"release_candidate/{METADATA_FILE_NAME}" + return self.file_path.endswith(ending_path) + @property def dependency_file_url(self) -> Optional[str]: if not self.bucket_name or not self.metadata_definition: diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/sensors/gcs.py b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/sensors/gcs.py index b6fda58d291c..d83a1ae4c183 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/sensors/gcs.py +++ b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/sensors/gcs.py @@ -2,6 +2,8 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# +import time + from dagster import ( DefaultSensorStatus, RunRequest, @@ -21,6 +23,7 @@ def new_gcs_blobs_sensor( job, interval, resources_def, + allow_duplicate_runs=False, ) -> SensorDefinition: """ This sensor is responsible for polling a list of gcs blobs and triggering a job when the list changes. @@ -42,7 +45,6 @@ def new_gcs_blobs_sensor_definition(context: SensorEvaluationContext): context.log.info(f"Old etag cursor: {context.cursor}") gcs_blobs_resource = getattr(resources, gcs_blobs_resource_key) - new_etags_cursor = string_array_to_hash([blob.etag for blob in gcs_blobs_resource]) context.log.info(f"New etag cursor: {new_etags_cursor}") @@ -54,6 +56,11 @@ def new_gcs_blobs_sensor_definition(context: SensorEvaluationContext): context.update_cursor(new_etags_cursor) context.log.info(f"New {gcs_blobs_resource_key} in GCS bucket") run_key = f"{sensor_name}:{new_etags_cursor}" + # Dagster skips runs with the same run_key + # It means that if the GCS blob list changed back to a state which was already processed, the run will be skipped + # This is not desirable in cases we want to reprocess the same data again after a blob deletion + if allow_duplicate_runs: + run_key += f":{int(time.time())}" return RunRequest(run_key=run_key) return new_gcs_blobs_sensor_definition diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/poetry.lock b/airbyte-ci/connectors/metadata_service/orchestrator/poetry.lock index 681fba946c20..65b4c30a2a86 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/poetry.lock +++ b/airbyte-ci/connectors/metadata_service/orchestrator/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "alembic" @@ -1747,7 +1747,7 @@ files = [ [[package]] name = "metadata-service" -version = "0.13.0" +version = "0.14.0" description = "" optional = false python-versions = "^3.9" @@ -2514,6 +2514,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2521,8 +2522,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = 
"PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2539,6 +2547,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2546,6 +2555,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml b/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml index a182237f2b22..b96544b70d21 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml 
+++ b/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "orchestrator" -version = "0.4.1" +version = "0.5.0" description = "" authors = ["Ben Church "] readme = "README.md" From 8c23992c0c3b32d27b7a094de86e3133588cb7a1 Mon Sep 17 00:00:00 2001 From: Augustin Date: Wed, 4 Sep 2024 17:17:22 +0200 Subject: [PATCH 23/51] source-faker: test release candidate (#45126) --- .../connectors/source-faker/metadata.yaml | 3 +- .../connectors/source-faker/pyproject.toml | 2 +- docs/integrations/sources/faker.md | 55 ++++++++++--------- 3 files changed, 31 insertions(+), 29 deletions(-) diff --git a/airbyte-integrations/connectors/source-faker/metadata.yaml b/airbyte-integrations/connectors/source-faker/metadata.yaml index f6252fd98f81..112f3df1b70a 100644 --- a/airbyte-integrations/connectors/source-faker/metadata.yaml +++ b/airbyte-integrations/connectors/source-faker/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: api connectorType: source definitionId: dfd88b22-b603-4c3d-aad7-3701784586b1 - dockerImageTag: 6.2.11 + dockerImageTag: 6.2.12 dockerRepository: airbyte/source-faker documentationUrl: https://docs.airbyte.com/integrations/sources/faker githubIssueLabel: source-faker @@ -23,6 +23,7 @@ data: enabled: true releaseStage: beta releases: + isReleaseCandidate: true breakingChanges: 4.0.0: message: This is a breaking change message diff --git a/airbyte-integrations/connectors/source-faker/pyproject.toml b/airbyte-integrations/connectors/source-faker/pyproject.toml index ea437aa354f2..49874f18503b 100644 --- a/airbyte-integrations/connectors/source-faker/pyproject.toml +++ b/airbyte-integrations/connectors/source-faker/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "6.2.11" +version = "6.2.12" name = "source-faker" description = "Source implementation for fake but realistic looking data." 
authors = [ "Airbyte ",] diff --git a/docs/integrations/sources/faker.md b/docs/integrations/sources/faker.md index 2711e1f721f3..763e34ee9508 100644 --- a/docs/integrations/sources/faker.md +++ b/docs/integrations/sources/faker.md @@ -104,33 +104,34 @@ None! | Version | Date | Pull Request | Subject | | :------ | :--------- | :-------------------------------------------------------------------------------------------------------------------- | :-------------------------------------------------------------------------------------------------------------- | -| 6.2.11 | 2024-08-31 | [45025](https://github.com/airbytehq/airbyte/pull/45025) | Update dependencies | -| 6.2.10 | 2024-08-24 | [44659](https://github.com/airbytehq/airbyte/pull/44659) | Update dependencies | -| 6.2.9 | 2024-08-17 | [44221](https://github.com/airbytehq/airbyte/pull/44221) | Update dependencies | -| 6.2.8 | 2024-08-12 | [43753](https://github.com/airbytehq/airbyte/pull/43753) | Update dependencies | -| 6.2.7 | 2024-08-10 | [43570](https://github.com/airbytehq/airbyte/pull/43570) | Update dependencies | -| 6.2.6 | 2024-08-03 | [43102](https://github.com/airbytehq/airbyte/pull/43102) | Update dependencies | -| 6.2.5 | 2024-07-27 | [42682](https://github.com/airbytehq/airbyte/pull/42682) | Update dependencies | -| 6.2.4 | 2024-07-20 | [42367](https://github.com/airbytehq/airbyte/pull/42367) | Update dependencies | -| 6.2.3 | 2024-07-13 | [41848](https://github.com/airbytehq/airbyte/pull/41848) | Update dependencies | -| 6.2.2 | 2024-07-10 | [41467](https://github.com/airbytehq/airbyte/pull/41467) | Update dependencies | -| 6.2.1 | 2024-07-09 | [41180](https://github.com/airbytehq/airbyte/pull/41180) | Update dependencies | -| 6.2.0 | 2024-07-07 | [39935](https://github.com/airbytehq/airbyte/pull/39935) | Update CDK to 2.0. 
| -| 6.1.6 | 2024-07-06 | [40956](https://github.com/airbytehq/airbyte/pull/40956) | Update dependencies | -| 6.1.5 | 2024-06-25 | [40426](https://github.com/airbytehq/airbyte/pull/40426) | Update dependencies | -| 6.1.4 | 2024-06-21 | [39935](https://github.com/airbytehq/airbyte/pull/39935) | Update dependencies | -| 6.1.3 | 2024-06-04 | [39029](https://github.com/airbytehq/airbyte/pull/39029) | [autopull] Upgrade base image to v1.2.1 | -| 6.1.2 | 2024-06-03 | [38831](https://github.com/airbytehq/airbyte/pull/38831) | Bump CDK to allow and prefer versions `1.x` | -| 6.1.1 | 2024-05-20 | [38256](https://github.com/airbytehq/airbyte/pull/38256) | Replace AirbyteLogger with logging.Logger | -| 6.1.0 | 2024-04-08 | [36898](https://github.com/airbytehq/airbyte/pull/36898) | Update car prices and years | -| 6.0.3 | 2024-03-15 | [36167](https://github.com/airbytehq/airbyte/pull/36167) | Make 'count' an optional config parameter. | -| 6.0.2 | 2024-02-12 | [35174](https://github.com/airbytehq/airbyte/pull/35174) | Manage dependencies with Poetry. | -| 6.0.1 | 2024-02-12 | [35172](https://github.com/airbytehq/airbyte/pull/35172) | Base image migration: remove Dockerfile and use the python-connector-base image | -| 6.0.0 | 2024-01-30 | [34644](https://github.com/airbytehq/airbyte/pull/34644) | Declare 'id' columns as primary keys. 
| -| 5.0.2 | 2024-01-17 | [34344](https://github.com/airbytehq/airbyte/pull/34344) | Ensure unique state messages | -| 5.0.1 | 2023-01-08 | [34033](https://github.com/airbytehq/airbyte/pull/34033) | Add standard entrypoints for usage with AirbyteLib | -| 5.0.0 | 2023-08-08 | [29213](https://github.com/airbytehq/airbyte/pull/29213) | Change all `*id` fields and `products.year` to be integer | -| 4.0.0 | 2023-07-19 | [28485](https://github.com/airbytehq/airbyte/pull/28485) | Bump to test publication | +| 6.2.12 | 2024-09-04 | [45126](https://github.com/airbytehq/airbyte/pull/45126) | Test a release candidate release | +| 6.2.11 | 2024-08-31 | [45025](https://github.com/airbytehq/airbyte/pull/45025) | Update dependencies | +| 6.2.10 | 2024-08-24 | [44659](https://github.com/airbytehq/airbyte/pull/44659) | Update dependencies | +| 6.2.9 | 2024-08-17 | [44221](https://github.com/airbytehq/airbyte/pull/44221) | Update dependencies | +| 6.2.8 | 2024-08-12 | [43753](https://github.com/airbytehq/airbyte/pull/43753) | Update dependencies | +| 6.2.7 | 2024-08-10 | [43570](https://github.com/airbytehq/airbyte/pull/43570) | Update dependencies | +| 6.2.6 | 2024-08-03 | [43102](https://github.com/airbytehq/airbyte/pull/43102) | Update dependencies | +| 6.2.5 | 2024-07-27 | [42682](https://github.com/airbytehq/airbyte/pull/42682) | Update dependencies | +| 6.2.4 | 2024-07-20 | [42367](https://github.com/airbytehq/airbyte/pull/42367) | Update dependencies | +| 6.2.3 | 2024-07-13 | [41848](https://github.com/airbytehq/airbyte/pull/41848) | Update dependencies | +| 6.2.2 | 2024-07-10 | [41467](https://github.com/airbytehq/airbyte/pull/41467) | Update dependencies | +| 6.2.1 | 2024-07-09 | [41180](https://github.com/airbytehq/airbyte/pull/41180) | Update dependencies | +| 6.2.0 | 2024-07-07 | [39935](https://github.com/airbytehq/airbyte/pull/39935) | Update CDK to 2.0. 
| +| 6.1.6 | 2024-07-06 | [40956](https://github.com/airbytehq/airbyte/pull/40956) | Update dependencies | +| 6.1.5 | 2024-06-25 | [40426](https://github.com/airbytehq/airbyte/pull/40426) | Update dependencies | +| 6.1.4 | 2024-06-21 | [39935](https://github.com/airbytehq/airbyte/pull/39935) | Update dependencies | +| 6.1.3 | 2024-06-04 | [39029](https://github.com/airbytehq/airbyte/pull/39029) | [autopull] Upgrade base image to v1.2.1 | +| 6.1.2 | 2024-06-03 | [38831](https://github.com/airbytehq/airbyte/pull/38831) | Bump CDK to allow and prefer versions `1.x` | +| 6.1.1 | 2024-05-20 | [38256](https://github.com/airbytehq/airbyte/pull/38256) | Replace AirbyteLogger with logging.Logger | +| 6.1.0 | 2024-04-08 | [36898](https://github.com/airbytehq/airbyte/pull/36898) | Update car prices and years | +| 6.0.3 | 2024-03-15 | [36167](https://github.com/airbytehq/airbyte/pull/36167) | Make 'count' an optional config parameter. | +| 6.0.2 | 2024-02-12 | [35174](https://github.com/airbytehq/airbyte/pull/35174) | Manage dependencies with Poetry. | +| 6.0.1 | 2024-02-12 | [35172](https://github.com/airbytehq/airbyte/pull/35172) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 6.0.0 | 2024-01-30 | [34644](https://github.com/airbytehq/airbyte/pull/34644) | Declare 'id' columns as primary keys. 
| +| 5.0.2 | 2024-01-17 | [34344](https://github.com/airbytehq/airbyte/pull/34344) | Ensure unique state messages | +| 5.0.1 | 2023-01-08 | [34033](https://github.com/airbytehq/airbyte/pull/34033) | Add standard entrypoints for usage with AirbyteLib | +| 5.0.0 | 2023-08-08 | [29213](https://github.com/airbytehq/airbyte/pull/29213) | Change all `*id` fields and `products.year` to be integer | +| 4.0.0 | 2023-07-19 | [28485](https://github.com/airbytehq/airbyte/pull/28485) | Bump to test publication | | 3.0.2 | 2023-07-07 | [27807](https://github.com/airbytehq/airbyte/pull/28060) | Bump to test publication | | 3.0.1 | 2023-06-28 | [27807](https://github.com/airbytehq/airbyte/pull/27807) | Fix bug with purchase stream updated_at | | 3.0.0 | 2023-06-23 | [27684](https://github.com/airbytehq/airbyte/pull/27684) | Stream cursor is now `updated_at` & remove `records_per_sync` option | From d02d29c054772739f9e3f4cbdf835040883f5056 Mon Sep 17 00:00:00 2001 From: Edward Gao Date: Wed, 4 Sep 2024 09:03:59 -0700 Subject: [PATCH 24/51] Destination databricks: add banner for gcp oauth (#45127) --- docs/integrations/destinations/databricks.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/integrations/destinations/databricks.md b/docs/integrations/destinations/databricks.md index b482cd1648df..b48664bda0a9 100644 --- a/docs/integrations/destinations/databricks.md +++ b/docs/integrations/destinations/databricks.md @@ -9,6 +9,12 @@ This destination syncs data to Delta Lake on Databricks Lakehouse. Each stream i You **must** be using Unity Catalog to use this connector. ::: +:::info +Please note, at this time OAuth2 authentication is only supported in AWS +deployments. If you are running Databricks in GCP, you **must** use an access +token. +::: + This connector requires a JDBC driver to connect to the Databricks cluster. 
By using the driver and the connector, you must agree to the [JDBC ODBC driver license](https://databricks.com/jdbc-odbc-driver-license). This means that you can From 1a9728d64531373788033b31409595e8762866a3 Mon Sep 17 00:00:00 2001 From: Marius Posta Date: Wed, 4 Sep 2024 14:30:02 -0700 Subject: [PATCH 25/51] bulk-cdk: make exception classifiers recursive (#45141) --- .../cdk/output/DefaultExceptionClassifier.kt | 24 +++++-------------- .../airbyte/cdk/output/ExceptionClassifier.kt | 16 +++++++++---- .../output/RegexExceptionClassifierTest.kt | 12 ++++++++++ .../cdk/output/JdbcExceptionClassifier.kt | 11 +++++---- 4 files changed, 36 insertions(+), 27 deletions(-) diff --git a/airbyte-cdk/bulk/core/base/src/main/kotlin/io/airbyte/cdk/output/DefaultExceptionClassifier.kt b/airbyte-cdk/bulk/core/base/src/main/kotlin/io/airbyte/cdk/output/DefaultExceptionClassifier.kt index 71da76a5859f..fc6f09259b1a 100644 --- a/airbyte-cdk/bulk/core/base/src/main/kotlin/io/airbyte/cdk/output/DefaultExceptionClassifier.kt +++ b/airbyte-cdk/bulk/core/base/src/main/kotlin/io/airbyte/cdk/output/DefaultExceptionClassifier.kt @@ -22,24 +22,12 @@ class DefaultExceptionClassifier( ) : ExceptionClassifier { override fun classify(e: Throwable): ConnectorError? { - return when (val connectorErrorException: ConnectorErrorException? = unwind(e)) { - is ConfigErrorException -> ConfigError(connectorErrorException.message!!) - is TransientErrorException -> TransientError(connectorErrorException.message!!) - is SystemErrorException -> SystemError(connectorErrorException.message) - null -> null + val unwound: Throwable? = ExceptionClassifier.unwind(e) { it is ConnectorErrorException } + return when (unwound) { + is ConfigErrorException -> ConfigError(unwound.message!!) + is TransientErrorException -> TransientError(unwound.message!!) + is SystemErrorException -> SystemError(unwound.message) + else -> null } } - - /** Recursively walks the causes of [e] and returns the last [ConnectorErrorException]. 
*/ - fun unwind(e: Throwable): ConnectorErrorException? { - var connectorErrorException: ConnectorErrorException? = null - var unwound: Throwable? = e - while (unwound != null) { - if (unwound is ConnectorErrorException) { - connectorErrorException = unwound - } - unwound = unwound.cause - } - return connectorErrorException - } } diff --git a/airbyte-cdk/bulk/core/base/src/main/kotlin/io/airbyte/cdk/output/ExceptionClassifier.kt b/airbyte-cdk/bulk/core/base/src/main/kotlin/io/airbyte/cdk/output/ExceptionClassifier.kt index 0b7e19b44aec..1583a271fb6e 100644 --- a/airbyte-cdk/bulk/core/base/src/main/kotlin/io/airbyte/cdk/output/ExceptionClassifier.kt +++ b/airbyte-cdk/bulk/core/base/src/main/kotlin/io/airbyte/cdk/output/ExceptionClassifier.kt @@ -15,6 +15,16 @@ interface ExceptionClassifier : Ordered { val orderValue: Int override fun getOrder(): Int = orderValue + + companion object { + fun unwind(e: Throwable, stopUnwind: (Throwable) -> Boolean): Throwable? { + var unwound = e + while (!stopUnwind(unwound)) { + unwound = unwound.cause ?: return null + } + return unwound + } + } } /** Each [ConnectorError] subtype corresponds to a [AirbyteErrorTraceMessage.FailureType]. */ @@ -54,10 +64,8 @@ interface RuleBasedExceptionClassifier : override fun classify(e: Throwable): ConnectorError? 
{ for (rule in rules) { - if (!rule.matches(e)) { - continue - } - val message: String = rule.output ?: e.message ?: e.toString() + val match: Throwable = ExceptionClassifier.unwind(e, rule::matches) ?: continue + val message: String = rule.output ?: match.message ?: match.toString() val firstLine: String = if (rule.group == null) message else "${rule.group}: $message" val lines: List = listOf(firstLine) + rule.referenceLinks val displayMessage: String = lines.joinToString(separator = "\n") diff --git a/airbyte-cdk/bulk/core/base/src/test/kotlin/io/airbyte/cdk/output/RegexExceptionClassifierTest.kt b/airbyte-cdk/bulk/core/base/src/test/kotlin/io/airbyte/cdk/output/RegexExceptionClassifierTest.kt index e511d46488ec..2fa23ddd680a 100644 --- a/airbyte-cdk/bulk/core/base/src/test/kotlin/io/airbyte/cdk/output/RegexExceptionClassifierTest.kt +++ b/airbyte-cdk/bulk/core/base/src/test/kotlin/io/airbyte/cdk/output/RegexExceptionClassifierTest.kt @@ -80,4 +80,16 @@ class RegexExceptionClassifierTest { classifier.classify(RuntimeException("barbaz")), ) } + + @Test + fun testRecursiveRuleOrdering() { + Assertions.assertEquals( + ConfigError("grouped: has foo\nhttps://www.youtube.com/watch?v=xvFZjo5PgG0"), + classifier.classify(RuntimeException("quux", RuntimeException("foobarbaz"))), + ) + Assertions.assertEquals( + TransientError("barbaz"), + classifier.classify(RuntimeException("quux", RuntimeException("barbaz"))), + ) + } } diff --git a/airbyte-cdk/bulk/toolkits/extract-jdbc/src/main/kotlin/io/airbyte/cdk/output/JdbcExceptionClassifier.kt b/airbyte-cdk/bulk/toolkits/extract-jdbc/src/main/kotlin/io/airbyte/cdk/output/JdbcExceptionClassifier.kt index 806f0c1d10c6..adb8713bde4e 100644 --- a/airbyte-cdk/bulk/toolkits/extract-jdbc/src/main/kotlin/io/airbyte/cdk/output/JdbcExceptionClassifier.kt +++ b/airbyte-cdk/bulk/toolkits/extract-jdbc/src/main/kotlin/io/airbyte/cdk/output/JdbcExceptionClassifier.kt @@ -29,15 +29,16 @@ class JdbcExceptionClassifier( } override fun classify(e: 
Throwable): ConnectorError? { - if (e !is SQLException) return null + var match: SQLException = + ExceptionClassifier.unwind(e) { it is SQLException } as? SQLException ?: return null val decoratedMessage: String = listOfNotNull( - e.sqlState?.let { "State code: $it" }, - e.errorCode.takeIf { it != 0 }?.let { "Error code: $it" }, - e.message?.let { "Message: $it" }, + match.sqlState?.let { "State code: $it" }, + match.errorCode.takeIf { it != 0 }?.let { "Error code: $it" }, + match.message?.let { "Message: $it" }, ) .joinToString(separator = "; ") - val decoratedException = SQLException(decoratedMessage, e.sqlState, e.errorCode) + val decoratedException = SQLException(decoratedMessage, match.sqlState, match.errorCode) val ruleBasedMatch: ConnectorError? = super.classify(decoratedException) if (ruleBasedMatch != null) { return ruleBasedMatch From 4264c08bca63ddbe05da871a40b490bf3310a317 Mon Sep 17 00:00:00 2001 From: pmossman Date: Wed, 4 Sep 2024 23:54:58 +0000 Subject: [PATCH 26/51] Bump Airbyte version from 0.64.1 to 0.64.2 --- .bumpversion.cfg | 2 +- gradle.properties | 2 +- run-ab-platform.sh | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 1511de7e098a..47fb05b997bc 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.64.1 +current_version = 0.64.2 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/gradle.properties b/gradle.properties index 9e225dc503f8..a6eb6a999e1d 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,4 +1,4 @@ -VERSION=0.64.1 +VERSION=0.64.2 # NOTE: some of these values are overwritten in CI! 
# NOTE: if you want to override this for your local machine, set overrides in ~/.gradle/gradle.properties diff --git a/run-ab-platform.sh b/run-ab-platform.sh index 75f2af0ffc7e..bc2c6cabd508 100755 --- a/run-ab-platform.sh +++ b/run-ab-platform.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -VERSION=0.64.1 +VERSION=0.64.2 # Run away from anything even a little scary set -o nounset # -u exit if a variable is not set set -o errexit # -f exit for any command failure" From 458e96ea668366d3326d3f97fe477a30d8b6f838 Mon Sep 17 00:00:00 2001 From: Xiaohan Song Date: Wed, 4 Sep 2024 18:43:47 -0700 Subject: [PATCH 27/51] [source-mssql] update datetimeoffset format (#45142) --- .../connectors/source-mssql/metadata.yaml | 2 +- .../integrations/source/mssql/MssqlQueryUtils.java | 9 ++++----- .../integrations/source/mssql/MssqlSourceOperations.java | 4 ++-- .../source/mssql/initialsync/MssqlInitialReadUtil.java | 1 - .../source/mssql/MssqlSourceOperationsTest.java | 2 +- docs/integrations/sources/mssql.md | 1 + 6 files changed, 9 insertions(+), 10 deletions(-) diff --git a/airbyte-integrations/connectors/source-mssql/metadata.yaml b/airbyte-integrations/connectors/source-mssql/metadata.yaml index 102daf2d4e52..f522f8910824 100644 --- a/airbyte-integrations/connectors/source-mssql/metadata.yaml +++ b/airbyte-integrations/connectors/source-mssql/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: b5ea17b1-f170-46dc-bc31-cc744ca984c1 - dockerImageTag: 4.1.10 + dockerImageTag: 4.1.11 dockerRepository: airbyte/source-mssql documentationUrl: https://docs.airbyte.com/integrations/sources/mssql githubIssueLabel: source-mssql diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlQueryUtils.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlQueryUtils.java index b2ac6dd7f6d4..c59d46135e91 100644 --- 
a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlQueryUtils.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlQueryUtils.java @@ -14,7 +14,6 @@ import com.google.common.collect.ImmutableList; import com.microsoft.sqlserver.jdbc.SQLServerResultSetMetaData; import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.db.jdbc.JdbcUtils; import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo; import io.airbyte.cdk.integrations.source.relationaldb.models.CursorBasedStatus; import io.airbyte.cdk.integrations.source.relationaldb.models.InternalModels.StateType; @@ -79,7 +78,7 @@ public static void getIndexInfoForStreams(final JdbcDatabase database, final Con final String query = INDEX_QUERY.formatted(fullTableName); LOGGER.debug("Index lookup query: {}", query); final List jsonNodes = database.bufferedResultSetQuery(conn -> conn.prepareStatement(query).executeQuery(), - resultSet -> JdbcUtils.getDefaultSourceOperations().rowToJson(resultSet)); + resultSet -> new MssqlSourceOperations().rowToJson(resultSet)); if (jsonNodes != null) { jsonNodes.stream().map(node -> Jsons.convertValue(node, Index.class)) .forEach(i -> LOGGER.info("Index {}", i)); @@ -106,7 +105,7 @@ public static String getMaxOcValueForStream(final JdbcDatabase database, LOGGER.info("Querying for max oc value: {}", maxOcQuery); try { final List jsonNodes = database.bufferedResultSetQuery(conn -> conn.prepareStatement(maxOcQuery).executeQuery(), - resultSet -> JdbcUtils.getDefaultSourceOperations().rowToJson(resultSet)); + resultSet -> new MssqlSourceOperations().rowToJson(resultSet)); Preconditions.checkState(jsonNodes.size() == 1); if (jsonNodes.get(0).get(MAX_OC_COL) == null) { LOGGER.info("Max PK is null for table {} - this could indicate an empty table", fullTableName); @@ -213,7 +212,7 @@ public static Map jsonNodes; try { jsonNodes = database.bufferedResultSetQuery(conn 
-> conn.prepareStatement(cursorBasedSyncStatusQuery).executeQuery(), - resultSet -> JdbcUtils.getDefaultSourceOperations().rowToJson(resultSet)); + resultSet -> new MssqlSourceOperations().rowToJson(resultSet)); } catch (SQLException e) { throw new RuntimeException("Failed to read max cursor value from %s.%s".formatted(namespace, name), e); } @@ -241,7 +240,7 @@ private static List getTableEstimate(final JdbcDatabase database, fina String.format(TABLE_ESTIMATE_QUERY, namespace, name); LOGGER.info("Querying for table estimate size: {}", tableEstimateQuery); final List jsonNodes = database.bufferedResultSetQuery(conn -> conn.createStatement().executeQuery(tableEstimateQuery), - resultSet -> JdbcUtils.getDefaultSourceOperations().rowToJson(resultSet)); + resultSet -> new MssqlSourceOperations().rowToJson(resultSet)); Preconditions.checkState(jsonNodes.size() == 1); LOGGER.debug("Estimate: {}", jsonNodes); return jsonNodes; diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSourceOperations.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSourceOperations.java index 3e6046da2f1d..e356644391a2 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSourceOperations.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSourceOperations.java @@ -4,7 +4,7 @@ package io.airbyte.integrations.source.mssql; -import static io.airbyte.cdk.db.DataTypeUtils.OFFSETDATETIME_FORMATTER; +import static io.airbyte.cdk.db.DataTypeUtils.TIMESTAMPTZ_FORMATTER; import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_COLUMN_NAME; import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_COLUMN_TYPE; import static io.airbyte.cdk.db.jdbc.JdbcConstants.INTERNAL_COLUMN_TYPE_NAME; @@ -183,7 +183,7 @@ public JsonSchemaType getAirbyteType(final JDBCType 
jdbcType) { protected void setTimestampWithTimezone(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { try { - final OffsetDateTime offsetDateTime = OffsetDateTime.parse(value, OFFSETDATETIME_FORMATTER); + final OffsetDateTime offsetDateTime = OffsetDateTime.parse(value, TIMESTAMPTZ_FORMATTER); final Timestamp timestamp = Timestamp.valueOf(offsetDateTime.atZoneSameInstant(offsetDateTime.getOffset()).toLocalDateTime()); // Final step of conversion from // OffsetDateTime (a Java construct) object -> Timestamp (a Java construct) -> diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java index e5f19ce6c431..6679f7987b55 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java @@ -436,7 +436,6 @@ static Optional getOrderedColumnInfo(final JdbcDatabase datab final JDBCType ocFieldType = table.getFields().stream() .filter(field -> field.getName().equals(ocFieldName)) .findFirst().get().getType(); - final String ocMaxValue = MssqlQueryUtils.getMaxOcValueForStream(database, stream, ocFieldName, quoteString); return Optional.of(new OrderedColumnInfo(ocFieldName, ocFieldType, ocMaxValue)); } diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceOperationsTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceOperationsTest.java index 14cbe17c8c24..7f1de60c6eac 100644 --- 
a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceOperationsTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/MssqlSourceOperationsTest.java @@ -61,7 +61,7 @@ public void setDateTimeOffsetColumnAsCursor() throws SQLException { executeQuery(insertQuery); expectedRecords.add(jsonNode); } - final String cursorAnchorValue = "2023-01-01 00:00:00.0000000 +00:00"; + final String cursorAnchorValue = "2023-01-01T00:00:00.000000+00:00"; final List actualRecords = new ArrayList<>(); try (final Connection connection = testdb.getContainer().createConnection("")) { final PreparedStatement preparedStatement = connection.prepareStatement( diff --git a/docs/integrations/sources/mssql.md b/docs/integrations/sources/mssql.md index 07297888099d..03778dbb1dea 100644 --- a/docs/integrations/sources/mssql.md +++ b/docs/integrations/sources/mssql.md @@ -422,6 +422,7 @@ WHERE actor_definition_id ='b5ea17b1-f170-46dc-bc31-cc744ca984c1' AND (configura | Version | Date | Pull Request | Subject | |:--------|:-----------|:------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| +| 4.1.11 | 2024-09-04 | [45142](https://github.com/airbytehq/airbyte/pull/45142) | Fix incorrect datetimeoffset format in cursor state. | | 4.1.10 | 2024-08-27 | [44759](https://github.com/airbytehq/airbyte/pull/44759) | Improve null safety in parsing debezium change events. | | 4.1.9 | 2024-08-27 | [44841](https://github.com/airbytehq/airbyte/pull/44841) | Adopt latest CDK. | | 4.1.8 | 2024-08-08 | [43410](https://github.com/airbytehq/airbyte/pull/43410) | Adopt latest CDK. 
| From 3b79bd70c02c8f66684df8f0e893d41cec6caf0f Mon Sep 17 00:00:00 2001 From: Augustin Date: Thu, 5 Sep 2024 12:43:46 +0200 Subject: [PATCH 28/51] metadata-service[orchestrator]: fix missing isReleaseCandidate field (#45154) --- .../models/generated/ConnectorRegistryDestinationDefinition.py | 1 + .../models/generated/ConnectorRegistryReleases.py | 1 + .../models/generated/ConnectorRegistrySourceDefinition.py | 1 + .../metadata_service/models/generated/ConnectorRegistryV0.py | 1 + .../metadata_service/models/src/ConnectorRegistryReleases.yaml | 3 +++ airbyte-ci/connectors/metadata_service/lib/pyproject.toml | 2 +- .../orchestrator/orchestrator/assets/registry_entry.py | 1 + .../connectors/metadata_service/orchestrator/poetry.lock | 2 +- .../connectors/metadata_service/orchestrator/pyproject.toml | 2 +- 9 files changed, 11 insertions(+), 3 deletions(-) diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryDestinationDefinition.py b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryDestinationDefinition.py index 92e64e1c11c4..009a7f225018 100644 --- a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryDestinationDefinition.py +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryDestinationDefinition.py @@ -241,6 +241,7 @@ class ConnectorRegistryReleases(BaseModel): class Config: extra = Extra.forbid + isReleaseCandidate: Optional[bool] = Field(None, description="Whether the current version is a release candidate.") releaseCandidates: Optional[ConnectorReleaseCandidates] = None rolloutConfiguration: Optional[RolloutConfiguration] = None breakingChanges: Optional[ConnectorBreakingChanges] = None diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryReleases.py 
b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryReleases.py index 80c930e04109..21696bf0158c 100644 --- a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryReleases.py +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryReleases.py @@ -202,6 +202,7 @@ class ConnectorRegistryReleases(BaseModel): class Config: extra = Extra.forbid + isReleaseCandidate: Optional[bool] = Field(None, description="Whether the current version is a release candidate.") releaseCandidates: Optional[ConnectorReleaseCandidates] = None rolloutConfiguration: Optional[RolloutConfiguration] = None breakingChanges: Optional[ConnectorBreakingChanges] = None diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistrySourceDefinition.py b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistrySourceDefinition.py index be46a26c7213..4b0124284fd6 100644 --- a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistrySourceDefinition.py +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistrySourceDefinition.py @@ -238,6 +238,7 @@ class ConnectorRegistryReleases(BaseModel): class Config: extra = Extra.forbid + isReleaseCandidate: Optional[bool] = Field(None, description="Whether the current version is a release candidate.") releaseCandidates: Optional[ConnectorReleaseCandidates] = None rolloutConfiguration: Optional[RolloutConfiguration] = None breakingChanges: Optional[ConnectorBreakingChanges] = None diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryV0.py b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryV0.py index 739f395e33d2..715bbc0dfe62 100644 --- 
a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryV0.py +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/generated/ConnectorRegistryV0.py @@ -246,6 +246,7 @@ class ConnectorRegistryReleases(BaseModel): class Config: extra = Extra.forbid + isReleaseCandidate: Optional[bool] = Field(None, description="Whether the current version is a release candidate.") releaseCandidates: Optional[ConnectorReleaseCandidates] = None rolloutConfiguration: Optional[RolloutConfiguration] = None breakingChanges: Optional[ConnectorBreakingChanges] = None diff --git a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistryReleases.yaml b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistryReleases.yaml index c219572fb393..51a9436b26de 100644 --- a/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistryReleases.yaml +++ b/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src/ConnectorRegistryReleases.yaml @@ -6,6 +6,9 @@ description: Contains information about different types of releases for a connec type: object additionalProperties: false properties: + isReleaseCandidate: + type: boolean + description: Whether the current version is a release candidate. 
releaseCandidates: $ref: "#/definitions/ConnectorReleaseCandidates" rolloutConfiguration: diff --git a/airbyte-ci/connectors/metadata_service/lib/pyproject.toml b/airbyte-ci/connectors/metadata_service/lib/pyproject.toml index bbdeadf96d2d..839b6dfd9c88 100644 --- a/airbyte-ci/connectors/metadata_service/lib/pyproject.toml +++ b/airbyte-ci/connectors/metadata_service/lib/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metadata-service" -version = "0.14.0" +version = "0.14.1" description = "" authors = ["Ben Church "] readme = "README.md" diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/registry_entry.py b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/registry_entry.py index 77f821b6d3e0..7c9a22a0be34 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/registry_entry.py +++ b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/registry_entry.py @@ -113,6 +113,7 @@ def apply_connector_releases(metadata: dict) -> Optional[pd.DataFrame]: if metadata.get("releases", {}).get("rolloutConfiguration"): final_registry_releases["rolloutConfiguration"] = metadata["releases"]["rolloutConfiguration"] + final_registry_releases["isReleaseCandidate"] = metadata.get("releases", {}).get("isReleaseCandidate", False) return final_registry_releases diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/poetry.lock b/airbyte-ci/connectors/metadata_service/orchestrator/poetry.lock index 65b4c30a2a86..f5a95ec53cf8 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/poetry.lock +++ b/airbyte-ci/connectors/metadata_service/orchestrator/poetry.lock @@ -1747,7 +1747,7 @@ files = [ [[package]] name = "metadata-service" -version = "0.14.0" +version = "0.14.1" description = "" optional = false python-versions = "^3.9" diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml 
b/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml index b96544b70d21..a8957712a764 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml +++ b/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "orchestrator" -version = "0.5.0" +version = "0.5.1" description = "" authors = ["Ben Church "] readme = "README.md" From dd2d4435720055352709e9bb9f65877d746a7a0e Mon Sep 17 00:00:00 2001 From: Marius Posta Date: Thu, 5 Sep 2024 06:44:44 -0700 Subject: [PATCH 29/51] gradle: remove deps.toml (#45160) --- .../airbyte-cdk/db-destinations/build.gradle | 4 +- airbyte-ci/connectors/pipelines/README.md | 1 + .../pipelines/airbyte_ci/steps/gradle.py | 1 - .../connectors/pipelines/pyproject.toml | 2 +- .../build.gradle | 4 +- .../destination-clickhouse/build.gradle | 4 +- .../build.gradle | 4 +- .../destination-elasticsearch/build.gradle | 4 +- .../destination-iceberg/build.gradle | 4 +- .../connectors/destination-kafka/build.gradle | 2 +- .../build.gradle | 2 +- .../connectors/destination-mssql/build.gradle | 2 +- .../build.gradle | 2 +- .../connectors/destination-mysql/build.gradle | 4 +- .../build.gradle | 2 +- .../destination-oracle/build.gradle | 2 +- .../connectors/destination-redis/build.gradle | 2 +- .../destination-s3-glue/build.gradle | 4 +- deps.toml | 121 ------------------ settings.gradle | 6 - 20 files changed, 25 insertions(+), 152 deletions(-) delete mode 100644 deps.toml diff --git a/airbyte-cdk/java/airbyte-cdk/db-destinations/build.gradle b/airbyte-cdk/java/airbyte-cdk/db-destinations/build.gradle index 34e0eed3f43d..dae007abfb24 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-destinations/build.gradle +++ b/airbyte-cdk/java/airbyte-cdk/db-destinations/build.gradle @@ -10,8 +10,8 @@ dependencies { implementation 'io.aesy:datasize:1.0.0' - testFixturesCompileOnly libs.lombok - testFixturesAnnotationProcessor libs.lombok + testFixturesCompileOnly 
'org.projectlombok:lombok:1.18.30' + testFixturesAnnotationProcessor 'org.projectlombok:lombok:1.18.30' testFixturesImplementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies') testFixturesImplementation testFixtures(project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-dependencies')) testFixturesImplementation project(':airbyte-cdk:java:airbyte-cdk:airbyte-cdk-core') diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md index 8cb2d86da43e..effc227180c9 100644 --- a/airbyte-ci/connectors/pipelines/README.md +++ b/airbyte-ci/connectors/pipelines/README.md @@ -843,6 +843,7 @@ airbyte-ci connectors --language=low-code migrate-to-manifest-only | Version | PR | Description | | ------- | ---------------------------------------------------------- |------------------------------------------------------------------------------------------------------------------------------| +| 4.35.1 | [#45160](https://github.com/airbytehq/airbyte/pull/45160) | Remove deps.toml dependency for java connectors. 
| | 4.35.0 | [#44879](https://github.com/airbytehq/airbyte/pull/44879) | Mount `components.py` when building manifest-only connector image | | 4.34.2 | [#44786](https://github.com/airbytehq/airbyte/pull/44786) | Pre-emptively skip archived connectors when searching for modified files | | 4.34.1 | [#44557](https://github.com/airbytehq/airbyte/pull/44557) | Conditionally propagate parameters in manifest-only migration | diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/gradle.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/gradle.py index 65b06060cc84..5b5d897d4d63 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/gradle.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/gradle.py @@ -83,7 +83,6 @@ async def _run(self, *args: Any, **kwargs: Any) -> StepResult: ".root", ".env", "build.gradle", - "deps.toml", "gradle.properties", "gradle", "gradlew", diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index a349ad9d1329..5ad7203d1b35 100644 --- a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pipelines" -version = "4.35.0" +version = "4.35.1" description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines" authors = ["Airbyte "] diff --git a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/build.gradle index 584fd3a0dc72..79554f9c8194 100644 --- a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/build.gradle @@ -29,8 +29,8 @@ dependencies { implementation 'com.clickhouse:clickhouse-jdbc:0.3.2-patch10:all' // 
https://mvnrepository.com/artifact/org.testcontainers/clickhouse - testImplementation libs.testcontainers.clickhouse + testImplementation 'org.testcontainers:clickhouse:1.19.0' // https://mvnrepository.com/artifact/org.testcontainers/clickhouse - integrationTestJavaImplementation libs.testcontainers.clickhouse + integrationTestJavaImplementation 'org.testcontainers:clickhouse:1.19.0' } diff --git a/airbyte-integrations/connectors/destination-clickhouse/build.gradle b/airbyte-integrations/connectors/destination-clickhouse/build.gradle index 25cb081d4263..9bbd131e0a06 100644 --- a/airbyte-integrations/connectors/destination-clickhouse/build.gradle +++ b/airbyte-integrations/connectors/destination-clickhouse/build.gradle @@ -28,8 +28,8 @@ dependencies { implementation 'com.clickhouse:clickhouse-jdbc:0.3.2-patch10:all' // https://mvnrepository.com/artifact/org.testcontainers/clickhouse - testImplementation libs.testcontainers.clickhouse + testImplementation 'org.testcontainers:clickhouse:1.19.0' // https://mvnrepository.com/artifact/org.testcontainers/clickhouse - integrationTestJavaImplementation libs.testcontainers.clickhouse + integrationTestJavaImplementation 'org.testcontainers:clickhouse:1.19.0' } diff --git a/airbyte-integrations/connectors/destination-elasticsearch-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-elasticsearch-strict-encrypt/build.gradle index 6cd2f88febbe..564af8c86d15 100644 --- a/airbyte-integrations/connectors/destination-elasticsearch-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-elasticsearch-strict-encrypt/build.gradle @@ -40,6 +40,6 @@ dependencies { // MIT // https://www.testcontainers.org/ - testImplementation libs.testcontainers.elasticsearch - integrationTestJavaImplementation libs.testcontainers.elasticsearch + testImplementation 'org.testcontainers:elasticsearch:1.19.0' + integrationTestJavaImplementation 'org.testcontainers:elasticsearch:1.19.0' } diff --git 
a/airbyte-integrations/connectors/destination-elasticsearch/build.gradle b/airbyte-integrations/connectors/destination-elasticsearch/build.gradle index 52c7536993c1..1cc4d534b085 100644 --- a/airbyte-integrations/connectors/destination-elasticsearch/build.gradle +++ b/airbyte-integrations/connectors/destination-elasticsearch/build.gradle @@ -39,6 +39,6 @@ dependencies { // MIT // https://www.testcontainers.org/ - testImplementation libs.testcontainers.elasticsearch - integrationTestJavaImplementation libs.testcontainers.elasticsearch + testImplementation 'org.testcontainers:elasticsearch:1.19.0' + integrationTestJavaImplementation 'org.testcontainers:elasticsearch:1.19.0' } diff --git a/airbyte-integrations/connectors/destination-iceberg/build.gradle b/airbyte-integrations/connectors/destination-iceberg/build.gradle index 628a123b7aed..bcaa4f9a6062 100644 --- a/airbyte-integrations/connectors/destination-iceberg/build.gradle +++ b/airbyte-integrations/connectors/destination-iceberg/build.gradle @@ -55,8 +55,8 @@ dependencies { } } - testImplementation libs.testcontainers.postgresql - integrationTestJavaImplementation libs.testcontainers.postgresql + testImplementation 'org.testcontainers:postgresql:1.19.0' + integrationTestJavaImplementation 'org.testcontainers:postgresql:1.19.0' testImplementation 'org.mockito:mockito-inline:4.7.0' } diff --git a/airbyte-integrations/connectors/destination-kafka/build.gradle b/airbyte-integrations/connectors/destination-kafka/build.gradle index 69da18f35960..c26504dccb74 100644 --- a/airbyte-integrations/connectors/destination-kafka/build.gradle +++ b/airbyte-integrations/connectors/destination-kafka/build.gradle @@ -28,5 +28,5 @@ dependencies { implementation 'org.apache.kafka:kafka-clients:2.8.0' implementation 'org.apache.kafka:connect-json:2.8.0' - integrationTestJavaImplementation libs.testcontainers.kafka + integrationTestJavaImplementation 'org.testcontainers:kafka:1.19.0' } diff --git 
a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/build.gradle index 34a4d34785fd..1ad0c0f7f7a9 100644 --- a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/build.gradle @@ -34,6 +34,6 @@ dependencies { implementation 'com.microsoft.sqlserver:mssql-jdbc:8.4.1.jre14' testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation libs.testcontainers.mssqlserver + testImplementation 'org.testcontainers:mssqlserver:1.19.0' } diff --git a/airbyte-integrations/connectors/destination-mssql/build.gradle b/airbyte-integrations/connectors/destination-mssql/build.gradle index 8b35d8e7f474..9509d3e00c49 100644 --- a/airbyte-integrations/connectors/destination-mssql/build.gradle +++ b/airbyte-integrations/connectors/destination-mssql/build.gradle @@ -33,5 +33,5 @@ dependencies { implementation 'com.microsoft.sqlserver:mssql-jdbc:8.4.1.jre14' testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation libs.testcontainers.mssqlserver + testImplementation 'org.testcontainers:mssqlserver:1.19.0' } diff --git a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle index 146026800170..b0b6e4cdcb99 100644 --- a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/build.gradle @@ -29,5 +29,5 @@ dependencies { implementation 'mysql:mysql-connector-java:8.0.22' integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-mysql') - integrationTestJavaImplementation libs.testcontainers.mysql + integrationTestJavaImplementation 'org.testcontainers:mysql:1.19.0' } diff --git a/airbyte-integrations/connectors/destination-mysql/build.gradle 
b/airbyte-integrations/connectors/destination-mysql/build.gradle index 4b907bb16ab9..ae0268e774ff 100644 --- a/airbyte-integrations/connectors/destination-mysql/build.gradle +++ b/airbyte-integrations/connectors/destination-mysql/build.gradle @@ -27,6 +27,6 @@ application { dependencies { implementation 'mysql:mysql-connector-java:8.0.22' - integrationTestJavaImplementation libs.testcontainers.mysql - testFixturesApi libs.testcontainers.mysql + integrationTestJavaImplementation 'org.testcontainers:mysql:1.19.0' + testFixturesApi 'org.testcontainers:mysql:1.19.0' } diff --git a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle index c14752b64f8f..7da6a40ff771 100644 --- a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/build.gradle @@ -33,7 +33,7 @@ dependencies { implementation "com.oracle.database.jdbc:ojdbc8-production:19.7.0.0" testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation libs.testcontainers.oracle.xe + testImplementation 'org.testcontainers:oracle-xe:1.19.0' integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-oracle') } diff --git a/airbyte-integrations/connectors/destination-oracle/build.gradle b/airbyte-integrations/connectors/destination-oracle/build.gradle index 861a1174ff4f..93cdf2671885 100644 --- a/airbyte-integrations/connectors/destination-oracle/build.gradle +++ b/airbyte-integrations/connectors/destination-oracle/build.gradle @@ -32,5 +32,5 @@ dependencies { implementation "com.oracle.database.jdbc:ojdbc8-production:19.7.0.0" testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation libs.testcontainers.oracle.xe + testImplementation 'org.testcontainers:oracle-xe:1.19.0' } diff --git a/airbyte-integrations/connectors/destination-redis/build.gradle 
b/airbyte-integrations/connectors/destination-redis/build.gradle index 83cf3a207e87..6b02197a38e5 100644 --- a/airbyte-integrations/connectors/destination-redis/build.gradle +++ b/airbyte-integrations/connectors/destination-redis/build.gradle @@ -37,5 +37,5 @@ dependencies { // https://mvnrepository.com/artifact/org.assertj/assertj-core testImplementation "org.assertj:assertj-core:${assertVersion}" // https://mvnrepository.com/artifact/org.testcontainers/testcontainers - testImplementation libs.testcontainers + testImplementation 'org.testcontainers:testcontainers:1.19.0' } diff --git a/airbyte-integrations/connectors/destination-s3-glue/build.gradle b/airbyte-integrations/connectors/destination-s3-glue/build.gradle index 23f96e515697..d1f83e0ee510 100644 --- a/airbyte-integrations/connectors/destination-s3-glue/build.gradle +++ b/airbyte-integrations/connectors/destination-s3-glue/build.gradle @@ -27,6 +27,6 @@ dependencies { // https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-glue implementation 'com.amazonaws:aws-java-sdk-glue:1.12.334' - implementation libs.aws.java.sdk.s3 - implementation libs.s3 + implementation 'com.amazonaws:aws-java-sdk-s3:1.12.6' + implementation 'software.amazon.awssdk:s3:2.20.20' } diff --git a/deps.toml b/deps.toml deleted file mode 100644 index 52702d874d12..000000000000 --- a/deps.toml +++ /dev/null @@ -1,121 +0,0 @@ -[versions] -airbyte-protocol = "0.5.0" -commons_io = "2.7" -testcontainers = "1.19.0" -datadog-version = "0.111.0" -fasterxml_version = "2.15.2" -flyway = "7.14.0" -glassfish_version = "2.31" -hikaricp = "5.0.1" -jmh = "1.36" -jooq = "3.13.4" -junit-bom = "5.10.1" -kotlin = "2.0.0" -log4j = "2.21.1" -lombok = "1.18.30" -postgresql = "42.7.3" -reactor = "3.5.2" -segment = "2.1.1" -slf4j = "2.0.9" -temporal = "1.17.0" -debezium = "2.4.0.Final" -mockito-version = "5.11.0" - -[libraries] -airbyte-protocol = { module = "io.airbyte.airbyte-protocol:protocol-models", version.ref = "airbyte-protocol" } 
-apache-commons = { module = "org.apache.commons:commons-compress", version = "1.20" } -apache-commons-lang = { module = "org.apache.commons:commons-lang3", version = "3.11" } -appender-log4j2 = { module = "com.therealvan:appender-log4j2", version = "3.6.0" } -assertj-core = { module = "org.assertj:assertj-core", version = "3.21.0" } -aws-java-sdk-s3 = { module = "com.amazonaws:aws-java-sdk-s3", version = "1.12.6" } -commons-io = { module = "commons-io:commons-io", version.ref = "commons_io" } -testcontainers = { module = "org.testcontainers:testcontainers", version.ref = "testcontainers" } -testcontainers-cassandra = { module = "org.testcontainers:cassandra", version.ref = "testcontainers" } -testcontainers-clickhouse = { module = "org.testcontainers:clickhouse", version.ref = "testcontainers" } -testcontainers-cockroachdb = { module = "org.testcontainers:cockroachdb", version.ref = "testcontainers" } -testcontainers-db2 = { module = "org.testcontainers:db2", version.ref = "testcontainers" } -testcontainers-elasticsearch = { module = "org.testcontainers:elasticsearch", version.ref = "testcontainers" } -testcontainers-jdbc = { module = "org.testcontainers:jdbc", version.ref = "testcontainers" } -testcontainers-kafka = { module = "org.testcontainers:kafka", version.ref = "testcontainers" } -testcontainers-mariadb = { module = "org.testcontainers:mariadb", version.ref = "testcontainers" } -testcontainers-mongodb = { module = "org.testcontainers:mongodb", version.ref = "testcontainers" } -testcontainers-mssqlserver = { module = "org.testcontainers:mssqlserver", version.ref = "testcontainers" } -testcontainers-mysql = { module = "org.testcontainers:mysql", version.ref = "testcontainers" } -testcontainers-oracle-xe = { module = "org.testcontainers:oracle-xe", version.ref = "testcontainers" } -testcontainers-postgresql = { module = "org.testcontainers:postgresql", version.ref = "testcontainers" } -testcontainers-pulsar = { module = "org.testcontainers:pulsar", 
version.ref = "testcontainers" } -testcontainers-scylla = { module = "org.testcontainers:testcontainers", version.ref = "testcontainers" } -testcontainers-tidb = { module = "org.testcontainers:testcontainers", version.ref = "testcontainers" } -testcontainers-tidb-source = { module = "org.testcontainers:tidb", version.ref = "testcontainers" } -datadog-trace-api = { module = "com.datadoghq:dd-trace-api", version.ref = "datadog-version" } -datadog-trace-ot = { module = "com.datadoghq:dd-trace-ot", version.ref = "datadog-version" } -fasterxml = { module = "com.fasterxml.jackson:jackson-bom", version.ref = "fasterxml_version" } -findsecbugs-plugin = { module = "com.h3xstream.findsecbugs:findsecbugs-plugin", version = "1.12.0" } -flyway-core = { module = "org.flywaydb:flyway-core", version.ref = "flyway" } -glassfish = { module = "org.glassfish.jersey:jackson-bom", version.ref = "glassfish_version" } -google-cloud-storage = { module = "com.google.cloud:google-cloud-storage", version = "2.17.2" } -guava = { module = "com.google.guava:guava", version = "31.1-jre" } -hikaricp = { module = "com.zaxxer:HikariCP", version.ref = "hikaricp" } -jackson-annotations = { module = "com.fasterxml.jackson.core:jackson-annotations", version.ref = "fasterxml_version" } -jackson-databind = { module = "com.fasterxml.jackson.core:jackson-databind", version.ref = "fasterxml_version" } -jackson-dataformat = { module = "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml", version.ref = "fasterxml_version" } -jackson-datatype = { module = "com.fasterxml.jackson.datatype:jackson-datatype-jsr310", version.ref = "fasterxml_version" } -jackson-kotlin = { module = "com.fasterxml.jackson.module:jackson-module-kotlin", version.ref = "fasterxml_version" } -java-dogstatsd-client = { module = "com.datadoghq:java-dogstatsd-client", version = "4.1.0" } -java-faker = { module = "com.github.javafaker:javafaker", version = "1.0.2" } -javax-databind = { module = "javax.xml.bind:jaxb-api", version = 
"2.4.0-b180830.0359" } -jcl-over-slf4j = { module = "org.slf4j:jcl-over-slf4j", version.ref = "slf4j" } -jmh-core = { module = "org.openjdk.jmh:jmh-core", version.ref = "jmh" } -jmh-annotations = { module = "org.openjdk.jmh:jmh-generator-annprocess", version.ref = "jmh" } -jooq = { module = "org.jooq:jooq", version.ref = "jooq" } -jooq-codegen = { module = "org.jooq:jooq-codegen", version.ref = "jooq" } -jooq-meta = { module = "org.jooq:jooq-meta", version.ref = "jooq" } -jul-to-slf4j = { module = "org.slf4j:jul-to-slf4j", version.ref = "slf4j" } -junit-jupiter-system-stubs = { module = "uk.org.webcompere:system-stubs-jupiter", version = "2.0.1" } -kotlin-logging = { module = "io.github.oshai:kotlin-logging-jvm", version = "5.1.0" } -kotlinx-cli = { module = "org.jetbrains.kotlinx:kotlinx-cli", version = "0.3.5" } -kotlinx-cli-jvm = { module = "org.jetbrains.kotlinx:kotlinx-cli-jvm", version = "0.3.5" } -launchdarkly = { module = "com.launchdarkly:launchdarkly-java-server-sdk", version = "7.2.6" } -log4j-api = { module = "org.apache.logging.log4j:log4j-api", version.ref = "log4j" } -log4j-core = { module = "org.apache.logging.log4j:log4j-core", version.ref = "log4j" } -log4j-slf4j2-impl = { module = "org.apache.logging.log4j:log4j-slf4j2-impl", version.ref = "log4j" } -log4j-slf4j-impl = { module = "org.apache.logging.log4j:log4j-slf4j-impl", version.ref = "log4j" } -log4j-over-slf4j = { module = "org.slf4j:log4j-over-slf4j", version.ref = "slf4j" } -log4j-web = { module = "org.apache.logging.log4j:log4j-web", version.ref = "log4j" } -lombok = { module = "org.projectlombok:lombok", version.ref = "lombok" } -micrometer-statsd = { module = "io.micrometer:micrometer-registry-statsd", version = "1.9.3" } -mockk = { module = "io.mockk:mockk", version = "1.13.3" } -mongo-driver-sync = { module = "org.mongodb:mongodb-driver-sync", version = "4.10.2" } -otel-bom = { module = "io.opentelemetry:opentelemetry-bom", version = "1.14.0" } -otel-sdk = { module = 
"io.opentelemetry:opentelemetry-sdk-metrics", version = "1.14.0" } -otel-sdk-testing = { module = "io.opentelemetry:opentelemetry-sdk-metrics-testing", version = "1.13.0-alpha" } -otel-semconv = { module = "io.opentelemetry:opentelemetry-semconv", version = "1.14.0-alpha" } -postgresql = { module = "org.postgresql:postgresql", version.ref = "postgresql" } -quartz-scheduler = { module = "org.quartz-scheduler:quartz", version = "2.3.2" } -reactor-core = { module = "io.projectreactor:reactor-core", version.ref = "reactor" } -reactor-test = { module = "io.projectreactor:reactor-test", version.ref = "reactor" } -s3 = { module = "software.amazon.awssdk:s3", version = "2.20.20" } -segment-java-analytics = { module = "com.segment.analytics.java:analytics", version.ref = "segment" } -slf4j-api = { module = "org.slf4j:slf4j-api", version.ref = "slf4j" } -slf4j-simple = { module = "org.slf4j:slf4j-simple", version.ref = "slf4j" } -spotbugs-annotations = { module = "com.github.spotbugs:spotbugs-annotations", version = "4.7.3" } -temporal-sdk = { module = "io.temporal:temporal-sdk", version.ref = "temporal" } -temporal-serviceclient = { module = "io.temporal:temporal-serviceclient", version.ref = "temporal" } -temporal-testing = { module = "io.temporal:temporal-testing", version.ref = "temporal" } -debezium-api = { module = "io.debezium:debezium-api", version.ref = "debezium"} -debezium-embedded = { module = "io.debezium:debezium-embedded", version.ref = "debezium"} -debezium-sqlserver = { module = "io.debezium:debezium-connector-sqlserver", version.ref = "debezium"} -debezium-mongodb = { module = "io.debezium:debezium-connector-mongodb", version.ref = "debezium" } -debezium-mysql = { module = "io.debezium:debezium-connector-mysql", version.ref = "debezium"} -debezium-postgres = { module = "io.debezium:debezium-connector-postgres", version.ref = "debezium"} - -[bundles] -apache = ["apache-commons", "apache-commons-lang"] -datadog = ["datadog-trace-api", "datadog-trace-ot"] 
-jackson = ["jackson-databind", "jackson-annotations", "jackson-dataformat", "jackson-datatype"] -log4j = ["log4j-api", "log4j-core", "log4j-slf4j-impl", "log4j-slf4j2-impl", "log4j-web"] -slf4j = ["jul-to-slf4j", "jcl-over-slf4j", "log4j-over-slf4j"] -temporal = ["temporal-sdk", "temporal-serviceclient"] - -[plugins] -kotlin-jvm = { id = "org.jetbrains.kotlin.jvm", version.ref = "kotlin" } diff --git a/settings.gradle b/settings.gradle index 2a4e5e292fd6..f89f86c38e7d 100644 --- a/settings.gradle +++ b/settings.gradle @@ -102,12 +102,6 @@ dependencyResolutionManagement { } } } - - versionCatalogs { - libs { - from(files("deps.toml")) - } - } } gradleEnterprise { From 155d58d0fba98abf9c0c5fd52f14ca49f194a316 Mon Sep 17 00:00:00 2001 From: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Date: Thu, 5 Sep 2024 19:32:22 +0200 Subject: [PATCH 30/51] docs(airbyte-cdk): update migration guide (#45153) Signed-off-by: Artem Inzhyyants --- airbyte-cdk/python/cdk-migrations.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/airbyte-cdk/python/cdk-migrations.md b/airbyte-cdk/python/cdk-migrations.md index 02ebf2e751c4..05f72009dddf 100644 --- a/airbyte-cdk/python/cdk-migrations.md +++ b/airbyte-cdk/python/cdk-migrations.md @@ -9,11 +9,23 @@ The changes to Airbyte CDK itself are backwards-compatible, but some changes are - uses the `airbyte_protocol` models directly, or `airbyte_cdk.models`, which points to `airbyte_protocol` models - uses third-party libraries, such as `pandas`, to read data from sources, which output non-native Python objects that cannot be serialized by the [orjson](https://github.com/ijl/orjson) library. +> [!NOTE] +> All Serializers have omit_none=True parameter that is applied recursively. Thus, all None values are excluded from output. +> This is expected behaviour and does not break anything in protocol. 
+ ### Updating direct usage of Pydantic based Airbyte Protocol Models If the connector uses Pydantic based Airbyte Protocol Models, the code will need to be updated to reflect the changes `pydantic`. It is recommended to import protocol classes not directly by `import airbyte_protocol` statement, but from `airbyte_cdk.models` package. -It is also recommended to use `Serializers` from `airbyte_cdk.models` to manipulate the data or convert to/from JSON. +It is also recommended to use *-`Serializer` from `airbyte_cdk.models` to manipulate the data or convert to/from JSON, e.g. +```python3 +# Before (pydantic model message serialization) +AirbyteMessage().model_dump_json() + +# After (dataclass model serialization) +orjson.dumps(AirbyteMessageSerializer.dump(AirbyteMessage())).decode() + +``` ### Updating third-party libraries From 011f40b782f801a07f8f516acae449f05b7d8911 Mon Sep 17 00:00:00 2001 From: Ella Rohm-Ensing Date: Thu, 5 Sep 2024 10:55:45 -0700 Subject: [PATCH 31/51] Revert "fix: allow review requirements check on forks (#44404)" (#45175) --- .github/workflows/community_ci.yml | 54 ------------------------------ 1 file changed, 54 deletions(-) diff --git a/.github/workflows/community_ci.yml b/.github/workflows/community_ci.yml index 64113338b726..a1c2827010bd 100644 --- a/.github/workflows/community_ci.yml +++ b/.github/workflows/community_ci.yml @@ -76,60 +76,6 @@ jobs: sentry_dsn: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }} subcommand: "format check all" is_fork: "true" - - check-review-requirements: - name: Check if a review is required from Connector teams on fork - if: github.event.pull_request.head.repo.fork == true - environment: community-ci-auto - runs-on: community-tooling-test-small - needs: fail_on_protected_path_changes - timeout-minutes: 10 - env: - MAIN_BRANCH_NAME: "master" - permissions: - pull-requests: write - steps: - # This checkouts a fork which can contain untrusted code - # It's deemed safe as the review required check is not executing any 
checked out code - - name: Checkout fork - uses: actions/checkout@v4 - with: - repository: ${{ github.event.pull_request.head.repo.full_name }} - ref: ${{ github.event.pull_request.head.sha }} - fetch-depth: 1 - # This will sync the .github folder of the main repo with the fork - # This allows us to use up to date actions and CI logic from the main repo - - name: Pull .github folder from main repository - id: pull_github_folder - run: | - git remote add main https://github.com/airbytehq/airbyte.git - git fetch main ${MAIN_BRANCH_NAME} - git checkout main/${MAIN_BRANCH_NAME} -- .github - git checkout main/${MAIN_BRANCH_NAME} -- airbyte-ci - - name: Install Python - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - name: Install ci-connector-ops package - run: | - pip install pipx - pipx ensurepath - pipx install airbyte-ci/connectors/connector_ops - - name: Write review requirements file - id: write-review-requirements-file - run: write-review-requirements-file >> $GITHUB_OUTPUT - - name: Get mandatory reviewers - id: get-mandatory-reviewers - run: print-mandatory-reviewers >> $GITHUB_OUTPUT - - name: Check if the review requirements are met - if: steps.write-review-requirements-file.outputs.CREATED_REQUIREMENTS_FILE == 'true' - uses: Automattic/action-required-review@v3 - with: - status: ${{ steps.get-mandatory-reviewers.outputs.MANDATORY_REVIEWERS }} - token: ${{ secrets.OCTAVIA_4_ROOT_ACCESS }} - request-reviews: true - requirements-file: .github/connector_org_review_requirements.yaml - connectors_early_ci: name: Run connectors early CI on fork if: github.event.pull_request.head.repo.fork == true From f1812e3e3cbce8147aa4934a00b9e1b7c38ad33a Mon Sep 17 00:00:00 2001 From: Patrick Nilan Date: Thu, 5 Sep 2024 11:10:18 -0700 Subject: [PATCH 32/51] (source-tiktok-marketing) - Upgrade CDK 4.6.1 (#44910) --- .../source-tiktok-marketing/metadata.yaml | 2 +- .../source-tiktok-marketing/poetry.lock | 173 +++++++++++++++++- 
.../source-tiktok-marketing/pyproject.toml | 6 +- docs/integrations/sources/tiktok-marketing.md | 1 + 4 files changed, 171 insertions(+), 11 deletions(-) diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/metadata.yaml b/airbyte-integrations/connectors/source-tiktok-marketing/metadata.yaml index d8654d5a8c87..40fe258309a9 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/metadata.yaml +++ b/airbyte-integrations/connectors/source-tiktok-marketing/metadata.yaml @@ -11,7 +11,7 @@ data: connectorSubtype: api connectorType: source definitionId: 4bfac00d-ce15-44ff-95b9-9e3c3e8fbd35 - dockerImageTag: 4.2.5 + dockerImageTag: 4.3.0 dockerRepository: airbyte/source-tiktok-marketing documentationUrl: https://docs.airbyte.com/integrations/sources/tiktok-marketing githubIssueLabel: source-tiktok-marketing diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/poetry.lock b/airbyte-integrations/connectors/source-tiktok-marketing/poetry.lock index 1f0cf1fbfaa1..8222b1c279a3 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/poetry.lock +++ b/airbyte-integrations/connectors/source-tiktok-marketing/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-cdk" -version = "3.9.6" +version = "4.6.1" description = "A framework for writing Airbyte Connectors." 
optional = false -python-versions = "<4.0,>=3.9" +python-versions = "<4.0,>=3.10" files = [ - {file = "airbyte_cdk-3.9.6-py3-none-any.whl", hash = "sha256:4d4cb095926249247d87437c8b4dd762df52bb8e24c76e05f166982842f6876d"}, - {file = "airbyte_cdk-3.9.6.tar.gz", hash = "sha256:9e31eadeb32b63c1b4b9fbcb26334239ca83c314645b4a164e01c95e9a8ab1f5"}, + {file = "airbyte_cdk-4.6.1-py3-none-any.whl", hash = "sha256:b0cb4a145e356327a922e74ae619637bf491a4c5b9fb4c88cefddbc246f28c67"}, + {file = "airbyte_cdk-4.6.1.tar.gz", hash = "sha256:64827fd277f6fccef220594caa37364c173bf093288640a57f36899c2e194094"}, ] [package.dependencies] @@ -24,6 +24,7 @@ Jinja2 = ">=3.1.2,<3.2.0" jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" langchain_core = "0.1.42" +nltk = "3.8.1" pendulum = "<3.0.0" pydantic = ">=2.7,<3.0" pyjwt = ">=2.8.0,<3.0.0" @@ -36,7 +37,7 @@ requests_cache = "*" wcmatch = "8.4" [package.extras] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pandas (==2.2.0)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "python-calamine (==0.2.3)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -364,6 +365,20 @@ files = [ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" 
+files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "colorama" version = "0.4.6" @@ -591,6 +606,17 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "joblib" +version = "1.4.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, + {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, +] + [[package]] name = "jsonpatch" version = "1.33" @@ -756,6 +782,31 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "nltk" +version = "3.8.1" +description = "Natural Language Toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, + {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, +] + +[package.dependencies] +click = "*" +joblib = "*" +regex = ">=2021.8.3" +tqdm = "*" + +[package.extras] +all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] +corenlp = ["requests"] +machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + [[package]] name = "orjson" version = "3.10.7" @@ -1252,6 +1303,94 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = 
"sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +[[package]] +name = "regex" +version = "2024.7.24" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.8" +files = [ + {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b0d3f567fafa0633aee87f08b9276c7062da9616931382993c03808bb68ce"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3426de3b91d1bc73249042742f45c2148803c111d1175b283270177fdf669024"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f273674b445bcb6e4409bf8d1be67bc4b58e8b46fd0d560055d515b8830063cd"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23acc72f0f4e1a9e6e9843d6328177ae3074b4182167e34119ec7233dfeccf53"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65fd3d2e228cae024c411c5ccdffae4c315271eee4a8b839291f84f796b34eca"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c414cbda77dbf13c3bc88b073a1a9f375c7b0cb5e115e15d4b73ec3a2fbc6f59"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7a89eef64b5455835f5ed30254ec19bf41f7541cd94f266ab7cbd463f00c41"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19c65b00d42804e3fbea9708f0937d157e53429a39b7c61253ff15670ff62cb5"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7a5486ca56c8869070a966321d5ab416ff0f83f30e0e2da1ab48815c8d165d46"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f51f9556785e5a203713f5efd9c085b4a45aecd2a42573e2b5041881b588d1f"}, + {file = 
"regex-2024.7.24-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4997716674d36a82eab3e86f8fa77080a5d8d96a389a61ea1d0e3a94a582cf7"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c0abb5e4e8ce71a61d9446040c1e86d4e6d23f9097275c5bd49ed978755ff0fe"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:18300a1d78cf1290fa583cd8b7cde26ecb73e9f5916690cf9d42de569c89b1ce"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:416c0e4f56308f34cdb18c3f59849479dde5b19febdcd6e6fa4d04b6c31c9faa"}, + {file = "regex-2024.7.24-cp310-cp310-win32.whl", hash = "sha256:fb168b5924bef397b5ba13aabd8cf5df7d3d93f10218d7b925e360d436863f66"}, + {file = "regex-2024.7.24-cp310-cp310-win_amd64.whl", hash = "sha256:6b9fc7e9cc983e75e2518496ba1afc524227c163e43d706688a6bb9eca41617e"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:382281306e3adaaa7b8b9ebbb3ffb43358a7bbf585fa93821300a418bb975281"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4fdd1384619f406ad9037fe6b6eaa3de2749e2e12084abc80169e8e075377d3b"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d974d24edb231446f708c455fd08f94c41c1ff4f04bcf06e5f36df5ef50b95a"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ec4419a3fe6cf8a4795752596dfe0adb4aea40d3683a132bae9c30b81e8d73"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb563dd3aea54c797adf513eeec819c4213d7dbfc311874eb4fd28d10f2ff0f2"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45104baae8b9f67569f0f1dca5e1f1ed77a54ae1cd8b0b07aba89272710db61e"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:994448ee01864501912abf2bad9203bffc34158e80fe8bfb5b031f4f8e16da51"}, + {file = 
"regex-2024.7.24-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fac296f99283ac232d8125be932c5cd7644084a30748fda013028c815ba3364"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e37e809b9303ec3a179085415cb5f418ecf65ec98cdfe34f6a078b46ef823ee"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:01b689e887f612610c869421241e075c02f2e3d1ae93a037cb14f88ab6a8934c"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f6442f0f0ff81775eaa5b05af8a0ffa1dda36e9cf6ec1e0d3d245e8564b684ce"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:871e3ab2838fbcb4e0865a6e01233975df3a15e6fce93b6f99d75cacbd9862d1"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c918b7a1e26b4ab40409820ddccc5d49871a82329640f5005f73572d5eaa9b5e"}, + {file = "regex-2024.7.24-cp311-cp311-win32.whl", hash = "sha256:2dfbb8baf8ba2c2b9aa2807f44ed272f0913eeeba002478c4577b8d29cde215c"}, + {file = "regex-2024.7.24-cp311-cp311-win_amd64.whl", hash = "sha256:538d30cd96ed7d1416d3956f94d54e426a8daf7c14527f6e0d6d425fcb4cca52"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4ebef608553aff8deb845c7f4f1d0740ff76fa672c011cc0bacb2a00fbde86"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:74007a5b25b7a678459f06559504f1eec2f0f17bca218c9d56f6a0a12bfffdad"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7df9ea48641da022c2a3c9c641650cd09f0cd15e8908bf931ad538f5ca7919c9"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1141a1dcc32904c47f6846b040275c6e5de0bf73f17d7a409035d55b76f289"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80c811cfcb5c331237d9bad3bea2c391114588cf4131707e84d9493064d267f9"}, + 
{file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7214477bf9bd195894cf24005b1e7b496f46833337b5dedb7b2a6e33f66d962c"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d55588cba7553f0b6ec33130bc3e114b355570b45785cebdc9daed8c637dd440"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:558a57cfc32adcf19d3f791f62b5ff564922942e389e3cfdb538a23d65a6b610"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a512eed9dfd4117110b1881ba9a59b31433caed0c4101b361f768e7bcbaf93c5"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:86b17ba823ea76256b1885652e3a141a99a5c4422f4a869189db328321b73799"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5eefee9bfe23f6df09ffb6dfb23809f4d74a78acef004aa904dc7c88b9944b05"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:731fcd76bbdbf225e2eb85b7c38da9633ad3073822f5ab32379381e8c3c12e94"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eaef80eac3b4cfbdd6de53c6e108b4c534c21ae055d1dbea2de6b3b8ff3def38"}, + {file = "regex-2024.7.24-cp312-cp312-win32.whl", hash = "sha256:185e029368d6f89f36e526764cf12bf8d6f0e3a2a7737da625a76f594bdfcbfc"}, + {file = "regex-2024.7.24-cp312-cp312-win_amd64.whl", hash = "sha256:2f1baff13cc2521bea83ab2528e7a80cbe0ebb2c6f0bfad15be7da3aed443908"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:66b4c0731a5c81921e938dcf1a88e978264e26e6ac4ec96a4d21ae0354581ae0"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:88ecc3afd7e776967fa16c80f974cb79399ee8dc6c96423321d6f7d4b881c92b"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64bd50cf16bcc54b274e20235bf8edbb64184a30e1e53873ff8d444e7ac656b2"}, + 
{file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb462f0e346fcf41a901a126b50f8781e9a474d3927930f3490f38a6e73b6950"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a82465ebbc9b1c5c50738536fdfa7cab639a261a99b469c9d4c7dcbb2b3f1e57"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68a8f8c046c6466ac61a36b65bb2395c74451df2ffb8458492ef49900efed293"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac8e84fff5d27420f3c1e879ce9929108e873667ec87e0c8eeb413a5311adfe"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba2537ef2163db9e6ccdbeb6f6424282ae4dea43177402152c67ef869cf3978b"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:43affe33137fcd679bdae93fb25924979517e011f9dea99163f80b82eadc7e53"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c9bb87fdf2ab2370f21e4d5636e5317775e5d51ff32ebff2cf389f71b9b13750"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:945352286a541406f99b2655c973852da7911b3f4264e010218bbc1cc73168f2"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8bc593dcce679206b60a538c302d03c29b18e3d862609317cb560e18b66d10cf"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3f3b6ca8eae6d6c75a6cff525c8530c60e909a71a15e1b731723233331de4169"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c51edc3541e11fbe83f0c4d9412ef6c79f664a3745fab261457e84465ec9d5a8"}, + {file = "regex-2024.7.24-cp38-cp38-win32.whl", hash = "sha256:d0a07763776188b4db4c9c7fb1b8c494049f84659bb387b71c73bbc07f189e96"}, + {file = 
"regex-2024.7.24-cp38-cp38-win_amd64.whl", hash = "sha256:8fd5afd101dcf86a270d254364e0e8dddedebe6bd1ab9d5f732f274fa00499a5"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0ffe3f9d430cd37d8fa5632ff6fb36d5b24818c5c986893063b4e5bdb84cdf24"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25419b70ba00a16abc90ee5fce061228206173231f004437730b67ac77323f0d"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33e2614a7ce627f0cdf2ad104797d1f68342d967de3695678c0cb84f530709f8"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d33a0021893ede5969876052796165bab6006559ab845fd7b515a30abdd990dc"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04ce29e2c5fedf296b1a1b0acc1724ba93a36fb14031f3abfb7abda2806c1535"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b16582783f44fbca6fcf46f61347340c787d7530d88b4d590a397a47583f31dd"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:836d3cc225b3e8a943d0b02633fb2f28a66e281290302a79df0e1eaa984ff7c1"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:438d9f0f4bc64e8dea78274caa5af971ceff0f8771e1a2333620969936ba10be"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:973335b1624859cb0e52f96062a28aa18f3a5fc77a96e4a3d6d76e29811a0e6e"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c5e69fd3eb0b409432b537fe3c6f44ac089c458ab6b78dcec14478422879ec5f"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fbf8c2f00904eaf63ff37718eb13acf8e178cb940520e47b2f05027f5bb34ce3"}, + {file = 
"regex-2024.7.24-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2757ace61bc4061b69af19e4689fa4416e1a04840f33b441034202b5cd02d4"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:44fc61b99035fd9b3b9453f1713234e5a7c92a04f3577252b45feefe1b327759"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:84c312cdf839e8b579f504afcd7b65f35d60b6285d892b19adea16355e8343c9"}, + {file = "regex-2024.7.24-cp39-cp39-win32.whl", hash = "sha256:ca5b2028c2f7af4e13fb9fc29b28d0ce767c38c7facdf64f6c2cd040413055f1"}, + {file = "regex-2024.7.24-cp39-cp39-win_amd64.whl", hash = "sha256:7c479f5ae937ec9985ecaf42e2e10631551d909f203e31308c12d703922742f9"}, + {file = "regex-2024.7.24.tar.gz", hash = "sha256:9cfd009eed1a46b27c14039ad5bbc5e71b6367c5b2e6d5f5da0ea91600817506"}, +] + [[package]] name = "requests" version = "2.32.3" @@ -1400,6 +1539,26 @@ files = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +[[package]] +name = "tqdm" +version = "4.66.5" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "typing-extensions" version = "4.12.2" @@ -1537,5 +1696,5 @@ files = [ [metadata] lock-version = "2.0" -python-versions = "^3.9,<3.12" -content-hash = "ef6d6b0a52ced7b5c6f6dffddf21e884885c9009a20910fc6e09908e1158c3b2" +python-versions = "^3.10,<3.12" +content-hash = 
"aa0150d34b519527cabd212067812c373436f15616d6bed58d845188937679cd" diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/pyproject.toml b/airbyte-integrations/connectors/source-tiktok-marketing/pyproject.toml index 6879182a9796..ee11504aaced 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/pyproject.toml +++ b/airbyte-integrations/connectors/source-tiktok-marketing/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.2.5" +version = "4.3.0" name = "source-tiktok-marketing" description = "Source implementation for Tiktok Marketing." authors = [ "Airbyte ",] @@ -16,8 +16,8 @@ repository = "https://github.com/airbytehq/airbyte" include = "source_tiktok_marketing" [tool.poetry.dependencies] -python = "^3.9,<3.12" -airbyte-cdk = "^3" +python = "^3.10,<3.12" +airbyte-cdk = "^4.5.4" [tool.poetry.scripts] source-tiktok-marketing = "source_tiktok_marketing.run:run" diff --git a/docs/integrations/sources/tiktok-marketing.md b/docs/integrations/sources/tiktok-marketing.md index a7a90cafb7cd..42e043ddd4e3 100644 --- a/docs/integrations/sources/tiktok-marketing.md +++ b/docs/integrations/sources/tiktok-marketing.md @@ -138,6 +138,7 @@ The connector is restricted by [requests limitation](https://business-api.tiktok | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------------------------------------------| +| 4.3.0 | 2024-09-03 | [44910](https://github.com/airbytehq/airbyte/pull/44910) | Migrate to CDK v4 | | 4.2.5 | 2024-08-31 | [44954](https://github.com/airbytehq/airbyte/pull/44954) | Update dependencies | | 4.2.4 | 2024-08-24 | [43783](https://github.com/airbytehq/airbyte/pull/43783) | Update dependencies | | 4.2.3 | 2024-08-19 | [44048](https://github.com/airbytehq/airbyte/pull/44048) | Fix 
include_deleted toggle | From e6d6b8e025cb0ca0780f6ec7a0e937f1d97228c2 Mon Sep 17 00:00:00 2001 From: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Date: Thu, 5 Sep 2024 14:20:58 -0400 Subject: [PATCH 33/51] Source Looker: Enable latest version in Cloud registry (#45161) Co-authored-by: Octavia Squidington III --- .../connectors/source-looker/metadata.yaml | 26 ++- .../connectors/source-looker/poetry.lock | 150 +++++++++--------- .../connectors/source-looker/pyproject.toml | 2 +- docs/integrations/sources/looker.md | 45 +++--- 4 files changed, 110 insertions(+), 113 deletions(-) diff --git a/airbyte-integrations/connectors/source-looker/metadata.yaml b/airbyte-integrations/connectors/source-looker/metadata.yaml index a3814b152344..3431427422f0 100644 --- a/airbyte-integrations/connectors/source-looker/metadata.yaml +++ b/airbyte-integrations/connectors/source-looker/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: api connectorType: source definitionId: 00405b19-9768-4e0c-b1ae-9fc2ee2b2a8c - dockerImageTag: 1.0.6 + dockerImageTag: 1.0.7 dockerRepository: airbyte/source-looker githubIssueLabel: source-looker icon: looker.svg @@ -14,7 +14,7 @@ data: packageName: airbyte-source-looker registryOverrides: cloud: - enabled: false + enabled: true oss: enabled: true releaseStage: alpha @@ -31,19 +31,15 @@ data: sl: 100 ql: 100 supportLevel: community - # Disable the acceptanceTests suite for now - # They are not passing - # No Airbyte cloud usage - # - # connectorTestSuitesOptions: - # - suite: unitTests - # - suite: acceptanceTests - # testSecrets: - # - name: SECRET_SOURCE-LOOKER__CREDS - # fileName: config.json - # secretStore: - # type: GSM - # alias: airbyte-connector-testing-secret-store + connectorTestSuitesOptions: + - suite: unitTests + - suite: acceptanceTests + testSecrets: + - name: SECRET_SOURCE-LOOKER__CREDS + fileName: config.json + secretStore: + type: GSM + alias: airbyte-connector-testing-secret-store connectorBuildOptions: 
baseImage: docker.io/airbyte/python-connector-base:2.0.0@sha256:c44839ba84406116e8ba68722a0f30e8f6e7056c726f447681bb9e9ece8bd916 metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-looker/poetry.lock b/airbyte-integrations/connectors/source-looker/poetry.lock index b186547fa495..49eef44fa5d3 100644 --- a/airbyte-integrations/connectors/source-looker/poetry.lock +++ b/airbyte-integrations/connectors/source-looker/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "airbyte-cdk" @@ -180,78 +180,78 @@ files = [ [[package]] name = "cffi" -version = "1.17.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash 
= "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash 
= "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = 
"cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = 
"cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = 
"cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -689,13 +689,13 @@ extended-testing = ["jinja2 (>=3,<4)"] [[package]] name = "langsmith" 
-version = "0.1.108" +version = "0.1.114" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.108-py3-none-any.whl", hash = "sha256:407f318b0989e33f2cd30bc2fbd443e4ddfa7c2a93de7f795fb6b119b015583c"}, - {file = "langsmith-0.1.108.tar.gz", hash = "sha256:42f603e2d5770ba36093951bdb29eaab22451cb12ab8c062340c722cf60d4cec"}, + {file = "langsmith-0.1.114-py3-none-any.whl", hash = "sha256:2b6b6b49ddb1cea75f465da107ddc21e60d3c7242813dcc0de90f914e4957249"}, + {file = "langsmith-0.1.114.tar.gz", hash = "sha256:1683e1505d034d1bf7c960067c1357fd0d294172dd20540f913093e4b86857a2"}, ] [package.dependencies] @@ -1445,13 +1445,13 @@ fixture = ["fixtures"] [[package]] name = "setuptools" -version = "74.0.0" +version = "74.1.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"}, - {file = "setuptools-74.0.0.tar.gz", hash = "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"}, + {file = "setuptools-74.1.2-py3-none-any.whl", hash = "sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308"}, + {file = "setuptools-74.1.2.tar.gz", hash = "sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6"}, ] [package.extras] diff --git a/airbyte-integrations/connectors/source-looker/pyproject.toml b/airbyte-integrations/connectors/source-looker/pyproject.toml index 828bebdfd306..f2af56a65421 100644 --- a/airbyte-integrations/connectors/source-looker/pyproject.toml +++ b/airbyte-integrations/connectors/source-looker/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.0.6" +version = "1.0.7" name = "source-looker" 
description = "Source implementation for looker." authors = [ "Airbyte ",] diff --git a/docs/integrations/sources/looker.md b/docs/integrations/sources/looker.md index 78db9526bf2c..96da012ffa32 100644 --- a/docs/integrations/sources/looker.md +++ b/docs/integrations/sources/looker.md @@ -85,27 +85,28 @@ Please read the "API3 Key" section in [Looker's information for users docs](http | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------- | -| 1.0.6 | 2024-08-31 | [45014](https://github.com/airbytehq/airbyte/pull/45014) | Update dependencies | -| 1.0.5 | 2024-08-24 | [44730](https://github.com/airbytehq/airbyte/pull/44730) | Update dependencies | -| 1.0.4 | 2024-08-17 | [44252](https://github.com/airbytehq/airbyte/pull/44252) | Update dependencies | -| 1.0.3 | 2024-08-12 | [43873](https://github.com/airbytehq/airbyte/pull/43873) | Update dependencies | -| 1.0.2 | 2024-08-10 | [43504](https://github.com/airbytehq/airbyte/pull/43504) | Update dependencies | -| 1.0.1 | 2024-08-03 | [40148](https://github.com/airbytehq/airbyte/pull/40148) | Update dependencies | -| 1.0.0 | 2024-07-23 | [37464](https://github.com/airbytehq/airbyte/pull/37464) | Migrate to LowCode | -| 0.2.12 | 2024-06-06 | [39191](https://github.com/airbytehq/airbyte/pull/39191) | [autopull] Upgrade base image to v1.2.2 | -| 0.2.11 | 2024-06-03 | [38914](https://github.com/airbytehq/airbyte/pull/38914) | Replace AirbyteLogger with logging.Logger | -| 0.2.10 | 2024-06-03 | [38914](https://github.com/airbytehq/airbyte/pull/38914) | Replace AirbyteLogger with logging.Logger | -| 0.2.9 | 2024-05-20 | [38396](https://github.com/airbytehq/airbyte/pull/38396) | [autopull] base image + poetry + up_to_date | -| 0.2.8 | 2022-12-07 | [20182](https://github.com/airbytehq/airbyte/pull/20182) | Fix schema transformation issue | -| 0.2.7 | 
2022-01-24 | [9609](https://github.com/airbytehq/airbyte/pull/9609) | Migrate to native CDK and fixing of intergration tests. | -| 0.2.6 | 2021-12-07 | [8578](https://github.com/airbytehq/airbyte/pull/8578) | Update titles and descriptions. | -| 0.2.5 | 2021-10-27 | [7284](https://github.com/airbytehq/airbyte/pull/7284) | Migrate Looker source to CDK structure, add SAT testing. | -| 0.2.4 | 2021-06-25 | [3911](https://github.com/airbytehq/airbyte/pull/3911) | Add `run_look` endpoint. | -| 0.2.3 | 2021-06-22 | [3587](https://github.com/airbytehq/airbyte/pull/3587) | Add support for self-hosted instances. | -| 0.2.2 | 2021-06-09 | [3973](https://github.com/airbytehq/airbyte/pull/3973) | Add `AIRBYTE_ENTRYPOINT` for kubernetes support. | -| 0.2.1 | 2021-04-02 | [2726](https://github.com/airbytehq/airbyte/pull/2726) | Fix connector base versioning. | -| 0.2.0 | 2021-03-09 | [2238](https://github.com/airbytehq/airbyte/pull/2238) | Allow future / unknown properties in the protocol. | -| 0.1.1 | 2021-01-27 | [1857](https://github.com/airbytehq/airbyte/pull/1857) | Fix failed CI tests. | -| 0.1.0 | 2020-12-24 | [1441](https://github.com/airbytehq/airbyte/pull/1441) | Add looker connector. 
| +| 1.0.7 | 2024-09-05 | [45161](https://github.com/airbytehq/airbyte/pull/45161) | Enable connector in Cloud registry | +| 1.0.6 | 2024-08-31 | [45014](https://github.com/airbytehq/airbyte/pull/45014) | Update dependencies | +| 1.0.5 | 2024-08-24 | [44730](https://github.com/airbytehq/airbyte/pull/44730) | Update dependencies | +| 1.0.4 | 2024-08-17 | [44252](https://github.com/airbytehq/airbyte/pull/44252) | Update dependencies | +| 1.0.3 | 2024-08-12 | [43873](https://github.com/airbytehq/airbyte/pull/43873) | Update dependencies | +| 1.0.2 | 2024-08-10 | [43504](https://github.com/airbytehq/airbyte/pull/43504) | Update dependencies | +| 1.0.1 | 2024-08-03 | [40148](https://github.com/airbytehq/airbyte/pull/40148) | Update dependencies | +| 1.0.0 | 2024-07-23 | [37464](https://github.com/airbytehq/airbyte/pull/37464) | Migrate to LowCode | +| 0.2.12 | 2024-06-06 | [39191](https://github.com/airbytehq/airbyte/pull/39191) | [autopull] Upgrade base image to v1.2.2 | +| 0.2.11 | 2024-06-03 | [38914](https://github.com/airbytehq/airbyte/pull/38914) | Replace AirbyteLogger with logging.Logger | +| 0.2.10 | 2024-06-03 | [38914](https://github.com/airbytehq/airbyte/pull/38914) | Replace AirbyteLogger with logging.Logger | +| 0.2.9 | 2024-05-20 | [38396](https://github.com/airbytehq/airbyte/pull/38396) | [autopull] base image + poetry + up_to_date | +| 0.2.8 | 2022-12-07 | [20182](https://github.com/airbytehq/airbyte/pull/20182) | Fix schema transformation issue | +| 0.2.7 | 2022-01-24 | [9609](https://github.com/airbytehq/airbyte/pull/9609) | Migrate to native CDK and fixing of intergration tests. | +| 0.2.6 | 2021-12-07 | [8578](https://github.com/airbytehq/airbyte/pull/8578) | Update titles and descriptions. | +| 0.2.5 | 2021-10-27 | [7284](https://github.com/airbytehq/airbyte/pull/7284) | Migrate Looker source to CDK structure, add SAT testing. 
| +| 0.2.4 | 2021-06-25 | [3911](https://github.com/airbytehq/airbyte/pull/3911) | Add `run_look` endpoint. | +| 0.2.3 | 2021-06-22 | [3587](https://github.com/airbytehq/airbyte/pull/3587) | Add support for self-hosted instances. | +| 0.2.2 | 2021-06-09 | [3973](https://github.com/airbytehq/airbyte/pull/3973) | Add `AIRBYTE_ENTRYPOINT` for kubernetes support. | +| 0.2.1 | 2021-04-02 | [2726](https://github.com/airbytehq/airbyte/pull/2726) | Fix connector base versioning. | +| 0.2.0 | 2021-03-09 | [2238](https://github.com/airbytehq/airbyte/pull/2238) | Allow future / unknown properties in the protocol. | +| 0.1.1 | 2021-01-27 | [1857](https://github.com/airbytehq/airbyte/pull/1857) | Fix failed CI tests. | +| 0.1.0 | 2020-12-24 | [1441](https://github.com/airbytehq/airbyte/pull/1441) | Add looker connector. | From 6730a3b47803d9a1e4d0e0d8a3bfce4405cb5e23 Mon Sep 17 00:00:00 2001 From: Stephane Geneix <147216312+stephane-airbyte@users.noreply.github.com> Date: Thu, 5 Sep 2024 14:38:03 -0700 Subject: [PATCH 34/51] destination-s3: don't reuse names of existing objects (#45143) --- airbyte-cdk/java/airbyte-cdk/README.md | 1 + .../src/main/resources/version.properties | 2 +- .../destination/s3/S3StorageOperations.kt | 75 +++++++++---------- .../parquet/JsonRecordParquetPreprocessor.kt | 6 +- .../destination/s3/S3StorageOperationsTest.kt | 33 +++++++- .../connectors/destination-s3/build.gradle | 2 +- .../connectors/destination-s3/metadata.yaml | 2 +- docs/integrations/destinations/s3.md | 3 +- 8 files changed, 76 insertions(+), 48 deletions(-) diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md index e55547ab4834..0b5c423cca46 100644 --- a/airbyte-cdk/java/airbyte-cdk/README.md +++ b/airbyte-cdk/java/airbyte-cdk/README.md @@ -174,6 +174,7 @@ corresponds to that version. 
| Version | Date | Pull Request | Subject | | :--------- | :--------- | :----------------------------------------------------------- | :------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| 0.44.21 | 2024-09-04 | [\#45143](https://github.com/airbytehq/airbyte/pull/45143) | S3-destination: don't overwrite existing files, skip those file indexes instead | | 0.44.20 | 2024-08-30 | [\#44933](https://github.com/airbytehq/airbyte/pull/44933) | Avro/Parquet destinations: handle `{}` schemas inside objects/arrays | | 0.44.19 | 2024-08-20 | [\#44476](https://github.com/airbytehq/airbyte/pull/44476) | Increase Jackson message length limit to 100mb | | 0.44.18 | 2024-08-22 | [\#44505](https://github.com/airbytehq/airbyte/pull/44505) | Improve handling of incoming debezium change events | diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index 940c3205c2ba..5590734237c5 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.44.20 +version=0.44.21 diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperations.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperations.kt index b92f79722bb9..894e53789973 100644 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperations.kt +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperations.kt @@ -45,6 +45,7 @@ open class S3StorageOperations( private val s3FilenameTemplateManager: 
S3FilenameTemplateManager = S3FilenameTemplateManager() private val partCounts: ConcurrentMap = ConcurrentHashMap() + private val objectNameByPrefix: ConcurrentMap> = ConcurrentHashMap() override fun getBucketObjectPath( namespace: String?, @@ -167,6 +168,32 @@ open class S3StorageOperations( * @return the uploaded filename, which is different from the serialized buffer filename * */ + @VisibleForTesting + fun getFileName( + objectPath: String, + recordsData: SerializableBuffer, + ): String { + var fullObjectKey: String + do { + val partId: String = getPartId(objectPath) + val fileExtension: String = getExtension(recordsData.filename) + fullObjectKey = + if (!s3Config.fileNamePattern.isNullOrBlank()) { + s3FilenameTemplateManager.applyPatternToFilename( + S3FilenameTemplateParameterObject.builder() + .partId(partId) + .recordsData(recordsData) + .objectPath(objectPath) + .fileExtension(fileExtension) + .fileNamePattern(s3Config.fileNamePattern) + .build(), + ) + } else { + objectPath + partId + fileExtension + } + } while (objectNameByPrefix.getValue(objectPath).contains(fullObjectKey)) + return fullObjectKey + } @Throws(IOException::class) private fun loadDataIntoBucket( objectPath: String, @@ -175,22 +202,7 @@ open class S3StorageOperations( ): String { val partSize: Long = DEFAULT_PART_SIZE.toLong() val bucket: String? 
= s3Config.bucketName - val partId: String = getPartId(objectPath) - val fileExtension: String = getExtension(recordsData.filename) - val fullObjectKey: String = - if (!s3Config.fileNamePattern.isNullOrBlank()) { - s3FilenameTemplateManager.applyPatternToFilename( - S3FilenameTemplateParameterObject.builder() - .partId(partId) - .recordsData(recordsData) - .objectPath(objectPath) - .fileExtension(fileExtension) - .fileNamePattern(s3Config.fileNamePattern) - .build(), - ) - } else { - objectPath + partId + fileExtension - } + val fullObjectKey: String = getFileName(objectPath, recordsData) val metadata: MutableMap = HashMap() for (blobDecorator: BlobDecorator in blobDecorators) { blobDecorator.updateMetadata(metadata, getMetadataMapping()) @@ -263,31 +275,14 @@ open class S3StorageOperations( ) { AtomicInteger(0) } - - if (partCount.get() == 0) { - var objects: ObjectListing? - var objectCount = 0 - - val bucket: String? = s3Config.bucketName - objects = s3Client.listObjects(bucket, objectPath) - - if (objects != null) { - objectCount += objects.objectSummaries.size - while (objects != null && objects.nextMarker != null) { - objects = - s3Client.listObjects( - ListObjectsRequest() - .withBucketName(bucket) - .withPrefix(objectPath) - .withMarker(objects.nextMarker), - ) - if (objects != null) { - objectCount += objects.objectSummaries.size - } - } + objectNameByPrefix.computeIfAbsent( + objectPath, + ) { + var objectList: Set = setOf() + forObjectsByPage(objectPath) { objectSummaries -> + objectList = objectList + objectSummaries.map { it.key } } - - partCount.set(objectCount) + objectList } return partCount.getAndIncrement().toString() diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/JsonRecordParquetPreprocessor.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/JsonRecordParquetPreprocessor.kt index 
b5c27a64ddc4..63b89f92cb4a 100644 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/JsonRecordParquetPreprocessor.kt +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/kotlin/io/airbyte/cdk/integrations/destination/s3/parquet/JsonRecordParquetPreprocessor.kt @@ -12,7 +12,7 @@ import io.airbyte.cdk.integrations.destination.s3.jsonschema.JsonRecordIdentityM import io.airbyte.commons.jackson.MoreMappers class JsonRecordParquetPreprocessor : JsonRecordIdentityMapper() { - private fun mapCommon(record: JsonNode?, matchingOption: ObjectNode): JsonNode? { + private fun mapCommon(record: JsonNode?, matchingOption: ObjectNode): ObjectNode { val newObj = MoreMappers.initMapper().createObjectNode() val propertyName = JsonSchemaParquetPreprocessor.typeFieldName(matchingOption) @@ -24,7 +24,7 @@ class JsonRecordParquetPreprocessor : JsonRecordIdentityMapper() { return newObj } - override fun mapUnion(record: JsonNode?, schema: ObjectNode): JsonNode? { + override fun mapUnion(record: JsonNode?, schema: ObjectNode): ObjectNode? { if (record == null || record.isNull) { return null } @@ -35,7 +35,7 @@ class JsonRecordParquetPreprocessor : JsonRecordIdentityMapper() { return mapCommon(record, matchingOption) } - override fun mapCombined(record: JsonNode?, schema: ObjectNode): JsonNode? { + override fun mapCombined(record: JsonNode?, schema: ObjectNode): ObjectNode? 
{ if (record == null || record.isNull) { return null } diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperationsTest.kt b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperationsTest.kt index 750a312f1380..947f96533964 100644 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperationsTest.kt +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/kotlin/io/airbyte/cdk/integrations/destination/s3/S3StorageOperationsTest.kt @@ -10,6 +10,7 @@ import com.amazonaws.services.s3.model.ListObjectsRequest import com.amazonaws.services.s3.model.ObjectListing import com.amazonaws.services.s3.model.S3ObjectSummary import io.airbyte.cdk.integrations.destination.NamingConventionTransformer +import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer import io.airbyte.cdk.integrations.destination.s3.util.S3NameTransformer import java.util.concurrent.Executors import java.util.concurrent.TimeUnit @@ -23,6 +24,7 @@ import org.junit.jupiter.api.Test import org.mockito.ArgumentCaptor import org.mockito.ArgumentMatchers import org.mockito.Mockito +import org.mockito.kotlin.eq class S3StorageOperationsTest { @@ -31,7 +33,9 @@ class S3StorageOperationsTest { private const val FAKE_BUCKET_PATH = "fake-bucketPath" private const val NAMESPACE = "namespace" private const val STREAM_NAME = "stream_name1" - private const val OBJECT_TO_DELETE = "$NAMESPACE/$STREAM_NAME/2022_04_04_123456789_0.csv.gz" + private const val OBJECT_PREFIX = "$NAMESPACE/$STREAM_NAME/2022_04_04_123456789_" + private const val OBJECT_EXTENSION = ".csv.gz" + private const val OBJECT_TO_DELETE = "${OBJECT_PREFIX}1$OBJECT_EXTENSION" } private lateinit var s3Client: AmazonS3 @@ -74,6 +78,15 @@ class S3StorageOperationsTest { ), ) .thenReturn(results) + Mockito.`when`( + 
s3Client.listObjects( + eq(BUCKET_NAME), + ArgumentMatchers.any( + String::class.java, + ), + ), + ) + .thenReturn(results) val s3Config = S3DestinationConfig.create(BUCKET_NAME, FAKE_BUCKET_PATH, "fake-region") @@ -210,4 +223,22 @@ class S3StorageOperationsTest { assertEquals("1", s3StorageOperations.getPartId(FAKE_BUCKET_PATH)) assertEquals("0", s3StorageOperations.getPartId("other_path")) } + + @Test + fun testGetFileName() { + val recordsData = + Mockito.mock( + SerializableBuffer::class.java, + ) + Mockito.`when`(recordsData.filename).thenReturn(".csv.gz") + assertEquals( + OBJECT_PREFIX + 0 + OBJECT_EXTENSION, + s3StorageOperations.getFileName(OBJECT_PREFIX, recordsData) + ) + // 1 is skipped because it's already existing + assertEquals( + OBJECT_PREFIX + 2 + OBJECT_EXTENSION, + s3StorageOperations.getFileName(OBJECT_PREFIX, recordsData) + ) + } } diff --git a/airbyte-integrations/connectors/destination-s3/build.gradle b/airbyte-integrations/connectors/destination-s3/build.gradle index 67bfe4e1dd63..93b032e93b04 100644 --- a/airbyte-integrations/connectors/destination-s3/build.gradle +++ b/airbyte-integrations/connectors/destination-s3/build.gradle @@ -4,7 +4,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.44.20' + cdkVersionRequired = '0.44.21' features = ['db-destinations', 's3-destinations'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-s3/metadata.yaml b/airbyte-integrations/connectors/destination-s3/metadata.yaml index fb887686ce4e..333b8d13f9ba 100644 --- a/airbyte-integrations/connectors/destination-s3/metadata.yaml +++ b/airbyte-integrations/connectors/destination-s3/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: file connectorType: destination definitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362 - dockerImageTag: 1.0.4 + dockerImageTag: 1.0.5 dockerRepository: airbyte/destination-s3 githubIssueLabel: destination-s3 icon: s3.svg diff --git a/docs/integrations/destinations/s3.md 
b/docs/integrations/destinations/s3.md index 37b044fb9de2..e10e13ec427b 100644 --- a/docs/integrations/destinations/s3.md +++ b/docs/integrations/destinations/s3.md @@ -535,7 +535,8 @@ To see connector limitations, or troubleshoot your S3 connector, see more [in ou Expand to review | Version | Date | Pull Request | Subject | -| :------ | :--------- | :--------------------------------------------------------- | :--------------------------------------------------------------------------------------------------------------------------------------------------- | +|:--------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------| +| 1.0.5 | 2024-09-05 | [45143](https://github.com/airbytehq/airbyte/pull/45143) | don't overwrite (and delete) existing files, skip indexes instead | | 1.0.4 | 2024-08-30 | [44933](https://github.com/airbytehq/airbyte/pull/44933) | Fix: Avro/Parquet: handle empty schemas in nested objects/lists | | 1.0.3 | 2024-08-20 | [44476](https://github.com/airbytehq/airbyte/pull/44476) | Increase message parsing limit to 100mb | | 1.0.2 | 2024-08-19 | [44401](https://github.com/airbytehq/airbyte/pull/44401) | Fix: S3 Avro/Parquet: handle nullable top-level schema | From 081a0ca111c15e4693850cba5f05d53087ab8d84 Mon Sep 17 00:00:00 2001 From: Johnny Schmidt Date: Thu, 5 Sep 2024 15:04:33 -0700 Subject: [PATCH 35/51] Bulk Load CDK: Unit tests for memory manager (#45091) --- .../io/airbyte/cdk/state/MemoryManager.kt | 50 ++++++--- .../io/airbyte/cdk/state/MemoryManagerTest.kt | 102 ++++++++++++++++++ 2 files changed, 137 insertions(+), 15 deletions(-) create mode 100644 airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/MemoryManagerTest.kt diff --git a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/MemoryManager.kt 
b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/MemoryManager.kt index 4e03a2ab9b23..d191223b08fd 100644 --- a/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/MemoryManager.kt +++ b/airbyte-cdk/bulk/core/load/src/main/kotlin/io/airbyte/cdk/state/MemoryManager.kt @@ -4,10 +4,12 @@ package io.airbyte.cdk.state +import io.micronaut.context.annotation.Secondary import jakarta.inject.Singleton import java.util.concurrent.atomic.AtomicLong -import java.util.concurrent.locks.ReentrantLock -import kotlin.concurrent.withLock +import kotlinx.coroutines.channels.Channel +import kotlinx.coroutines.sync.Mutex +import kotlinx.coroutines.sync.withLock /** * Manages memory usage for the destination. @@ -17,31 +19,49 @@ import kotlin.concurrent.withLock * TODO: Some degree of logging/monitoring around how accurate we're actually being? */ @Singleton -class MemoryManager { - private val availableMemoryBytes: Long = Runtime.getRuntime().maxMemory() +class MemoryManager(availableMemoryProvider: AvailableMemoryProvider) { + private val totalMemoryBytes: Long = availableMemoryProvider.availableMemoryBytes private var usedMemoryBytes = AtomicLong(0L) - private val memoryLock = ReentrantLock() - private val memoryLockCondition = memoryLock.newCondition() + private val mutex = Mutex() + private val syncChannel = Channel(Channel.UNLIMITED) + val remainingMemoryBytes: Long + get() = totalMemoryBytes - usedMemoryBytes.get() + + /* Attempt to reserve memory. If enough memory is not available, waits until it is, then reserves. 
*/ suspend fun reserveBlocking(memoryBytes: Long) { - memoryLock.withLock { - while (usedMemoryBytes.get() + memoryBytes > availableMemoryBytes) { - memoryLockCondition.await() + if (memoryBytes > totalMemoryBytes) { + throw IllegalArgumentException( + "Requested ${memoryBytes}b memory exceeds ${totalMemoryBytes}b total" + ) + } + + mutex.withLock { + while (usedMemoryBytes.get() + memoryBytes > totalMemoryBytes) { + syncChannel.receive() } usedMemoryBytes.addAndGet(memoryBytes) } } suspend fun reserveRatio(ratio: Double): Long { - val estimatedSize = (availableMemoryBytes.toDouble() * ratio).toLong() + val estimatedSize = (totalMemoryBytes.toDouble() * ratio).toLong() reserveBlocking(estimatedSize) return estimatedSize } - fun release(memoryBytes: Long) { - memoryLock.withLock { - usedMemoryBytes.addAndGet(-memoryBytes) - memoryLockCondition.signalAll() - } + suspend fun release(memoryBytes: Long) { + usedMemoryBytes.addAndGet(-memoryBytes) + syncChannel.send(Unit) } } + +interface AvailableMemoryProvider { + val availableMemoryBytes: Long +} + +@Singleton +@Secondary +class JavaRuntimeAvailableMemoryProvider : AvailableMemoryProvider { + override val availableMemoryBytes: Long = Runtime.getRuntime().maxMemory() +} diff --git a/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/MemoryManagerTest.kt b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/MemoryManagerTest.kt new file mode 100644 index 000000000000..5bc28a27cda1 --- /dev/null +++ b/airbyte-cdk/bulk/core/load/src/test/kotlin/io/airbyte/cdk/state/MemoryManagerTest.kt @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.cdk.state + +import io.micronaut.context.annotation.Replaces +import io.micronaut.context.annotation.Requires +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import jakarta.inject.Singleton +import java.util.concurrent.atomic.AtomicBoolean +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.launch +import kotlinx.coroutines.test.runTest +import kotlinx.coroutines.withContext +import kotlinx.coroutines.withTimeout +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +@MicronautTest +class MemoryManagerTest { + @Singleton + @Replaces(MemoryManager::class) + @Requires(env = ["test"]) + class MockAvailableMemoryProvider : AvailableMemoryProvider { + override val availableMemoryBytes: Long = 1000 + } + + @Test + fun testReserveBlocking() = runTest { + val memoryManager = MemoryManager(MockAvailableMemoryProvider()) + val reserved = AtomicBoolean(false) + + try { + withTimeout(5000) { memoryManager.reserveBlocking(900) } + } catch (e: Exception) { + Assertions.fail("Failed to reserve memory") + } + + Assertions.assertEquals(100, memoryManager.remainingMemoryBytes) + + val job = launch { + memoryManager.reserveBlocking(200) + reserved.set(true) + } + + memoryManager.reserveBlocking(0) + Assertions.assertFalse(reserved.get()) + + memoryManager.release(50) + memoryManager.reserveBlocking(0) + Assertions.assertEquals(150, memoryManager.remainingMemoryBytes) + Assertions.assertFalse(reserved.get()) + + memoryManager.release(25) + memoryManager.reserveBlocking(0) + Assertions.assertEquals(175, memoryManager.remainingMemoryBytes) + Assertions.assertFalse(reserved.get()) + + memoryManager.release(25) + try { + withTimeout(5000) { job.join() } + } catch (e: Exception) { + Assertions.fail("Failed to unblock reserving memory") + } + Assertions.assertEquals(0, memoryManager.remainingMemoryBytes) + Assertions.assertTrue(reserved.get()) + } + + @Test + fun testReserveBlockingMultithreaded() = 
runTest { + val memoryManager = MemoryManager(MockAvailableMemoryProvider()) + withContext(Dispatchers.IO) { + memoryManager.reserveBlocking(1000) + Assertions.assertEquals(0, memoryManager.remainingMemoryBytes) + val nIterations = 100000 + + val jobs = (0 until nIterations).map { launch { memoryManager.reserveBlocking(10) } } + + repeat(nIterations) { + memoryManager.release(10) + Assertions.assertTrue( + memoryManager.remainingMemoryBytes >= 0, + "Remaining memory is negative: ${memoryManager.remainingMemoryBytes}" + ) + } + jobs.forEach { it.join() } + Assertions.assertEquals(0, memoryManager.remainingMemoryBytes) + } + } + + @Test + fun testRequestingMoreThanAvailableThrows() = runTest { + val memoryManager = MemoryManager(MockAvailableMemoryProvider()) + try { + memoryManager.reserveBlocking(1001) + } catch (e: IllegalArgumentException) { + return@runTest + } + Assertions.fail("Requesting more memory than available should throw an exception") + } +} From 1de50aa83012c047a6396b2028db330a447a1a50 Mon Sep 17 00:00:00 2001 From: Xiaohan Song Date: Thu, 5 Sep 2024 16:27:39 -0700 Subject: [PATCH 36/51] [source-mysqlv2] A new mysqlv2 (#44606) Co-authored-by: Marius Posta --- .../connectors/source-mysql-v2/build.gradle | 23 ++ .../source-mysql-v2/gradle.properties | 2 + .../connectors/source-mysql-v2/icon.svg | 1 + .../connectors/source-mysql-v2/metadata.yaml | 28 ++ .../source/mysql/MysqlJdbcSelectQuerier.kt | 27 ++ .../integrations/source/mysql/MysqlSource.kt | 11 + .../source/mysql/MysqlSourceConfiguration.kt | 99 +++++ .../MysqlSourceConfigurationJsonObject.kt | 314 +++++++++++++++ .../mysql/MysqlSourceMetadataQuerier.kt | 159 ++++++++ .../source/mysql/MysqlSourceOperations.kt | 220 +++++++++++ .../src/main/resources/application.yml | 19 + .../source/mysql/MysqlContainerFactory.kt | 57 +++ .../MysqlSourceConfigurationJsonObjectTest.kt | 73 ++++ .../mysql/MysqlSourceConfigurationTest.kt | 20 + .../MysqlSourceDatatypeIntegrationTest.kt | 248 ++++++++++++ 
.../MysqlSourceSelectQueryGeneratorTest.kt | 145 +++++++ .../MysqlSourceTestConfigurationFactory.kt | 22 ++ .../source/mysql/MysqlSpecIntegrationTest.kt | 12 + .../src/test/resources/expected-spec.json | 361 ++++++++++++++++++ 19 files changed, 1841 insertions(+) create mode 100644 airbyte-integrations/connectors/source-mysql-v2/build.gradle create mode 100644 airbyte-integrations/connectors/source-mysql-v2/gradle.properties create mode 100644 airbyte-integrations/connectors/source-mysql-v2/icon.svg create mode 100644 airbyte-integrations/connectors/source-mysql-v2/metadata.yaml create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlJdbcSelectQuerier.kt create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSource.kt create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfiguration.kt create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfigurationJsonObject.kt create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceMetadataQuerier.kt create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceOperations.kt create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/main/resources/application.yml create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlContainerFactory.kt create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfigurationJsonObjectTest.kt create mode 100644 
airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfigurationTest.kt create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceDatatypeIntegrationTest.kt create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceSelectQueryGeneratorTest.kt create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceTestConfigurationFactory.kt create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSpecIntegrationTest.kt create mode 100644 airbyte-integrations/connectors/source-mysql-v2/src/test/resources/expected-spec.json diff --git a/airbyte-integrations/connectors/source-mysql-v2/build.gradle b/airbyte-integrations/connectors/source-mysql-v2/build.gradle new file mode 100644 index 000000000000..2b8bc919c6e8 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/build.gradle @@ -0,0 +1,23 @@ +plugins { + id 'airbyte-bulk-connector' +} + +application { + mainClass = 'io.airbyte.integrations.source.mysql.MysqlSource' +} + +airbyteBulkConnector { + core = 'extract' + toolkits = ['extract-jdbc'] + cdk = 'local' +} + +dependencies { + implementation 'mysql:mysql-connector-java:8.0.30' + implementation 'org.bouncycastle:bcpkix-jdk18on:1.77' + implementation 'org.bouncycastle:bcprov-jdk18on:1.77' + implementation 'org.bouncycastle:bctls-jdk18on:1.77' + + testImplementation platform('org.testcontainers:testcontainers-bom:1.19.8') + testImplementation 'org.testcontainers:mysql' +} diff --git a/airbyte-integrations/connectors/source-mysql-v2/gradle.properties b/airbyte-integrations/connectors/source-mysql-v2/gradle.properties new file mode 100644 index 000000000000..04dcba8cefd7 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-mysql-v2/gradle.properties @@ -0,0 +1,2 @@ +testExecutionConcurrency=1 +JunitMethodExecutionTimeout=5m \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-mysql-v2/icon.svg b/airbyte-integrations/connectors/source-mysql-v2/icon.svg new file mode 100644 index 000000000000..607d361ed765 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/icon.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-mysql-v2/metadata.yaml b/airbyte-integrations/connectors/source-mysql-v2/metadata.yaml new file mode 100644 index 000000000000..3e62995e4d98 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/metadata.yaml @@ -0,0 +1,28 @@ +data: + ab_internal: + ql: 200 + sl: 100 + allowedHosts: + hosts: + - ${host} + - ${tunnel_method.tunnel_host} + connectorSubtype: database + connectorType: source + definitionId: 561393ed-7e3a-4d0d-8b8b-90ded371754c + dockerImageTag: 0.0.1 + dockerRepository: airbyte/source-mysql-v2 + documentationUrl: https://docs.airbyte.com/integrations/sources/mysql + githubIssueLabel: source-mysql-v2 + icon: mysql.svg + license: ELv2 + name: Mysqlv2 Source + registryOverrides: + cloud: + enabled: false + oss: + enabled: false + releaseStage: alpha + supportLevel: archived + tags: + - language:java +metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlJdbcSelectQuerier.kt b/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlJdbcSelectQuerier.kt new file mode 100644 index 000000000000..d559f0f730df --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlJdbcSelectQuerier.kt @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.mysql + +import io.airbyte.cdk.read.JdbcSelectQuerier +import io.airbyte.cdk.read.SelectQuerier +import io.airbyte.cdk.read.SelectQuery +import io.github.oshai.kotlinlogging.KotlinLogging +import io.micronaut.context.annotation.Primary +import javax.inject.Singleton + +private val log = KotlinLogging.logger {} + +@Singleton +@Primary +class MysqlJdbcSelectQuerier(val base: JdbcSelectQuerier) : SelectQuerier by base { + + override fun executeQuery( + q: SelectQuery, + parameters: SelectQuerier.Parameters + ): SelectQuerier.Result { + log.info { "Executing query: ${q.sql}" } + return base.executeQuery(q, SelectQuerier.Parameters(fetchSize = Int.MIN_VALUE)) + } +} diff --git a/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSource.kt b/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSource.kt new file mode 100644 index 000000000000..c46a2c053471 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSource.kt @@ -0,0 +1,11 @@ +/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. */ +package io.airbyte.integrations.source.mysql + +import io.airbyte.cdk.AirbyteSourceRunner + +object MysqlSource { + @JvmStatic + fun main(args: Array) { + AirbyteSourceRunner.run(*args) + } +} diff --git a/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfiguration.kt b/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfiguration.kt new file mode 100644 index 000000000000..70f05317b8ce --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfiguration.kt @@ -0,0 +1,99 @@ +/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
*/ +package io.airbyte.integrations.source.mysql + +import io.airbyte.cdk.ConfigErrorException +import io.airbyte.cdk.command.JdbcSourceConfiguration +import io.airbyte.cdk.command.SourceConfiguration +import io.airbyte.cdk.command.SourceConfigurationFactory +import io.airbyte.cdk.ssh.SshConnectionOptions +import io.airbyte.cdk.ssh.SshTunnelMethodConfiguration +import io.github.oshai.kotlinlogging.KotlinLogging +import jakarta.inject.Singleton +import java.net.URLDecoder +import java.nio.charset.StandardCharsets +import java.time.Duration + +private val log = KotlinLogging.logger {} + +/** Mysql-specific implementation of [SourceConfiguration] */ +data class MysqlSourceConfiguration( + override val realHost: String, + override val realPort: Int, + override val sshTunnel: SshTunnelMethodConfiguration, + override val sshConnectionOptions: SshConnectionOptions, + override val jdbcUrlFmt: String, + override val jdbcProperties: Map, + override val namespaces: Set, + val cursorConfiguration: CursorConfiguration, + override val maxConcurrency: Int, + override val resourceAcquisitionHeartbeat: Duration = Duration.ofMillis(100L), + override val checkpointTargetInterval: Duration, + override val checkPrivileges: Boolean, +) : JdbcSourceConfiguration { + override val global = cursorConfiguration is CdcCursor +} + +@Singleton +class MysqlSourceConfigurationFactory : + SourceConfigurationFactory { + override fun makeWithoutExceptionHandling( + pojo: MysqlSourceConfigurationJsonObject, + ): MysqlSourceConfiguration { + val realHost: String = pojo.host + val realPort: Int = pojo.port + val sshTunnel: SshTunnelMethodConfiguration = pojo.getTunnelMethodValue() + val jdbcProperties = mutableMapOf() + jdbcProperties["user"] = pojo.username + pojo.password?.let { jdbcProperties["password"] = it } + + // Parse URL parameters. 
+ val pattern = "^([^=]+)=(.*)$".toRegex() + for (pair in (pojo.jdbcUrlParams ?: "").trim().split("&".toRegex())) { + if (pair.isBlank()) { + continue + } + val result: MatchResult? = pattern.matchEntire(pair) + if (result == null) { + log.warn { "ignoring invalid JDBC URL param '$pair'" } + } else { + val key: String = result.groupValues[1].trim() + val urlEncodedValue: String = result.groupValues[2].trim() + jdbcProperties[key] = URLDecoder.decode(urlEncodedValue, StandardCharsets.UTF_8) + } + } + // Determine protocol and configure encryption. + val encryption: Encryption = pojo.getEncryptionValue() + if (encryption is SslVerifyCertificate) { + // TODO: reuse JdbcSSLCOnnectionUtils; parse the input into properties + } + // Build JDBC URL + val address = "%s:%d" + val jdbcUrlFmt = "jdbc:mysql://${address}" + jdbcProperties["useCursorFetch"] = "true" + jdbcProperties["sessionVariables"] = "autocommit=0" + val defaultSchema: String = pojo.username.uppercase() + val sshOpts = SshConnectionOptions.fromAdditionalProperties(pojo.getAdditionalProperties()) + val checkpointTargetInterval: Duration = + Duration.ofSeconds(pojo.checkpointTargetIntervalSeconds?.toLong() ?: 0) + if (!checkpointTargetInterval.isPositive) { + throw ConfigErrorException("Checkpoint Target Interval should be positive") + } + val maxConcurrency: Int = pojo.concurrency ?: 0 + if ((pojo.concurrency ?: 0) <= 0) { + throw ConfigErrorException("Concurrency setting should be positive") + } + return MysqlSourceConfiguration( + realHost = realHost, + realPort = realPort, + sshTunnel = sshTunnel, + sshConnectionOptions = sshOpts, + jdbcUrlFmt = jdbcUrlFmt, + jdbcProperties = jdbcProperties, + namespaces = pojo.schemas?.toSet() ?: setOf(defaultSchema), + cursorConfiguration = pojo.getCursorConfigurationValue(), + checkpointTargetInterval = checkpointTargetInterval, + maxConcurrency = maxConcurrency, + checkPrivileges = pojo.checkPrivileges ?: true, + ) + } +} diff --git 
a/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfigurationJsonObject.kt b/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfigurationJsonObject.kt new file mode 100644 index 000000000000..493d0736dd58 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfigurationJsonObject.kt @@ -0,0 +1,314 @@ +/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. */ +package io.airbyte.integrations.source.mysql + +import com.fasterxml.jackson.annotation.JsonAnyGetter +import com.fasterxml.jackson.annotation.JsonAnySetter +import com.fasterxml.jackson.annotation.JsonGetter +import com.fasterxml.jackson.annotation.JsonIgnore +import com.fasterxml.jackson.annotation.JsonProperty +import com.fasterxml.jackson.annotation.JsonPropertyDescription +import com.fasterxml.jackson.annotation.JsonPropertyOrder +import com.fasterxml.jackson.annotation.JsonSetter +import com.fasterxml.jackson.annotation.JsonSubTypes +import com.fasterxml.jackson.annotation.JsonTypeInfo +import com.fasterxml.jackson.annotation.JsonValue +import com.kjetland.jackson.jsonSchema.annotations.JsonSchemaArrayWithUniqueItems +import com.kjetland.jackson.jsonSchema.annotations.JsonSchemaDefault +import com.kjetland.jackson.jsonSchema.annotations.JsonSchemaDescription +import com.kjetland.jackson.jsonSchema.annotations.JsonSchemaInject +import com.kjetland.jackson.jsonSchema.annotations.JsonSchemaTitle +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.ConfigErrorException +import io.airbyte.cdk.command.CONNECTOR_CONFIG_PREFIX +import io.airbyte.cdk.command.ConfigurationJsonObjectBase +import io.airbyte.cdk.ssh.MicronautPropertiesFriendlySshTunnelMethodConfigurationJsonObject +import io.airbyte.cdk.ssh.SshTunnelMethodConfiguration +import 
io.micronaut.context.annotation.ConfigurationBuilder +import io.micronaut.context.annotation.ConfigurationProperties +import jakarta.inject.Singleton + +/** + * The object which is mapped to the Mysql source configuration JSON. + * + * Use [MysqlSourceConfiguration] instead wherever possible. This object also allows injecting + * values through Micronaut properties, this is made possible by the classes named + * `MicronautPropertiesFriendly.*`. + */ +@JsonSchemaTitle("Mysql Source Spec") +@JsonPropertyOrder( + value = + [ + "host", + "port", + "username", + "password", + "schemas", + "jdbc_url_params", + "encryption", + "tunnel_method", + "cursor", + ], +) +@Singleton +@ConfigurationProperties(CONNECTOR_CONFIG_PREFIX) +@SuppressFBWarnings(value = ["NP_NONNULL_RETURN_VIOLATION"], justification = "Micronaut DI") +class MysqlSourceConfigurationJsonObject : ConfigurationJsonObjectBase() { + @JsonProperty("host") + @JsonSchemaTitle("Host") + @JsonSchemaInject(json = """{"order":1}""") + @JsonPropertyDescription("Hostname of the database.") + lateinit var host: String + + @JsonProperty("port") + @JsonSchemaTitle("Port") + @JsonSchemaInject(json = """{"order":2,"minimum": 0,"maximum": 65536}""") + @JsonSchemaDefault("3306") + @JsonPropertyDescription( + "Port of the database.", + ) + var port: Int = 3306 + + @JsonProperty("username") + @JsonSchemaTitle("User") + @JsonPropertyDescription("The username which is used to access the database.") + @JsonSchemaInject(json = """{"order":4}""") + lateinit var username: String + + @JsonProperty("password") + @JsonSchemaTitle("Password") + @JsonPropertyDescription("The password associated with the username.") + @JsonSchemaInject(json = """{"order":5,"always_show":true,"airbyte_secret":true}""") + var password: String? = null + + @JsonProperty("schemas") + @JsonSchemaTitle("Schemas") + @JsonSchemaArrayWithUniqueItems("schemas") + @JsonPropertyDescription("The list of schemas to sync from. Defaults to user. 
Case sensitive.") + @JsonSchemaInject(json = """{"order":6,"always_show":true,"uniqueItems":true}""") + var schemas: List? = null + + @JsonProperty("jdbc_url_params") + @JsonSchemaTitle("JDBC URL Params") + @JsonPropertyDescription( + "Additional properties to pass to the JDBC URL string when connecting to the database " + + "formatted as 'key=value' pairs separated by the symbol '&'. " + + "(example: key1=value1&key2=value2&key3=value3).", + ) + @JsonSchemaInject(json = """{"order":7}""") + var jdbcUrlParams: String? = null + + @JsonIgnore + @ConfigurationBuilder(configurationPrefix = "encryption") + val encryption = MicronautPropertiesFriendlyEncryption() + + @JsonIgnore var encryptionJson: Encryption? = null + + @JsonSetter("encryption") + fun setEncryptionValue(value: Encryption) { + encryptionJson = value + } + + @JsonGetter("encryption") + @JsonSchemaTitle("Encryption") + @JsonPropertyDescription( + "The encryption method with is used when communicating with the database.", + ) + @JsonSchemaInject(json = """{"order":8}""") + fun getEncryptionValue(): Encryption = encryptionJson ?: encryption.asEncryption() + + @JsonIgnore + @ConfigurationBuilder(configurationPrefix = "tunnel_method") + val tunnelMethod = MicronautPropertiesFriendlySshTunnelMethodConfigurationJsonObject() + + @JsonIgnore var tunnelMethodJson: SshTunnelMethodConfiguration? 
= null + + @JsonSetter("tunnel_method") + fun setTunnelMethodValue(value: SshTunnelMethodConfiguration) { + tunnelMethodJson = value + } + + @JsonGetter("tunnel_method") + @JsonSchemaTitle("SSH Tunnel Method") + @JsonPropertyDescription( + "Whether to initiate an SSH tunnel before connecting to the database, " + + "and if so, which kind of authentication to use.", + ) + @JsonSchemaInject(json = """{"order":9}""") + fun getTunnelMethodValue(): SshTunnelMethodConfiguration = + tunnelMethodJson ?: tunnelMethod.asSshTunnelMethod() + + @JsonIgnore + @ConfigurationBuilder(configurationPrefix = "cursor") + val cursor = MicronautPropertiesFriendlyCursorConfiguration() + + @JsonIgnore var cursorJson: CursorConfiguration? = null + + @JsonSetter("cursor") + fun setCursorMethodValue(value: CursorConfiguration) { + cursorJson = value + } + + @JsonGetter("cursor") + @JsonSchemaTitle("Update Method") + @JsonPropertyDescription("Configures how data is extracted from the database.") + @JsonSchemaInject(json = """{"order":10,"display_type":"radio"}""") + fun getCursorConfigurationValue(): CursorConfiguration = + cursorJson ?: cursor.asCursorConfiguration() + + @JsonProperty("checkpoint_target_interval_seconds") + @JsonSchemaTitle("Checkpoint Target Time Interval") + @JsonSchemaInject(json = """{"order":11}""") + @JsonSchemaDefault("300") + @JsonPropertyDescription("How often (in seconds) a stream should checkpoint, when possible.") + var checkpointTargetIntervalSeconds: Int? = 300 + + @JsonProperty("concurrency") + @JsonSchemaTitle("Concurrency") + @JsonSchemaInject(json = """{"order":12}""") + @JsonSchemaDefault("1") + @JsonPropertyDescription("Maximum number of concurrent queries to the database.") + var concurrency: Int? 
= 1 + + @JsonProperty("check_privileges") + @JsonSchemaTitle("Check Table and Column Access Privileges") + @JsonSchemaInject(json = """{"order":13,"display_type":"check"}""") + @JsonSchemaDefault("true") + @JsonPropertyDescription( + "When this feature is enabled, during schema discovery the connector " + + "will query each table or view individually to check access privileges " + + "and inaccessible tables, views, or columns therein will be removed. " + + "In large schemas, this might cause schema discovery to take too long, " + + "in which case it might be advisable to disable this feature.", + ) + var checkPrivileges: Boolean? = true + + @JsonIgnore var additionalPropertiesMap = mutableMapOf() + + @JsonAnyGetter fun getAdditionalProperties(): Map = additionalPropertiesMap + + @JsonAnySetter + fun setAdditionalProperty( + name: String, + value: Any, + ) { + additionalPropertiesMap[name] = value + } +} + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "encryption_method") +@JsonSubTypes( + JsonSubTypes.Type(value = EncryptionPreferred::class, name = "preferred"), + JsonSubTypes.Type(value = EncryptionRequired::class, name = "required"), + JsonSubTypes.Type(value = SslVerifyCertificate::class, name = "Verify CA"), + JsonSubTypes.Type(value = SslVerifyCertificate::class, name = "Verify Identity"), +) +@JsonSchemaTitle("Encryption") +@JsonSchemaDescription("The encryption method which is used when communicating with the database.") +sealed interface Encryption + +@JsonSchemaTitle("preferred") +@JsonSchemaDescription( + "To allow unencrypted communication only when the source doesn't support encryption.", +) +data object EncryptionPreferred : Encryption + +@JsonSchemaTitle("required") +@JsonSchemaDescription( + "To always require encryption. 
Note: The connection will fail if the source doesn't support encryption.", +) +data object EncryptionRequired : Encryption + +@JsonSchemaTitle("Verify CA") +@JsonSchemaDescription( + "To always require encryption and verify that the source has a valid SSL certificate." +) +@SuppressFBWarnings(value = ["NP_NONNULL_RETURN_VIOLATION"], justification = "Micronaut DI") +class SslVerifyCertificate : Encryption { + @JsonProperty("ssl_certificate", required = true) + @JsonSchemaTitle("CA certificate") + @JsonPropertyDescription( + "CA certificate", + ) + @JsonSchemaInject(json = """{"airbyte_secret":true,"multiline":true}""") + lateinit var sslCertificate: String + + @JsonProperty("ssl_client_certificate") + @JsonSchemaTitle("Client certificate File") + @JsonPropertyDescription( + "Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)", + ) + @JsonSchemaInject(json = """{"airbyte_secret":true,"multiline":true}""") + lateinit var sslClientCertificate: String + + @JsonProperty("ssl_client_key") + @JsonSchemaTitle("Client Key") + @JsonPropertyDescription( + "Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)", + ) + @JsonSchemaInject(json = """{"airbyte_secret":true,"multiline":true}""") + lateinit var sslClientKey: String + + @JsonProperty("ssl_client_key_password") + @JsonSchemaTitle("Client key password") + @JsonPropertyDescription( + "Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.", + ) + @JsonSchemaInject(json = """{"airbyte_secret":true,"multiline":true}""") + lateinit var sslClientPassword: String +} + +@ConfigurationProperties("$CONNECTOR_CONFIG_PREFIX.encryption") +class MicronautPropertiesFriendlyEncryption { + var encryptionMethod: String = "preferred" + var sslCertificate: String? 
= null + + @JsonValue + fun asEncryption(): Encryption = + when (encryptionMethod) { + "preferred" -> EncryptionPreferred + "required" -> EncryptionRequired + "verify_ca" -> SslVerifyCertificate().also { it.sslCertificate = sslCertificate!! } + "verify_identity" -> + SslVerifyCertificate().also { it.sslCertificate = sslCertificate!! } + else -> throw ConfigErrorException("invalid value $encryptionMethod") + } +} + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "cursor_method") +@JsonSubTypes( + JsonSubTypes.Type(value = UserDefinedCursor::class, name = "user_defined"), + // TODO: add CDC support + ) +@JsonSchemaTitle("Update Method") +@JsonSchemaDescription("Configures how data is extracted from the database.") +sealed interface CursorConfiguration + +@JsonSchemaTitle("Scan Changes with User Defined Cursor") +@JsonSchemaDescription( + "Incrementally detects new inserts and updates using the " + + "cursor column chosen when configuring a connection " + + "(e.g. created_at, updated_at).", +) +data object UserDefinedCursor : CursorConfiguration + +@JsonSchemaTitle("Read Changes using Change Data Capture (CDC)") +@JsonSchemaDescription( + "Recommended - " + + "Incrementally reads new inserts, updates, and deletes using Mysql's change data capture feature. 
This must be enabled on your database.", +) +data object CdcCursor : CursorConfiguration + +@ConfigurationProperties("$CONNECTOR_CONFIG_PREFIX.cursor") +class MicronautPropertiesFriendlyCursorConfiguration { + var cursorMethod: String = "user_defined" + + fun asCursorConfiguration(): CursorConfiguration = + when (cursorMethod) { + "user_defined" -> UserDefinedCursor + "cdc" -> CdcCursor + else -> throw ConfigErrorException("invalid value $cursorMethod") + } +} diff --git a/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceMetadataQuerier.kt b/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceMetadataQuerier.kt new file mode 100644 index 000000000000..0e3cd9d231ee --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceMetadataQuerier.kt @@ -0,0 +1,159 @@ +/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. */ +package io.airbyte.integrations.source.mysql + +import io.airbyte.cdk.check.JdbcCheckQueries +import io.airbyte.cdk.command.SourceConfiguration +import io.airbyte.cdk.discover.Field +import io.airbyte.cdk.discover.JdbcMetadataQuerier +import io.airbyte.cdk.discover.MetadataQuerier +import io.airbyte.cdk.discover.TableName +import io.airbyte.cdk.jdbc.DefaultJdbcConstants +import io.airbyte.cdk.jdbc.JdbcConnectionFactory +import io.airbyte.cdk.read.SelectQueryGenerator +import io.github.oshai.kotlinlogging.KotlinLogging +import io.micronaut.context.annotation.Primary +import jakarta.inject.Singleton +import java.sql.ResultSet +import java.sql.Statement + +private val log = KotlinLogging.logger {} + +/** Delegates to [JdbcMetadataQuerier] except for [fields]. 
*/ +class MysqlSourceMetadataQuerier( + val base: JdbcMetadataQuerier, +) : MetadataQuerier by base { + + override fun fields( + streamName: String, + streamNamespace: String?, + ): List { + val table: TableName = findTableName(streamName, streamNamespace) ?: return listOf() + if (table !in base.memoizedColumnMetadata) return listOf() + return base.memoizedColumnMetadata[table]!!.map { + Field(it.label, base.fieldTypeMapper.toFieldType(it)) + } + } + + override fun streamNamespaces(): List = base.config.namespaces.toList() + + override fun streamNames(streamNamespace: String?): List = + base.memoizedTableNames + .filter { (it.schema ?: it.catalog) == streamNamespace } + .map { it.name } + + fun findTableName( + streamName: String, + streamNamespace: String?, + ): TableName? = + base.memoizedTableNames.find { + it.name == streamName && (it.schema ?: it.catalog) == streamNamespace + } + + val memoizedPrimaryKeys: Map>> by lazy { + val results = mutableListOf() + val schemas: List = streamNamespaces() + val sql: String = PK_QUERY_FMTSTR.format(schemas.joinToString { "\'$it\'" }) + log.info { "Querying Mysql system tables for all primary keys for catalog discovery." } + try { + // Get primary keys for the specified table + base.conn.createStatement().use { stmt: Statement -> + stmt.executeQuery(sql).use { rs: ResultSet -> + while (rs.next()) { + results.add( + AllPrimaryKeysRow( + rs.getString("table_schema"), + rs.getString("table_name"), + rs.getString("constraint_name"), + rs.getInt("ordinal_position").takeUnless { rs.wasNull() }, + rs.getString("column_name").takeUnless { rs.wasNull() }, + ), + ) + } + } + } + log.info { "Discovered all primary keys in ${schemas.size} Mysql schema(s)." 
} + return@lazy results + .groupBy { findTableName(it.tableName, "public") } + .mapNotNull { (table, rowsByTable) -> + if (table == null) return@mapNotNull null + val pkRows: List = + rowsByTable + .groupBy { it.constraintName } + .filterValues { rowsByPK: List -> + rowsByPK.all { it.position != null && it.columnName != null } + } + .values + .firstOrNull() + ?: return@mapNotNull null + val pkColumnNames: List> = + pkRows + .sortedBy { it.position } + .mapNotNull { it.columnName } + .map { listOf(it) } + table to pkColumnNames + } + .toMap() + } catch (e: Exception) { + throw RuntimeException("Mysql primary key discovery query failed: ${e.message}", e) + } + } + + override fun primaryKey( + streamName: String, + streamNamespace: String?, + ): List> { + val table: TableName = findTableName(streamName, streamNamespace) ?: return listOf() + return memoizedPrimaryKeys[table] ?: listOf() + } + + private data class AllPrimaryKeysRow( + val tableSchema: String, + val tableName: String, + val constraintName: String, + val position: Int?, + val columnName: String?, + ) + + companion object { + + const val PK_QUERY_FMTSTR = + """ + SELECT + table_schema, + table_name, + column_name, + ordinal_position, + constraint_name + FROM + information_schema.key_column_usage + WHERE + table_schema IN (%s) + AND constraint_name = 'PRIMARY'; + """ + } + + /** Mysql implementation of [MetadataQuerier.Factory]. */ + @Singleton + @Primary + class Factory( + val constants: DefaultJdbcConstants, + val selectQueryGenerator: SelectQueryGenerator, + val fieldTypeMapper: JdbcMetadataQuerier.FieldTypeMapper, + val checkQueries: JdbcCheckQueries, + ) : MetadataQuerier.Factory { + /** The [SourceConfiguration] is deliberately not injected in order to support tests. 
*/ + override fun session(config: MysqlSourceConfiguration): MetadataQuerier { + val jdbcConnectionFactory = JdbcConnectionFactory(config) + val base = + JdbcMetadataQuerier( + constants, + config, + selectQueryGenerator, + fieldTypeMapper, + checkQueries, + jdbcConnectionFactory, + ) + return MysqlSourceMetadataQuerier(base) + } + } +} diff --git a/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceOperations.kt b/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceOperations.kt new file mode 100644 index 000000000000..48a91db7a074 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/main/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceOperations.kt @@ -0,0 +1,220 @@ +/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. */ +package io.airbyte.integrations.source.mysql + +import io.airbyte.cdk.discover.Field +import io.airbyte.cdk.discover.FieldType +import io.airbyte.cdk.discover.JdbcMetadataQuerier +import io.airbyte.cdk.discover.SystemType +import io.airbyte.cdk.discover.UserDefinedArray +import io.airbyte.cdk.discover.UserDefinedType +import io.airbyte.cdk.jdbc.ArrayFieldType +import io.airbyte.cdk.jdbc.BigDecimalFieldType +import io.airbyte.cdk.jdbc.BigIntegerFieldType +import io.airbyte.cdk.jdbc.BinaryStreamFieldType +import io.airbyte.cdk.jdbc.BooleanFieldType +import io.airbyte.cdk.jdbc.ClobFieldType +import io.airbyte.cdk.jdbc.DoubleFieldType +import io.airbyte.cdk.jdbc.FloatFieldType +import io.airbyte.cdk.jdbc.JdbcFieldType +import io.airbyte.cdk.jdbc.JsonStringFieldType +import io.airbyte.cdk.jdbc.LocalDateFieldType +import io.airbyte.cdk.jdbc.LocalDateTimeFieldType +import io.airbyte.cdk.jdbc.LongFieldType +import io.airbyte.cdk.jdbc.LosslessJdbcFieldType +import io.airbyte.cdk.jdbc.NClobFieldType +import io.airbyte.cdk.jdbc.NStringFieldType +import io.airbyte.cdk.jdbc.OffsetDateTimeFieldType +import 
io.airbyte.cdk.jdbc.PokemonFieldType +import io.airbyte.cdk.jdbc.StringFieldType +import io.airbyte.cdk.read.And +import io.airbyte.cdk.read.Equal +import io.airbyte.cdk.read.From +import io.airbyte.cdk.read.FromNode +import io.airbyte.cdk.read.FromSample +import io.airbyte.cdk.read.Greater +import io.airbyte.cdk.read.GreaterOrEqual +import io.airbyte.cdk.read.Lesser +import io.airbyte.cdk.read.LesserOrEqual +import io.airbyte.cdk.read.Limit +import io.airbyte.cdk.read.LimitNode +import io.airbyte.cdk.read.NoFrom +import io.airbyte.cdk.read.NoLimit +import io.airbyte.cdk.read.NoOrderBy +import io.airbyte.cdk.read.NoWhere +import io.airbyte.cdk.read.Or +import io.airbyte.cdk.read.OrderBy +import io.airbyte.cdk.read.OrderByNode +import io.airbyte.cdk.read.SelectColumnMaxValue +import io.airbyte.cdk.read.SelectColumns +import io.airbyte.cdk.read.SelectNode +import io.airbyte.cdk.read.SelectQuery +import io.airbyte.cdk.read.SelectQueryGenerator +import io.airbyte.cdk.read.SelectQuerySpec +import io.airbyte.cdk.read.Where +import io.airbyte.cdk.read.WhereClauseLeafNode +import io.airbyte.cdk.read.WhereClauseNode +import io.airbyte.cdk.read.WhereNode +import io.airbyte.cdk.util.Jsons +import io.micronaut.context.annotation.Primary +import jakarta.inject.Singleton + +@Singleton +@Primary +class MysqlSourceOperations : JdbcMetadataQuerier.FieldTypeMapper, SelectQueryGenerator { + override fun toFieldType(c: JdbcMetadataQuerier.ColumnMetadata): FieldType = + when (val type = c.type) { + is SystemType -> leafType(c.type.typeName, type.scale != 0) + is UserDefinedArray -> ArrayFieldType(recursiveArrayType(type)) + is UserDefinedType -> PokemonFieldType + } + + private fun recursiveArrayType(type: UserDefinedArray): JdbcFieldType<*> = + when (val elementType = type.elementType) { + is SystemType -> { + val leafType: JdbcFieldType<*> = + leafType(elementType.typeName, elementType.scale != 0) + if (leafType == OffsetDateTimeFieldType) { + // Mysql's JDBC driver has a bug which 
prevents object conversions in + // ArrayDataResultSet instances. Fall back to strings instead. + PokemonFieldType + } else { + leafType + } + } + is UserDefinedArray -> ArrayFieldType(recursiveArrayType(elementType)) + is UserDefinedType -> PokemonFieldType + } + + private fun leafType( + typeName: String?, + notInteger: Boolean, + ): JdbcFieldType<*> = + // TODO: https://github.com/airbytehq/airbyte-internal-issues/issues/9670 + when (typeName) { + "BINARY_FLOAT" -> FloatFieldType + "BINARY_DOUBLE" -> DoubleFieldType + "FLOAT", + "DOUBLE PRECISION", + "REAL", -> BigDecimalFieldType + "NUMBER", + "NUMERIC", + "DECIMAL", + "DEC", -> if (notInteger) BigDecimalFieldType else BigIntegerFieldType + "INTEGER", + "INT", + "SMALLINT", -> BigIntegerFieldType + "BOOLEAN", + "BOOL", -> BooleanFieldType + "CHAR", + "VARCHAR2", + "VARCHAR", + "CHARACTER", + "CHARACTER VARYING", + "CHAR VARYING", -> StringFieldType + "NCHAR", + "NVARCHAR2", + "NCHAR VARYING", + "NATIONAL CHARACTER VARYING", + "NATIONAL CHARACTER", + "NATIONAL CHAR VARYING", + "NATIONAL CHAR", -> NStringFieldType + "BLOB" -> BinaryStreamFieldType + "CLOB" -> ClobFieldType + "NCLOB" -> NClobFieldType + "BFILE" -> BinaryStreamFieldType + "DATE" -> LocalDateFieldType + "INTERVALDS", + "INTERVAL DAY TO SECOND", + "INTERVALYM", + "INTERVAL YEAR TO MONTH", -> StringFieldType + "JSON" -> JsonStringFieldType + "LONG", + "LONG RAW", + "RAW", -> BinaryStreamFieldType + "TIMESTAMP", + "TIMESTAMP WITH LOCAL TIME ZONE", + "TIMESTAMP WITH LOCAL TZ", -> LocalDateTimeFieldType + "TIMESTAMP WITH TIME ZONE", + "TIMESTAMP WITH TZ", -> OffsetDateTimeFieldType + else -> PokemonFieldType + } + + override fun generate(ast: SelectQuerySpec): SelectQuery = + SelectQuery(ast.sql(), ast.select.columns, ast.bindings()) + + fun SelectQuerySpec.sql(): String { + val components: List = listOf(select.sql(), from.sql(), where.sql(), orderBy.sql()) + val sqlWithoutLimit: String = components.filter { it.isNotBlank() }.joinToString(" ") + val 
rownumClause: String = + when (limit) { + NoLimit -> return sqlWithoutLimit + Limit(0) -> "LIMIT 0" + is Limit -> "LIMIT ?" + } + return "$sqlWithoutLimit $rownumClause" + } + + fun SelectNode.sql(): String = + "SELECT " + + when (this) { + is SelectColumns -> columns.joinToString(", ") { it.sql() } + is SelectColumnMaxValue -> "MAX(${column.sql()})" + } + + fun Field.sql(): String = "`$id`" + + fun FromNode.sql(): String = + when (this) { + NoFrom -> "" + is From -> if (this.namespace == null) "FROM `$name`" else "FROM `$namespace`.`$name`" + is FromSample -> TODO("not implemented in mysql") + } + + fun WhereNode.sql(): String = + when (this) { + NoWhere -> "" + is Where -> "WHERE ${clause.sql()}" + } + + fun WhereClauseNode.sql(): String = + when (this) { + is And -> conj.joinToString(") AND (", "(", ")") { it.sql() } + is Or -> disj.joinToString(") OR (", "(", ")") { it.sql() } + is Equal -> "${column.sql()} = ?" + is Greater -> "${column.sql()} > ?" + is GreaterOrEqual -> "${column.sql()} >= ?" + is LesserOrEqual -> "${column.sql()} <= ?" + is Lesser -> "${column.sql()} < ?" 
+ } + + fun OrderByNode.sql(): String = + when (this) { + NoOrderBy -> "" + is OrderBy -> "ORDER BY " + columns.joinToString(", ") { it.sql() } + } + + fun SelectQuerySpec.bindings(): List = where.bindings() + limit.bindings() + + fun WhereNode.bindings(): List = + when (this) { + is NoWhere -> listOf() + is Where -> clause.bindings() + } + + fun WhereClauseNode.bindings(): List = + when (this) { + is And -> conj.flatMap { it.bindings() } + is Or -> disj.flatMap { it.bindings() } + is WhereClauseLeafNode -> { + val type = column.type as LosslessJdbcFieldType<*, *> + listOf(SelectQuery.Binding(bindingValue, type)) + } + } + + fun LimitNode.bindings(): List = + when (this) { + NoLimit, + Limit(0) -> listOf() + is Limit -> listOf(SelectQuery.Binding(Jsons.numberNode(n), LongFieldType)) + } +} diff --git a/airbyte-integrations/connectors/source-mysql-v2/src/main/resources/application.yml b/airbyte-integrations/connectors/source-mysql-v2/src/main/resources/application.yml new file mode 100644 index 000000000000..f75a759efab5 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/main/resources/application.yml @@ -0,0 +1,19 @@ +--- +airbyte: + connector: + extract: + jdbc: + mode: sequential + throughput-bytes-per-second: 10000000 + min-fetch-size: -2147483648 + default-fetch-size: -2147483648 + max-fetch-size: -2147483648 + memory-capacity-ratio: 0.6 + estimated-record-overhead-bytes: 16 + estimated-field-overhead-bytes: 16 + namespace-kind: CATALOG_AND_SCHEMA + check: + jdbc: + queries: + - >- + SELECT 1 where 1 = 0; diff --git a/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlContainerFactory.kt b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlContainerFactory.kt new file mode 100644 index 000000000000..f7757dd8aa37 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlContainerFactory.kt @@ -0,0 +1,57 @@ +/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. */ +package io.airbyte.integrations.source.mysql + +import io.airbyte.cdk.testcontainers.TestContainerFactory +import io.github.oshai.kotlinlogging.KotlinLogging +import org.testcontainers.containers.MySQLContainer +import org.testcontainers.containers.Network +import org.testcontainers.utility.DockerImageName + +object MysqlContainerFactory { + const val COMPATIBLE_NAME = "mysql:8.0" + private val log = KotlinLogging.logger {} + + init { + TestContainerFactory.register(COMPATIBLE_NAME, ::MySQLContainer) + } + + sealed interface MysqlContainerModifier : + TestContainerFactory.ContainerModifier> + + data object WithNetwork : MysqlContainerModifier { + override fun modify(container: MySQLContainer<*>) { + container.withNetwork(Network.newNetwork()) + } + } + + fun exclusive( + imageName: String, + vararg modifiers: MysqlContainerModifier, + ): MySQLContainer<*> { + val dockerImageName = + DockerImageName.parse(imageName).asCompatibleSubstituteFor(COMPATIBLE_NAME) + return TestContainerFactory.exclusive(dockerImageName, *modifiers) + } + + fun shared( + imageName: String, + vararg modifiers: MysqlContainerModifier, + ): MySQLContainer<*> { + val dockerImageName = + DockerImageName.parse(imageName).asCompatibleSubstituteFor(COMPATIBLE_NAME) + return TestContainerFactory.shared(dockerImageName, *modifiers) + } + + @JvmStatic + fun config(mySQLContainer: MySQLContainer<*>): MysqlSourceConfigurationJsonObject = + MysqlSourceConfigurationJsonObject().apply { + host = mySQLContainer.host + port = mySQLContainer.getMappedPort(MySQLContainer.MYSQL_PORT) + username = mySQLContainer.username + password = mySQLContainer.password + jdbcUrlParams = "" + schemas = listOf("test") + checkpointTargetIntervalSeconds = 60 + concurrency = 1 + } +} diff --git 
a/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfigurationJsonObjectTest.kt b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfigurationJsonObjectTest.kt new file mode 100644 index 000000000000..f4f32ba8e4ea --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfigurationJsonObjectTest.kt @@ -0,0 +1,73 @@ +/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. */ +package io.airbyte.integrations.source.mysql + +import io.airbyte.cdk.ConfigErrorException +import io.airbyte.cdk.command.ConfigurationJsonObjectSupplier +import io.airbyte.cdk.ssh.SshPasswordAuthTunnelMethod +import io.airbyte.cdk.ssh.SshTunnelMethodConfiguration +import io.micronaut.context.annotation.Property +import io.micronaut.context.env.Environment +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import jakarta.inject.Inject +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +@MicronautTest(environments = [Environment.TEST], rebuildContext = true) +class MysqlSourceConfigurationJsonObjectTest { + @Inject + lateinit var supplier: ConfigurationJsonObjectSupplier + + @Test + fun testSchemaViolation() { + Assertions.assertThrows(ConfigErrorException::class.java, supplier::get) + } + + @Test + @Property(name = "airbyte.connector.config.json", value = CONFIG_JSON) + fun testJson() { + val pojo: MysqlSourceConfigurationJsonObject = supplier.get() + Assertions.assertEquals("localhost", pojo.host) + Assertions.assertEquals(12345, pojo.port) + Assertions.assertEquals("FOO", pojo.username) + Assertions.assertEquals("BAR", pojo.password) + Assertions.assertEquals(listOf("FOO", "SYSTEM"), pojo.schemas) + val encryption: Encryption = pojo.getEncryptionValue() + Assertions.assertTrue(encryption is EncryptionPreferred, encryption::class.toString()) + 
val tunnelMethod: SshTunnelMethodConfiguration = pojo.getTunnelMethodValue() + Assertions.assertTrue( + tunnelMethod is SshPasswordAuthTunnelMethod, + tunnelMethod::class.toString(), + ) + Assertions.assertEquals(60, pojo.checkpointTargetIntervalSeconds) + Assertions.assertEquals(2, pojo.concurrency) + } +} + +const val CONFIG_JSON = + """ +{ + "host": "localhost", + "port": 12345, + "username": "FOO", + "password": "BAR", + "schemas": [ + "FOO", + "SYSTEM" + ], + "encryption": { + "encryption_method": "preferred" + }, + "tunnel_method": { + "tunnel_method": "SSH_PASSWORD_AUTH", + "tunnel_host": "localhost", + "tunnel_port": 2222, + "tunnel_user": "sshuser", + "tunnel_user_password": "***" + }, + "cursor": { + "cursor_method": "user_defined" + }, + "checkpoint_target_interval_seconds": 60, + "concurrency": 2 +} +""" diff --git a/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfigurationTest.kt b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfigurationTest.kt new file mode 100644 index 000000000000..0a721977bae2 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceConfigurationTest.kt @@ -0,0 +1,20 @@ +/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
*/ +package io.airbyte.integrations.source.mysql + +import io.airbyte.cdk.command.ConfigurationJsonObjectSupplier +import io.airbyte.cdk.command.SourceConfigurationFactory +import io.micronaut.context.env.Environment +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import jakarta.inject.Inject + +@MicronautTest(environments = [Environment.TEST], rebuildContext = true) +class MysqlSourceConfigurationTest { + @Inject + lateinit var pojoSupplier: ConfigurationJsonObjectSupplier + + @Inject + lateinit var factory: + SourceConfigurationFactory + + // TODO: add tests to cover SSL config to Jdbc Property conversion. +} diff --git a/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceDatatypeIntegrationTest.kt b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceDatatypeIntegrationTest.kt new file mode 100644 index 000000000000..30a9d520a548 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceDatatypeIntegrationTest.kt @@ -0,0 +1,248 @@ +/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
*/ +package io.airbyte.integrations.source.mysql + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.TestClockFactory +import io.airbyte.cdk.command.CliRunner +import io.airbyte.cdk.data.AirbyteType +import io.airbyte.cdk.data.LeafAirbyteType +import io.airbyte.cdk.jdbc.JdbcConnectionFactory +import io.airbyte.cdk.output.BufferingOutputConsumer +import io.airbyte.cdk.util.Jsons +import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteRecordMessage +import io.airbyte.protocol.models.v0.AirbyteStream +import io.airbyte.protocol.models.v0.AirbyteTraceMessage +import io.airbyte.protocol.models.v0.CatalogHelpers +import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream +import io.airbyte.protocol.models.v0.SyncMode +import io.github.oshai.kotlinlogging.KotlinLogging +import java.sql.Connection +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.DynamicNode +import org.junit.jupiter.api.DynamicTest +import org.junit.jupiter.api.TestFactory +import org.junit.jupiter.api.Timeout +import org.testcontainers.containers.MySQLContainer + +private val log = KotlinLogging.logger {} + +/** Reference: https://docs.mysql.com/en/database/mysql/mysql-database/23/sqlrf/Data-Types.html */ +class MysqlSourceDatatypeIntegrationTest { + @TestFactory + @Timeout(300) + fun syncTests(): Iterable { + val discover: DynamicNode = + DynamicTest.dynamicTest("discover") { + Assertions.assertFalse(LazyValues.actualStreams.isEmpty()) + } + val read: DynamicNode = + DynamicTest.dynamicTest("read") { + Assertions.assertFalse(LazyValues.actualReads.isEmpty()) + } + val cases: List = + allStreamNamesAndRecordData.keys.map { streamName: String -> + DynamicContainer.dynamicContainer( + streamName, + listOf( + DynamicTest.dynamicTest("discover") { discover(streamName) }, + 
DynamicTest.dynamicTest("records") { records(streamName) }, + ), + ) + } + return listOf(discover, read) + cases + } + + object LazyValues { + val actualStreams: Map by lazy { + val output: BufferingOutputConsumer = CliRunner.runSource("discover", config()) + output.catalogs().firstOrNull()?.streams?.filterNotNull()?.associateBy { it.name } + ?: mapOf() + } + + val configuredCatalog: ConfiguredAirbyteCatalog by lazy { + val configuredStreams: List = + allStreamNamesAndRecordData.keys + .mapNotNull { actualStreams[it] } + .map(CatalogHelpers::toDefaultConfiguredStream) + for (configuredStream in configuredStreams) { + if (configuredStream.stream.supportedSyncModes.contains(SyncMode.INCREMENTAL)) { + configuredStream.syncMode = SyncMode.INCREMENTAL + } + } + ConfiguredAirbyteCatalog().withStreams(configuredStreams) + } + + val allReadMessages: List by lazy { + CliRunner.runSource("read", config(), configuredCatalog).messages() + } + + val actualReads: Map by lazy { + val result: Map = + allStreamNamesAndRecordData.keys.associateWith { + BufferingOutputConsumer(TestClockFactory().fixed()) + } + for (msg in allReadMessages) { + result[streamName(msg) ?: continue]?.accept(msg) + } + result + } + + fun streamName(msg: AirbyteMessage): String? = + when (msg.type) { + AirbyteMessage.Type.RECORD -> msg.record?.stream + AirbyteMessage.Type.STATE -> msg.state?.stream?.streamDescriptor?.name + AirbyteMessage.Type.TRACE -> + when (msg.trace?.type) { + AirbyteTraceMessage.Type.ERROR -> msg.trace?.error?.streamDescriptor?.name + AirbyteTraceMessage.Type.ESTIMATE -> msg.trace?.estimate?.name + AirbyteTraceMessage.Type.STREAM_STATUS -> + msg.trace?.streamStatus?.streamDescriptor?.name + AirbyteTraceMessage.Type.ANALYTICS -> null + null -> null + } + else -> null + } + } + + private fun discover(streamName: String) { + val actualStream: AirbyteStream? 
= LazyValues.actualStreams[streamName] + log.info { "discover result: ${LazyValues.actualStreams}" } + log.info { "streamName: $streamName" } + Assertions.assertNotNull(actualStream) + log.info { + "test case $streamName: discovered stream ${ + Jsons.valueToTree( + actualStream, + ) + }" + } + val testCase: TestCase = + testCases.find { it.streamNamesToRecordData.keys.contains(streamName) }!! + val isIncrementalSupported: Boolean = + actualStream!!.supportedSyncModes.contains(SyncMode.INCREMENTAL) + val jsonSchema: JsonNode = actualStream.jsonSchema?.get("properties")!! + if (streamName == testCase.tableName) { + val actualSchema: JsonNode? = jsonSchema[testCase.columnName] + Assertions.assertNotNull(actualSchema) + val expectedSchema: JsonNode = testCase.airbyteType.asJsonSchema() + Assertions.assertEquals(expectedSchema, actualSchema) + if (testCase.cursor) { + Assertions.assertTrue(isIncrementalSupported) + } else { + Assertions.assertFalse(isIncrementalSupported) + } + } + } + + private fun records(streamName: String) { + val actualRead: BufferingOutputConsumer? = LazyValues.actualReads[streamName] + Assertions.assertNotNull(actualRead) + + fun sortedRecordData(data: List): JsonNode = + Jsons.createArrayNode().apply { addAll(data.sortedBy { it.toString() }) } + + val actualRecords: List = actualRead?.records() ?: listOf() + val actual: JsonNode = sortedRecordData(actualRecords.mapNotNull { it.data }) + log.info { "test case $streamName: emitted records $actual" } + val expected: JsonNode = sortedRecordData(allStreamNamesAndRecordData[streamName]!!) 
+ + Assertions.assertEquals(expected, actual) + } + + companion object { + lateinit var dbContainer: MySQLContainer<*> + + fun config(): MysqlSourceConfigurationJsonObject = MysqlContainerFactory.config(dbContainer) + + val connectionFactory: JdbcConnectionFactory by lazy { + JdbcConnectionFactory(MysqlSourceConfigurationFactory().make(config())) + } + + val stringValues = + mapOf( + "'abcdef'" to """"abcdef"""", + "'ABCD'" to """"ABCD"""", + "'OXBEEF'" to """"OXBEEF"""", + ) + + val testCases: List = + listOf( + TestCase("VARCHAR(10)", stringValues), + ) + + val allStreamNamesAndRecordData: Map> = + testCases.flatMap { it.streamNamesToRecordData.toList() }.toMap() + + @JvmStatic + @BeforeAll + @Timeout(value = 300) + fun startAndProvisionTestContainer() { + dbContainer = + MysqlContainerFactory.exclusive( + "mysql:8.0", + MysqlContainerFactory.WithNetwork, + ) + connectionFactory + .get() + .also { it.isReadOnly = false } + .use { connection: Connection -> + for (case in testCases) { + for (sql in case.sqlStatements) { + log.info { "test case ${case.id}: executing $sql" } + connection.createStatement().use { stmt -> stmt.execute(sql) } + } + } + } + } + } + + data class TestCase( + val sqlType: String, + val sqlToAirbyte: Map, + val airbyteType: AirbyteType = LeafAirbyteType.STRING, + val cursor: Boolean = true, + val noPK: Boolean = false, + val customDDL: List? 
= null, + ) { + val id: String + get() = + sqlType + .replace("[^a-zA-Z0-9]".toRegex(), " ") + .trim() + .replace(" +".toRegex(), "_") + .lowercase() + + val tableName: String + get() = "tbl_$id" + + val columnName: String + get() = "col_$id" + + val sqlStatements: List + get() { + val ddl: List = + listOf( + "CREATE DATABASE IF NOT EXISTS test", + "USE test", + "CREATE TABLE IF NOT EXISTS $tableName " + + "($columnName $sqlType ${if (noPK) "" else "PRIMARY KEY"})", + "TRUNCATE TABLE $tableName", + ) + val dml: List = + sqlToAirbyte.keys.map { "INSERT INTO $tableName ($columnName) VALUES ($it)" } + + return ddl + dml + } + + val streamNamesToRecordData: Map> + get() { + val recordData: List = + sqlToAirbyte.values.map { Jsons.readTree("""{"${columnName}":$it}""") } + return mapOf(tableName to recordData) + } + } +} diff --git a/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceSelectQueryGeneratorTest.kt b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceSelectQueryGeneratorTest.kt new file mode 100644 index 000000000000..14fb19b67917 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceSelectQueryGeneratorTest.kt @@ -0,0 +1,145 @@ +/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
*/ +package io.airbyte.integrations.source.mysql + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.cdk.discover.Field +import io.airbyte.cdk.jdbc.DoubleFieldType +import io.airbyte.cdk.jdbc.IntFieldType +import io.airbyte.cdk.jdbc.LongFieldType +import io.airbyte.cdk.jdbc.LosslessJdbcFieldType +import io.airbyte.cdk.jdbc.OffsetDateTimeFieldType +import io.airbyte.cdk.jdbc.StringFieldType +import io.airbyte.cdk.read.And +import io.airbyte.cdk.read.Equal +import io.airbyte.cdk.read.From +import io.airbyte.cdk.read.Greater +import io.airbyte.cdk.read.LesserOrEqual +import io.airbyte.cdk.read.Limit +import io.airbyte.cdk.read.Or +import io.airbyte.cdk.read.OrderBy +import io.airbyte.cdk.read.SelectColumnMaxValue +import io.airbyte.cdk.read.SelectColumns +import io.airbyte.cdk.read.SelectQuery +import io.airbyte.cdk.read.SelectQuerySpec +import io.airbyte.cdk.read.Where +import io.airbyte.cdk.read.optimize +import io.airbyte.cdk.util.Jsons +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test + +class MysqlSourceSelectQueryGeneratorTest { + @Test + fun testSelectLimit0() { + SelectQuerySpec( + SelectColumns( + listOf( + Field("k", IntFieldType), + Field("v", StringFieldType), + ), + ), + From("TBL", "SC"), + limit = Limit(0), + ) + .assertSqlEquals("""SELECT `k`, `v` FROM `SC`.`TBL` LIMIT 0""") + } + + @Test + fun testSelectMaxCursor() { + SelectQuerySpec( + SelectColumnMaxValue(Field("ts", OffsetDateTimeFieldType)), + From("TBL", "SC"), + ) + .assertSqlEquals("""SELECT MAX(`ts`) FROM `SC`.`TBL`""") + } + + @Test + fun testSelectForNonResumableInitialSync() { + SelectQuerySpec( + SelectColumns( + listOf( + Field("k", IntFieldType), + Field("v", StringFieldType), + ), + ), + From("TBL", "SC"), + ) + .assertSqlEquals("""SELECT `k`, `v` FROM `SC`.`TBL`""") + } + + @Test + fun testSelectForResumableInitialSync() { + val k1 = Field("k1", IntFieldType) + val v1 = Jsons.numberNode(10) + val k2 = Field("k2", IntFieldType) + val v2 = 
Jsons.numberNode(20) + val k3 = Field("k3", IntFieldType) + val v3 = Jsons.numberNode(30) + SelectQuerySpec( + SelectColumns(listOf(k1, k2, k3, Field("msg", StringFieldType))), + From("TBL", "SC"), + Where( + Or( + listOf( + And(listOf(Greater(k1, v1))), + And(listOf(Equal(k1, v1), Greater(k2, v2))), + And(listOf(Equal(k1, v1), Equal(k2, v2), Greater(k3, v3))), + ), + ), + ), + OrderBy(listOf(k1, k2, k3)), + Limit(1000), + ) + .assertSqlEquals( + """SELECT `k1`, `k2`, `k3`, `msg` FROM """ + + """`SC`.`TBL` WHERE (`k1` > ?) OR """ + + """((`k1` = ?) AND (`k2` > ?)) OR """ + + """((`k1` = ?) AND (`k2` = ?) AND (`k3` > ?)) """ + + """ORDER BY `k1`, `k2`, `k3`""" + + " LIMIT ?", + v1 to IntFieldType, + v1 to IntFieldType, + v2 to IntFieldType, + v1 to IntFieldType, + v2 to IntFieldType, + v3 to IntFieldType, + Jsons.numberNode(1000L) to LongFieldType, + ) + } + + @Test + fun testSelectForCursorBasedIncrementalSync() { + val c = Field("c", DoubleFieldType) + val lb = Jsons.numberNode(0.5) + val ub = Jsons.numberNode(0.5) + SelectQuerySpec( + SelectColumns(listOf(Field("msg", StringFieldType), c)), + From("TBL", "SC"), + Where(And(listOf(Greater(c, lb), LesserOrEqual(c, ub)))), + OrderBy(listOf(c)), + Limit(1000), + ) + .assertSqlEquals( + """SELECT `msg`, `c` FROM """ + + """`SC`.`TBL` """ + + """WHERE (`c` > ?) AND (`c` <= ?) 
ORDER BY `c`""" + + " LIMIT ?", + lb to DoubleFieldType, + ub to DoubleFieldType, + Jsons.numberNode(1000L) to LongFieldType, + ) + } + + private fun SelectQuerySpec.assertSqlEquals( + sql: String, + vararg bindings: Pair>, + ) { + val expected = + SelectQuery( + sql, + select.columns, + bindings.map { SelectQuery.Binding(it.first, it.second) }, + ) + val actual: SelectQuery = MysqlSourceOperations().generate(this.optimize()) + Assertions.assertEquals(expected, actual) + } +} diff --git a/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceTestConfigurationFactory.kt b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceTestConfigurationFactory.kt new file mode 100644 index 000000000000..43bdc7183e1d --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSourceTestConfigurationFactory.kt @@ -0,0 +1,22 @@ +/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
*/ +package io.airbyte.integrations.source.mysql + +import io.airbyte.cdk.command.SourceConfigurationFactory +import io.micronaut.context.annotation.Primary +import io.micronaut.context.annotation.Requires +import io.micronaut.context.env.Environment +import jakarta.inject.Singleton +import java.time.Duration + +@Singleton +@Requires(env = [Environment.TEST]) +@Primary +class MysqlSourceTestConfigurationFactory : + SourceConfigurationFactory { + override fun makeWithoutExceptionHandling( + pojo: MysqlSourceConfigurationJsonObject, + ): MysqlSourceConfiguration = + MysqlSourceConfigurationFactory() + .makeWithoutExceptionHandling(pojo) + .copy(maxConcurrency = 1, checkpointTargetInterval = Duration.ofSeconds(3)) +} diff --git a/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSpecIntegrationTest.kt b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSpecIntegrationTest.kt new file mode 100644 index 000000000000..63a29db649b6 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/test/kotlin/io/airbyte/integrations/source/mysql/MysqlSpecIntegrationTest.kt @@ -0,0 +1,12 @@ +/* Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
*/ +package io.airbyte.integrations.source.mysql + +import io.airbyte.cdk.command.SyncsTestFixture +import org.junit.jupiter.api.Test + +class MysqlSpecIntegrationTest { + @Test + fun testSpec() { + SyncsTestFixture.testSpec("expected-spec.json") + } +} diff --git a/airbyte-integrations/connectors/source-mysql-v2/src/test/resources/expected-spec.json b/airbyte-integrations/connectors/source-mysql-v2/src/test/resources/expected-spec.json new file mode 100644 index 000000000000..f7a871076922 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql-v2/src/test/resources/expected-spec.json @@ -0,0 +1,361 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/mysql", + "connectionSpecification": { + "type": "object", + "title": "Mysql Source Spec", + "$schema": "http://json-schema.org/draft-07/schema#", + "required": [ + "host", + "port", + "username", + "encryption", + "tunnel_method", + "cursor" + ], + "properties": { + "host": { + "type": "string", + "order": 1, + "title": "Host", + "description": "Hostname of the database." + }, + "port": { + "type": "integer", + "order": 2, + "title": "Port", + "default": 3306, + "maximum": 65536, + "minimum": 0, + "description": "Port of the database." + }, + "cursor": { + "type": "object", + "oneOf": [ + { + "type": "object", + "title": "Scan Changes with User Defined Cursor", + "required": ["cursor_method"], + "properties": { + "cursor_method": { + "enum": ["user_defined"], + "type": "string", + "default": "user_defined" + } + }, + "description": "Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. 
created_at, updated_at).", + "additionalProperties": true + } + ], + "order": 10, + "title": "Update Method", + "description": "Configures how data is extracted from the database.", + "display_type": "radio" + }, + "schemas": { + "type": "array", + "items": { + "type": "string" + }, + "order": 6, + "title": "Schemas", + "always_show": true, + "description": "The list of schemas to sync from. Defaults to user. Case sensitive.", + "uniqueItems": true + }, + "password": { + "type": "string", + "order": 5, + "title": "Password", + "always_show": true, + "description": "The password associated with the username.", + "airbyte_secret": true + }, + "username": { + "type": "string", + "order": 4, + "title": "User", + "description": "The username which is used to access the database." + }, + "encryption": { + "type": "object", + "oneOf": [ + { + "type": "object", + "title": "preferred", + "required": ["encryption_method"], + "properties": { + "encryption_method": { + "enum": ["preferred"], + "type": "string", + "default": "preferred" + } + }, + "description": "To allow unencrypted communication only when the source doesn't support encryption.", + "additionalProperties": true + }, + { + "type": "object", + "title": "required", + "required": ["encryption_method"], + "properties": { + "encryption_method": { + "enum": ["required"], + "type": "string", + "default": "required" + } + }, + "description": "To always require encryption. 
Note: The connection will fail if the source doesn't support encryption.", + "additionalProperties": true + }, + { + "type": "object", + "title": "Verify CA", + "required": [ + "encryption_method", + "ssl_certificate", + "ssl_client_certificate", + "ssl_client_key", + "ssl_client_key_password" + ], + "properties": { + "ssl_client_key": { + "type": "string", + "title": "Client Key", + "multiline": true, + "description": "Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)", + "airbyte_secret": true + }, + "ssl_certificate": { + "type": "string", + "title": "CA certificate", + "multiline": true, + "description": "CA certificate", + "airbyte_secret": true + }, + "encryption_method": { + "enum": ["Verify CA"], + "type": "string", + "default": "Verify CA" + }, + "ssl_client_certificate": { + "type": "string", + "title": "Client certificate File", + "multiline": true, + "description": "Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)", + "airbyte_secret": true + }, + "ssl_client_key_password": { + "type": "string", + "title": "Client key password", + "multiline": true, + "description": "Password for keystorage. This field is optional. 
If you do not add it - the password will be generated automatically.", + "airbyte_secret": true + } + }, + "description": "To always require encryption and verify that the source has a valid SSL certificate.", + "additionalProperties": true + }, + { + "type": "object", + "title": "Verify CA", + "required": [ + "encryption_method", + "ssl_certificate", + "ssl_client_certificate", + "ssl_client_key", + "ssl_client_key_password" + ], + "properties": { + "ssl_client_key": { + "type": "string", + "title": "Client Key", + "multiline": true, + "description": "Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)", + "airbyte_secret": true + }, + "ssl_certificate": { + "type": "string", + "title": "CA certificate", + "multiline": true, + "description": "CA certificate", + "airbyte_secret": true + }, + "encryption_method": { + "enum": ["Verify CA"], + "type": "string", + "default": "Verify CA" + }, + "ssl_client_certificate": { + "type": "string", + "title": "Client certificate File", + "multiline": true, + "description": "Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)", + "airbyte_secret": true + }, + "ssl_client_key_password": { + "type": "string", + "title": "Client key password", + "multiline": true, + "description": "Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.", + "airbyte_secret": true + } + }, + "description": "To always require encryption and verify that the source has a valid SSL certificate.", + "additionalProperties": true + } + ], + "order": 8, + "title": "Encryption", + "description": "The encryption method with is used when communicating with the database." + }, + "concurrency": { + "type": "integer", + "order": 12, + "title": "Concurrency", + "default": 1, + "description": "Maximum number of concurrent queries to the database." 
+ }, + "tunnel_method": { + "type": "object", + "oneOf": [ + { + "type": "object", + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "enum": ["NO_TUNNEL"], + "type": "string", + "default": "NO_TUNNEL" + } + }, + "description": "No ssh tunnel needed to connect to database", + "additionalProperties": true + }, + { + "type": "object", + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "ssh_key": { + "type": "string", + "order": 4, + "title": "SSH Private Key", + "multiline": true, + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "airbyte_secret": true + }, + "tunnel_host": { + "type": "string", + "order": 1, + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel." + }, + "tunnel_port": { + "type": "integer", + "order": 2, + "title": "SSH Connection Port", + "default": 22, + "maximum": 65536, + "minimum": 0, + "description": "Port on the proxy/jump server that accepts inbound ssh connections." + }, + "tunnel_user": { + "type": "string", + "order": 3, + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host" + }, + "tunnel_method": { + "enum": ["SSH_KEY_AUTH"], + "type": "string", + "default": "SSH_KEY_AUTH" + } + }, + "description": "Connect through a jump server tunnel host using username and ssh key", + "additionalProperties": true + }, + { + "type": "object", + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_host": { + "type": "string", + "order": 1, + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel." 
+ }, + "tunnel_port": { + "type": "integer", + "order": 2, + "title": "SSH Connection Port", + "default": 22, + "maximum": 65536, + "minimum": 0, + "description": "Port on the proxy/jump server that accepts inbound ssh connections." + }, + "tunnel_user": { + "type": "string", + "order": 3, + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host" + }, + "tunnel_method": { + "enum": ["SSH_PASSWORD_AUTH"], + "type": "string", + "default": "SSH_PASSWORD_AUTH" + }, + "tunnel_user_password": { + "type": "string", + "order": 4, + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "airbyte_secret": true + } + }, + "description": "Connect through a jump server tunnel host using username and password authentication", + "additionalProperties": true + } + ], + "order": 9, + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use." + }, + "jdbc_url_params": { + "type": "string", + "order": 7, + "title": "JDBC URL Params", + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + }, + "check_privileges": { + "type": "boolean", + "order": 13, + "title": "Check Table and Column Access Privileges", + "default": true, + "description": "When this feature is enabled, during schema discovery the connector will query each table or view individually to check access privileges and inaccessible tables, views, or columns therein will be removed. 
In large schemas, this might cause schema discovery to take too long, in which case it might be advisable to disable this feature.", + "display_type": "check" + }, + "checkpoint_target_interval_seconds": { + "type": "integer", + "order": 11, + "title": "Checkpoint Target Time Interval", + "default": 300, + "description": "How often (in seconds) a stream should checkpoint, when possible." + } + }, + "additionalProperties": true + } +} From 03b7e1ad2223235e16391a8e291d4c1c4a54081c Mon Sep 17 00:00:00 2001 From: Anatolii Yatsuk <35109939+tolik0@users.noreply.github.com> Date: Fri, 6 Sep 2024 14:55:14 +0300 Subject: [PATCH 37/51] feat(airbyte-cdk): Add limitation for number of partitions to PerPartitionCursor (#42406) --- .../incremental/per_partition_cursor.py | 20 +++- .../test_per_partition_cursor_integration.py | 107 +++++++++++++++++- 2 files changed, 122 insertions(+), 5 deletions(-) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py index 28c2f0eb6b8b..4babf99e4a0d 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py @@ -2,7 +2,9 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from typing import Any, Callable, Iterable, Mapping, MutableMapping, Optional, Union +import logging +from collections import OrderedDict +from typing import Any, Callable, Iterable, Mapping, Optional, Union from airbyte_cdk.models import FailureType from airbyte_cdk.sources.declarative.incremental.declarative_cursor import DeclarativeCursor @@ -41,6 +43,7 @@ class PerPartitionCursor(DeclarativeCursor): Therefore, we need to manage state per partition. 
""" + DEFAULT_MAX_PARTITIONS_NUMBER = 10000 _NO_STATE: Mapping[str, Any] = {} _NO_CURSOR_STATE: Mapping[str, Any] = {} _KEY = 0 @@ -49,12 +52,17 @@ class PerPartitionCursor(DeclarativeCursor): def __init__(self, cursor_factory: CursorFactory, partition_router: PartitionRouter): self._cursor_factory = cursor_factory self._partition_router = partition_router - self._cursor_per_partition: MutableMapping[str, DeclarativeCursor] = {} + # The dict is ordered to ensure that once the maximum number of partitions is reached, + # the oldest partitions can be efficiently removed, maintaining the most recent partitions. + self._cursor_per_partition: OrderedDict[str, DeclarativeCursor] = OrderedDict() self._partition_serializer = PerPartitionKeySerializer() def stream_slices(self) -> Iterable[StreamSlice]: slices = self._partition_router.stream_slices() for partition in slices: + # Ensure the maximum number of partitions is not exceeded + self._ensure_partition_limit() + cursor = self._cursor_per_partition.get(self._to_partition_key(partition.partition)) if not cursor: cursor = self._create_cursor(self._NO_CURSOR_STATE) @@ -63,6 +71,14 @@ def stream_slices(self) -> Iterable[StreamSlice]: for cursor_slice in cursor.stream_slices(): yield StreamSlice(partition=partition, cursor_slice=cursor_slice) + def _ensure_partition_limit(self) -> None: + """ + Ensure the maximum number of partitions is not exceeded. If so, the oldest added partition will be dropped. + """ + while len(self._cursor_per_partition) > self.DEFAULT_MAX_PARTITIONS_NUMBER - 1: + oldest_partition = self._cursor_per_partition.popitem(last=False)[0] # Remove the oldest partition + logging.warning(f"The maximum number of partitions has been reached. Dropping the oldest partition: {oldest_partition}.") + def set_initial_state(self, stream_state: StreamState) -> None: """ Set the initial state for the cursors. 
diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py index 4d2141b42373..ae99a433c42f 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py @@ -2,10 +2,21 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from unittest.mock import patch +from unittest.mock import MagicMock, patch -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.declarative.incremental.per_partition_cursor import StreamSlice +from airbyte_cdk.models import ( + AirbyteStateBlob, + AirbyteStateMessage, + AirbyteStateType, + AirbyteStream, + AirbyteStreamState, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + StreamDescriptor, + SyncMode, +) +from airbyte_cdk.sources.declarative.incremental.per_partition_cursor import PerPartitionCursor, StreamSlice from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource from airbyte_cdk.sources.declarative.retrievers.simple_retriever import SimpleRetriever from airbyte_cdk.sources.types import Record @@ -268,3 +279,93 @@ def test_substream_without_input_state(): cursor_slice={"start_time": "2022-02-01", "end_time": "2022-02-28"}, ), ] + + +def test_partition_limitation(): + source = ManifestDeclarativeSource( + source_config=ManifestBuilder() + .with_list_partition_router("Rates", "partition_field", ["1", "2", "3"]) + .with_incremental_sync( + "Rates", + start_datetime="2022-01-01", + end_datetime="2022-02-28", + datetime_format="%Y-%m-%d", + cursor_field=CURSOR_FIELD, + step="P1M", + cursor_granularity="P1D", + ) + .build() + ) + + partition_slices = [ + StreamSlice(partition={"partition_field": "1"}, cursor_slice={}), + 
StreamSlice(partition={"partition_field": "2"}, cursor_slice={}), + StreamSlice(partition={"partition_field": "3"}, cursor_slice={}), + ] + + records_list = [ + [ + Record({"a record key": "a record value", CURSOR_FIELD: "2022-01-15"}, partition_slices[0]), + Record({"a record key": "a record value", CURSOR_FIELD: "2022-01-16"}, partition_slices[0]), + ], + [Record({"a record key": "a record value", CURSOR_FIELD: "2022-02-15"}, partition_slices[0])], + [Record({"a record key": "a record value", CURSOR_FIELD: "2022-01-16"}, partition_slices[1])], + [], + [], + [Record({"a record key": "a record value", CURSOR_FIELD: "2022-02-17"}, partition_slices[2])], + ] + + configured_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name="Rates", json_schema={}, supported_sync_modes=[SyncMode.full_refresh, SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + catalog = ConfiguredAirbyteCatalog(streams=[configured_stream]) + + initial_state = [ + AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="post_comment_votes", namespace=None), + stream_state=AirbyteStateBlob.parse_obj( + { + "states": [ + { + "partition": {"partition_field": "1"}, + "cursor": {CURSOR_FIELD: "2022-01-01"}, + }, + { + "partition": {"partition_field": "2"}, + "cursor": {CURSOR_FIELD: "2022-01-02"}, + }, + { + "partition": {"partition_field": "3"}, + "cursor": {CURSOR_FIELD: "2022-01-03"}, + }, + ] + } + ), + ), + ) + ] + logger = MagicMock() + + # with patch.object(PerPartitionCursor, "stream_slices", return_value=partition_slices): + with patch.object(SimpleRetriever, "_read_pages", side_effect=records_list): + with patch.object(PerPartitionCursor, "DEFAULT_MAX_PARTITIONS_NUMBER", 2): + output = list(source.read(logger, {}, catalog, initial_state)) + + # assert output_data == expected_records + final_state = [message.state.stream.stream_state.dict() for message in 
output if message.state] + assert final_state[-1] == { + "states": [ + { + "partition": {"partition_field": "2"}, + "cursor": {CURSOR_FIELD: "2022-01-16"}, + }, + { + "partition": {"partition_field": "3"}, + "cursor": {CURSOR_FIELD: "2022-02-17"}, + }, + ] + } From e73a4dcf5866414902f865637ab95851009ee036 Mon Sep 17 00:00:00 2001 From: tolik0 Date: Fri, 6 Sep 2024 12:00:15 +0000 Subject: [PATCH 38/51] =?UTF-8?q?=F0=9F=A4=96=20minor=20bump=20Python=20CD?= =?UTF-8?q?K=20to=20version=205.1.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index fe8f4ca4aa44..77e1b66da0c4 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 5.1.0 +Add limitation for number of partitions to PerPartitionCursor + ## 5.0.1 Fix source-declarative-manifest diff --git a/airbyte-cdk/python/pyproject.toml b/airbyte-cdk/python/pyproject.toml index 2c5d56c676fd..684a196e7bca 100644 --- a/airbyte-cdk/python/pyproject.toml +++ b/airbyte-cdk/python/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "airbyte-cdk" -version = "5.0.1" +version = "5.1.0" description = "A framework for writing Airbyte Connectors." 
authors = ["Airbyte "] license = "MIT" From b5756b778fc67a580519dbc74474d873c72455e1 Mon Sep 17 00:00:00 2001 From: tolik0 Date: Fri, 6 Sep 2024 12:06:25 +0000 Subject: [PATCH 39/51] =?UTF-8?q?=F0=9F=A4=96=20Cut=20version=205.1.0=20of?= =?UTF-8?q?=20source-declarative-manifest?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connectors/source-declarative-manifest/metadata.yaml | 2 +- .../connectors/source-declarative-manifest/poetry.lock | 8 ++++---- .../connectors/source-declarative-manifest/pyproject.toml | 4 ++-- docs/integrations/sources/low-code.md | 1 + 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml index 727ed9bb937f..eea5d5ae636f 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml +++ b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml @@ -8,7 +8,7 @@ data: connectorType: source definitionId: 64a2f99c-542f-4af8-9a6f-355f1217b436 # This version should not be updated manually - it is updated by the CDK release workflow. - dockerImageTag: 5.0.1 + dockerImageTag: 5.1.0 dockerRepository: airbyte/source-declarative-manifest # This page is hidden from the docs for now, since the connector is not in any Airbyte registries. documentationUrl: https://docs.airbyte.com/integrations/sources/low-code diff --git a/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock index efd0ccc8b9bf..adf82cd66082 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock +++ b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-cdk" -version = "5.0.1" +version = "5.1.0" description = "A framework for writing Airbyte Connectors." 
optional = false python-versions = "<4.0,>=3.10" files = [ - {file = "airbyte_cdk-5.0.1-py3-none-any.whl", hash = "sha256:9925a07a4f15c2d61775c09cf9359d477f7bd4414cc3986b643cfb2b8325b33d"}, - {file = "airbyte_cdk-5.0.1.tar.gz", hash = "sha256:cc1b6c55dbc4709784552132aeac06ab0ace97dcafdd9ed4db6738619fdab260"}, + {file = "airbyte_cdk-5.1.0-py3-none-any.whl", hash = "sha256:98cef0398459f8ccc35335f486a30d7e13f8b929879730dbc18296d4df6e4e23"}, + {file = "airbyte_cdk-5.1.0.tar.gz", hash = "sha256:e8c039f392bb2d17ddb3ea8e191642e231e92d9b097e0705a2e69a26e44a97b5"}, ] [package.dependencies] @@ -1605,4 +1605,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.10,<3.12" -content-hash = "fbf3a4e72f12e002577854e63cb758d4bad25d928595a272cc023c859f62c1be" +content-hash = "5b8febed2b745b6298948425c022b91d7c33fe9e60fce2aa1a7c5a2e081ed1ad" diff --git a/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml index da92760e60ae..74345ba37e4e 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml +++ b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml @@ -3,7 +3,7 @@ requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "5.0.1" +version = "5.1.0" name = "source-declarative-manifest" description = "Base source implementation for low-code sources." 
authors = ["Airbyte "] @@ -17,7 +17,7 @@ include = "source_declarative_manifest" [tool.poetry.dependencies] python = "^3.10,<3.12" -airbyte-cdk = "5.0.1" +airbyte-cdk = "5.1.0" [tool.poetry.scripts] source-declarative-manifest = "source_declarative_manifest.run:run" diff --git a/docs/integrations/sources/low-code.md b/docs/integrations/sources/low-code.md index 39211aba113e..09f9ea660f6b 100644 --- a/docs/integrations/sources/low-code.md +++ b/docs/integrations/sources/low-code.md @@ -9,6 +9,7 @@ The changelog below is automatically updated by the `bump_version` command as pa | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------- | +| 5.1.0 | 2024-09-06 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.1.0 | | 5.0.1 | 2024-09-03 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.0.1 | | 5.0.0 | 2024-09-02 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.0.0 | | 4.6.2 | 2024-08-29 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 4.6.2 | From 337e1fc341599d1392afd575ad05bbd1e820d02c Mon Sep 17 00:00:00 2001 From: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Date: Fri, 6 Sep 2024 14:57:43 +0200 Subject: [PATCH 40/51] feat(source-zendesk-support): update CDK to v5 (#45187) Signed-off-by: Artem Inzhyyants --- .../source-zendesk-support/metadata.yaml | 2 +- .../source-zendesk-support/poetry.lock | 439 ++++++++++-------- .../source-zendesk-support/pyproject.toml | 4 +- .../source_zendesk_support/streams.py | 3 +- .../integrations/test_post_comment_votes.py | 10 +- .../integrations/test_post_comments.py | 10 +- .../integrations/test_post_votes.py | 10 +- .../integrations/test_ticket_metrics.py | 6 +- .../unit_tests/integrations/utils.py | 2 +- .../unit_tests/unit_test.py 
| 2 +- docs/integrations/sources/zendesk-support.md | 207 +++++---- 11 files changed, 385 insertions(+), 310 deletions(-) diff --git a/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml b/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml index 70a0a02ac1a2..05e462c59eaa 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml +++ b/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml @@ -11,7 +11,7 @@ data: connectorSubtype: api connectorType: source definitionId: 79c1aa37-dae3-42ae-b333-d1c105477715 - dockerImageTag: 4.0.2 + dockerImageTag: 4.1.0 dockerRepository: airbyte/source-zendesk-support documentationUrl: https://docs.airbyte.com/integrations/sources/zendesk-support githubIssueLabel: source-zendesk-support diff --git a/airbyte-integrations/connectors/source-zendesk-support/poetry.lock b/airbyte-integrations/connectors/source-zendesk-support/poetry.lock index cf6177f2a30f..a0bc05d0053a 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/poetry.lock +++ b/airbyte-integrations/connectors/source-zendesk-support/poetry.lock @@ -1,18 +1,18 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "4.6.2" +version = "5.0.1" description = "A framework for writing Airbyte Connectors." 
optional = false python-versions = "<4.0,>=3.10" files = [ - {file = "airbyte_cdk-4.6.2-py3-none-any.whl", hash = "sha256:3a37bd96c4b4f874b15fc18839b1e163eb30d1e4ef80d7dde2854e6a48efe934"}, - {file = "airbyte_cdk-4.6.2.tar.gz", hash = "sha256:c034f11ba6abe73dd7346ce2bc7017ff71ef0db1fd1ae86fb86beaeae35d8baf"}, + {file = "airbyte_cdk-5.0.1-py3-none-any.whl", hash = "sha256:9925a07a4f15c2d61775c09cf9359d477f7bd4414cc3986b643cfb2b8325b33d"}, + {file = "airbyte_cdk-5.0.1.tar.gz", hash = "sha256:cc1b6c55dbc4709784552132aeac06ab0ace97dcafdd9ed4db6738619fdab260"}, ] [package.dependencies] -airbyte-protocol-models-pdv2 = ">=0.12.2,<0.13.0" +airbyte-protocol-models-dataclasses = ">=0.13,<0.14" backoff = "*" cachetools = "*" cryptography = ">=42.0.5,<43.0.0" @@ -35,6 +35,7 @@ pytz = "2024.1" PyYAML = ">=6.0.1,<7.0.0" requests = "*" requests_cache = "*" +serpyco-rs = ">=1.10.2,<2.0.0" wcmatch = "8.4" [package.extras] @@ -43,19 +44,16 @@ sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] -name = "airbyte-protocol-models-pdv2" -version = "0.12.2" -description = "Declares the Airbyte Protocol." +name = "airbyte-protocol-models-dataclasses" +version = "0.13.0" +description = "Declares the Airbyte Protocol using Python Dataclasses. 
Dataclasses in Python have less performance overhead compared to Pydantic models, making them a more efficient choice for scenarios where speed and memory usage are critical" optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models_pdv2-0.12.2-py3-none-any.whl", hash = "sha256:8b3f9d0388928547cdf2e9134c0d589e4bcaa6f63bf71a21299f6824bfb7ad0e"}, - {file = "airbyte_protocol_models_pdv2-0.12.2.tar.gz", hash = "sha256:130c9ab289f3f53749ce63ff1abbfb67a44b7e5bd2794865315a2976138b672b"}, + {file = "airbyte_protocol_models_dataclasses-0.13.0-py3-none-any.whl", hash = "sha256:0aedb99ffc4f9aab0ce91bba2c292fa17cd8fd4b42eeba196d6a16c20bbbd7a5"}, + {file = "airbyte_protocol_models_dataclasses-0.13.0.tar.gz", hash = "sha256:72e67850d661e2808406aec5839b3158ebb94d3553b798dbdae1b4a278548d2f"}, ] -[package.dependencies] -pydantic = ">=2.7.2,<3.0.0" - [[package]] name = "annotated-types" version = "0.7.0" @@ -99,6 +97,17 @@ files = [ {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, ] +[[package]] +name = "attributes-doc" +version = "0.4.0" +description = "PEP 224 implementation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "attributes-doc-0.4.0.tar.gz", hash = "sha256:b1576c94a714e9fc2c65c47cf10d0c8e1a5f7c4f5ae7f69006be108d95cbfbfb"}, + {file = "attributes_doc-0.4.0-py2.py3-none-any.whl", hash = "sha256:4c3007d9e58f3a6cb4b9c614c4d4ce2d92161581f28e594ddd8241cc3a113bdd"}, +] + [[package]] name = "attrs" version = "24.2.0" @@ -190,78 +199,78 @@ files = [ [[package]] name = "cffi" -version = "1.17.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file 
= "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = 
"sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = 
"cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -713,13 +722,13 @@ extended-testing = ["jinja2 (>=3,<4)"] [[package]] name = "langsmith" -version = "0.1.108" +version = "0.1.115" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.108-py3-none-any.whl", hash = "sha256:407f318b0989e33f2cd30bc2fbd443e4ddfa7c2a93de7f795fb6b119b015583c"}, - {file = "langsmith-0.1.108.tar.gz", hash = "sha256:42f603e2d5770ba36093951bdb29eaab22451cb12ab8c062340c722cf60d4cec"}, + {file = "langsmith-0.1.115-py3-none-any.whl", hash = "sha256:04e35cfd4c2d4ff1ea10bb577ff43957b05ebb3d9eb4e06e200701f4a2b4ac9f"}, + {file = "langsmith-0.1.115.tar.gz", hash = "sha256:3b775377d858d32354f3ee0dd1ed637068cfe9a1f13e7b3bfa82db1615cdffc9"}, ] [package.dependencies] @@ -988,119 +997,120 @@ files = [ [[package]] name = "pydantic" -version = "2.8.2" +version = "2.9.0" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.9.0-py3-none-any.whl", hash = "sha256:f66a7073abd93214a20c5f7b32d56843137a7a2e70d02111f3be287035c45370"}, + {file = "pydantic-2.9.0.tar.gz", hash = "sha256:c7a8a9fdf7d100afa49647eae340e2d23efa382466a8d177efcd1381e9be5598"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" +pydantic-core = "2.23.2" typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} +tzdata = {version = "*", markers = "python_version >= \"3.9\""} [package.extras] email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.23.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = 
"pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = 
"pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = 
"pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = 
"pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", 
hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = 
"pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.23.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7d0324a35ab436c9d768753cbc3c47a865a2cbc0757066cb864747baa61f6ece"}, + {file = "pydantic_core-2.23.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:276ae78153a94b664e700ac362587c73b84399bd1145e135287513442e7dfbc7"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:964c7aa318da542cdcc60d4a648377ffe1a2ef0eb1e996026c7f74507b720a78"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1cf842265a3a820ebc6388b963ead065f5ce8f2068ac4e1c713ef77a67b71f7c"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae90b9e50fe1bd115b24785e962b51130340408156d34d67b5f8f3fa6540938e"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ae65fdfb8a841556b52935dfd4c3f79132dc5253b12c0061b96415208f4d622"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c8aa40f6ca803f95b1c1c5aeaee6237b9e879e4dfb46ad713229a63651a95fb"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c53100c8ee5a1e102766abde2158077d8c374bee0639201f11d3032e3555dfbc"}, + {file = 
"pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6b9dd6aa03c812017411734e496c44fef29b43dba1e3dd1fa7361bbacfc1354"}, + {file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b18cf68255a476b927910c6873d9ed00da692bb293c5b10b282bd48a0afe3ae2"}, + {file = "pydantic_core-2.23.2-cp310-none-win32.whl", hash = "sha256:e460475719721d59cd54a350c1f71c797c763212c836bf48585478c5514d2854"}, + {file = "pydantic_core-2.23.2-cp310-none-win_amd64.whl", hash = "sha256:5f3cf3721eaf8741cffaf092487f1ca80831202ce91672776b02b875580e174a"}, + {file = "pydantic_core-2.23.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:7ce8e26b86a91e305858e018afc7a6e932f17428b1eaa60154bd1f7ee888b5f8"}, + {file = "pydantic_core-2.23.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e9b24cca4037a561422bf5dc52b38d390fb61f7bfff64053ce1b72f6938e6b2"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:753294d42fb072aa1775bfe1a2ba1012427376718fa4c72de52005a3d2a22178"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:257d6a410a0d8aeb50b4283dea39bb79b14303e0fab0f2b9d617701331ed1515"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8319e0bd6a7b45ad76166cc3d5d6a36c97d0c82a196f478c3ee5346566eebfd"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a05c0240f6c711eb381ac392de987ee974fa9336071fb697768dfdb151345ce"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d5b0ff3218858859910295df6953d7bafac3a48d5cd18f4e3ed9999efd2245f"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:96ef39add33ff58cd4c112cbac076726b96b98bb8f1e7f7595288dcfb2f10b57"}, + {file = 
"pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0102e49ac7d2df3379ef8d658d3bc59d3d769b0bdb17da189b75efa861fc07b4"}, + {file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6612c2a844043e4d10a8324c54cdff0042c558eef30bd705770793d70b224aa"}, + {file = "pydantic_core-2.23.2-cp311-none-win32.whl", hash = "sha256:caffda619099cfd4f63d48462f6aadbecee3ad9603b4b88b60cb821c1b258576"}, + {file = "pydantic_core-2.23.2-cp311-none-win_amd64.whl", hash = "sha256:6f80fba4af0cb1d2344869d56430e304a51396b70d46b91a55ed4959993c0589"}, + {file = "pydantic_core-2.23.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4c83c64d05ffbbe12d4e8498ab72bdb05bcc1026340a4a597dc647a13c1605ec"}, + {file = "pydantic_core-2.23.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6294907eaaccf71c076abdd1c7954e272efa39bb043161b4b8aa1cd76a16ce43"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a801c5e1e13272e0909c520708122496647d1279d252c9e6e07dac216accc41"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cc0c316fba3ce72ac3ab7902a888b9dc4979162d320823679da270c2d9ad0cad"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b06c5d4e8701ac2ba99a2ef835e4e1b187d41095a9c619c5b185c9068ed2a49"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82764c0bd697159fe9947ad59b6db6d7329e88505c8f98990eb07e84cc0a5d81"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b1a195efd347ede8bcf723e932300292eb13a9d2a3c1f84eb8f37cbbc905b7f"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7efb12e5071ad8d5b547487bdad489fbd4a5a35a0fc36a1941517a6ad7f23e0"}, + {file = 
"pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5dd0ec5f514ed40e49bf961d49cf1bc2c72e9b50f29a163b2cc9030c6742aa73"}, + {file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:820f6ee5c06bc868335e3b6e42d7ef41f50dfb3ea32fbd523ab679d10d8741c0"}, + {file = "pydantic_core-2.23.2-cp312-none-win32.whl", hash = "sha256:3713dc093d5048bfaedbba7a8dbc53e74c44a140d45ede020dc347dda18daf3f"}, + {file = "pydantic_core-2.23.2-cp312-none-win_amd64.whl", hash = "sha256:e1895e949f8849bc2757c0dbac28422a04be031204df46a56ab34bcf98507342"}, + {file = "pydantic_core-2.23.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:da43cbe593e3c87d07108d0ebd73771dc414488f1f91ed2e204b0370b94b37ac"}, + {file = "pydantic_core-2.23.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:64d094ea1aa97c6ded4748d40886076a931a8bf6f61b6e43e4a1041769c39dd2"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:084414ffe9a85a52940b49631321d636dadf3576c30259607b75516d131fecd0"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:043ef8469f72609c4c3a5e06a07a1f713d53df4d53112c6d49207c0bd3c3bd9b"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3649bd3ae6a8ebea7dc381afb7f3c6db237fc7cebd05c8ac36ca8a4187b03b30"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6db09153d8438425e98cdc9a289c5fade04a5d2128faff8f227c459da21b9703"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5668b3173bb0b2e65020b60d83f5910a7224027232c9f5dc05a71a1deac9f960"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c7b81beaf7c7ebde978377dc53679c6cba0e946426fc7ade54251dfe24a7604"}, + {file = 
"pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:ae579143826c6f05a361d9546446c432a165ecf1c0b720bbfd81152645cb897d"}, + {file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:19f1352fe4b248cae22a89268720fc74e83f008057a652894f08fa931e77dced"}, + {file = "pydantic_core-2.23.2-cp313-none-win32.whl", hash = "sha256:e1a79ad49f346aa1a2921f31e8dbbab4d64484823e813a002679eaa46cba39e1"}, + {file = "pydantic_core-2.23.2-cp313-none-win_amd64.whl", hash = "sha256:582871902e1902b3c8e9b2c347f32a792a07094110c1bca6c2ea89b90150caac"}, + {file = "pydantic_core-2.23.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:743e5811b0c377eb830150d675b0847a74a44d4ad5ab8845923d5b3a756d8100"}, + {file = "pydantic_core-2.23.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6650a7bbe17a2717167e3e23c186849bae5cef35d38949549f1c116031b2b3aa"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56e6a12ec8d7679f41b3750ffa426d22b44ef97be226a9bab00a03365f217b2b"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810ca06cca91de9107718dc83d9ac4d2e86efd6c02cba49a190abcaf33fb0472"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:785e7f517ebb9890813d31cb5d328fa5eda825bb205065cde760b3150e4de1f7"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ef71ec876fcc4d3bbf2ae81961959e8d62f8d74a83d116668409c224012e3af"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50ac34835c6a4a0d456b5db559b82047403c4317b3bc73b3455fefdbdc54b0a"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16b25a4a120a2bb7dab51b81e3d9f3cde4f9a4456566c403ed29ac81bf49744f"}, + {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:41ae8537ad371ec018e3c5da0eb3f3e40ee1011eb9be1da7f965357c4623c501"}, + {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07049ec9306ec64e955b2e7c40c8d77dd78ea89adb97a2013d0b6e055c5ee4c5"}, + {file = "pydantic_core-2.23.2-cp38-none-win32.whl", hash = "sha256:086c5db95157dc84c63ff9d96ebb8856f47ce113c86b61065a066f8efbe80acf"}, + {file = "pydantic_core-2.23.2-cp38-none-win_amd64.whl", hash = "sha256:67b6655311b00581914aba481729971b88bb8bc7996206590700a3ac85e457b8"}, + {file = "pydantic_core-2.23.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:358331e21a897151e54d58e08d0219acf98ebb14c567267a87e971f3d2a3be59"}, + {file = "pydantic_core-2.23.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c4d9f15ffe68bcd3898b0ad7233af01b15c57d91cd1667f8d868e0eacbfe3f87"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0123655fedacf035ab10c23450163c2f65a4174f2bb034b188240a6cf06bb123"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6e3ccebdbd6e53474b0bb7ab8b88e83c0cfe91484b25e058e581348ee5a01a5"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc535cb898ef88333cf317777ecdfe0faac1c2a3187ef7eb061b6f7ecf7e6bae"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aab9e522efff3993a9e98ab14263d4e20211e62da088298089a03056980a3e69"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05b366fb8fe3d8683b11ac35fa08947d7b92be78ec64e3277d03bd7f9b7cda79"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7568f682c06f10f30ef643a1e8eec4afeecdafde5c4af1b574c6df079e96f96c"}, + {file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cdd02a08205dc90238669f082747612cb3c82bd2c717adc60f9b9ecadb540f80"}, + 
{file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a2ab4f410f4b886de53b6bddf5dd6f337915a29dd9f22f20f3099659536b2f6"}, + {file = "pydantic_core-2.23.2-cp39-none-win32.whl", hash = "sha256:0448b81c3dfcde439551bb04a9f41d7627f676b12701865c8a2574bcea034437"}, + {file = "pydantic_core-2.23.2-cp39-none-win_amd64.whl", hash = "sha256:4cebb9794f67266d65e7e4cbe5dcf063e29fc7b81c79dc9475bd476d9534150e"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e758d271ed0286d146cf7c04c539a5169a888dd0b57026be621547e756af55bc"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f477d26183e94eaafc60b983ab25af2a809a1b48ce4debb57b343f671b7a90b6"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da3131ef2b940b99106f29dfbc30d9505643f766704e14c5d5e504e6a480c35e"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329a721253c7e4cbd7aad4a377745fbcc0607f9d72a3cc2102dd40519be75ed2"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7706e15cdbf42f8fab1e6425247dfa98f4a6f8c63746c995d6a2017f78e619ae"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e64ffaf8f6e17ca15eb48344d86a7a741454526f3a3fa56bc493ad9d7ec63936"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dd59638025160056687d598b054b64a79183f8065eae0d3f5ca523cde9943940"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:12625e69b1199e94b0ae1c9a95d000484ce9f0182f9965a26572f054b1537e44"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d813fd871b3d5c3005157622ee102e8908ad6011ec915a18bd8fde673c4360e"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:1eb37f7d6a8001c0f86dc8ff2ee8d08291a536d76e49e78cda8587bb54d8b329"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce7eaf9a98680b4312b7cebcdd9352531c43db00fca586115845df388f3c465"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f087879f1ffde024dd2788a30d55acd67959dcf6c431e9d3682d1c491a0eb474"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ce883906810b4c3bd90e0ada1f9e808d9ecf1c5f0b60c6b8831d6100bcc7dd6"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a8031074a397a5925d06b590121f8339d34a5a74cfe6970f8a1124eb8b83f4ac"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23af245b8f2f4ee9e2c99cb3f93d0e22fb5c16df3f2f643f5a8da5caff12a653"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c57e493a0faea1e4c38f860d6862ba6832723396c884fbf938ff5e9b224200e2"}, + {file = "pydantic_core-2.23.2.tar.gz", hash = "sha256:95d6bf449a1ac81de562d65d180af5d8c19672793c81877a2eda8fde5d08f2fd"}, ] [package.dependencies] @@ -1476,15 +1486,69 @@ six = "*" fixture = ["fixtures"] test = ["fixtures", "mock", "purl", "pytest", "sphinx", "testrepository (>=0.0.18)", "testtools"] +[[package]] +name = "serpyco-rs" +version = "1.11.0" +description = "" +optional = false +python-versions = ">=3.9" +files = [ + {file = "serpyco_rs-1.11.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:4b2bd933539bd8c84315e2fb5ae52ef7a58ace5a6dfe3f8b73f74dc71216779e"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:627f957889ff73c4d2269fc7b6bba93212381befe03633e7cb5495de66ba9a33"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:b0933620abc01434023e0e3e22255b7e4ab9b427b5a9a5ee00834656d792377a"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9ce46683d92e34abb20304817fc5ac6cb141a06fc7468dedb1d8865a8a9682f6"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bda437d86e8859bf91c189c1f4650899822f6d6d7b02b48f5729da904eb7bb7d"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a72bfbd282af17ebe76d122639013e802c09902543fdbbd828fb2159ec9755e"}, + {file = "serpyco_rs-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d4808df5384e3e8581e31a90ba7a1fa501c0837b1f174284bb8a4555b6864ea"}, + {file = "serpyco_rs-1.11.0-cp310-none-win_amd64.whl", hash = "sha256:c7b60aef4c16d68efb0d6241f05d0a434d873d98449cbb4366b0d385f0a7172b"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8d47ee577cf4d69b53917615cb031ad8708eb2f59fe78194b1968c13130fc2f7"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6090d9a1487237cdd4e9362a823eede23249602019b917e7bd57846179286e79"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7192eb3df576386fefd595ea31ae25c62522841ffec7e7aeb37a80b55bdc3213"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b52ef8affb7e71b9b98a7d5216d6a7ad03b04e990acb147cd9211c8b931c5487"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3480e09e473560c60e74aaa789e6b4d079637371aae0a98235440111464bbba7"}, + {file = "serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c92e36b0ab6fe866601c2331f7e99c809a126d21963c03d8a5c29331526deed"}, + {file = 
"serpyco_rs-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84f497361952d4566bc1f77e9e15a84a2614f593cc671fbf0a0fa80046f9c3d7"}, + {file = "serpyco_rs-1.11.0-cp311-none-win_amd64.whl", hash = "sha256:37fc1cf192bef9784fbf1f4e03cec21750b9e704bef55cc0442f71a715eee920"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3ea93d485f03dc8b0cfb0d477f0ad2e86e78f0461b53010656ab5b4db1b41fb0"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7772410d15694b03f9c5500a2c47d62eed76e191bea4087ad042250346b1a38e"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42118463c1679846cffd2f06f47744c9b9eb33c5d0448afd88ea19e1a81a8ddd"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:79481a455b76cc56021dc55bb6d5bdda1b2b32bcb6a1ee711b597140d112e9b1"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8fd79051f9af9591fc03cf7d3033ff180416301f6a4fd3d1e3d92ebd2d68697"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d29c8f9aeed734a3b51f7349d04ec9063516ffa4e10b632d75e9b1309e4930e4"}, + {file = "serpyco_rs-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15609158b0d9591ffa118302cd9d0039970cb3faf91dce32975f7d276e7411d5"}, + {file = "serpyco_rs-1.11.0-cp312-none-win_amd64.whl", hash = "sha256:00081eae77fbf4c5d88371c5586317ab02ccb293a330b460869a283edf2b7b69"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3028893366a1985adcedb13fa8f6f98c087c185efc427f94c2ccdafa40f45832"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3c18bf511316f3abf648a68ee62ef88617bec57d3fcde69466b4361102715ae5"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7dde9ef09cdfaf7c62378186b9e29f54ec76114be4c347be6a06dd559c5681e"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:18500ebc5e75285841e35585a238629a990b709e14f68933233640d15ca17d5f"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47c23132d4e03982703a7630aa09877b41e499722142f76b6153f6619b612f3"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5f8e6ba499f6a0825bee0d8f8764569d367af871b563fc6512c171474e8e5383"}, + {file = "serpyco_rs-1.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15438a076047c34cff6601a977df54948e8d39d1a86f89d05c48bc60f4c12a61"}, + {file = "serpyco_rs-1.11.0-cp313-none-win_amd64.whl", hash = "sha256:84ee2c109415bd81904fc9abb9aec86a5dd13166808c21142cf23ec639f683bd"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5c97c16c865261577fac4effeccc7ef5e0a1e8e35e7a3ee6c90c77c3a4cd7ff9"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47825e70f86fd6ef7c4a835dea3d6e8eef4fee354ed7b39ced99f31aba74a86e"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24d220220365110edba2f778f41ab3cf396883da0f26e1361a3ada9bd0227f73"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3a46f334af5a9d77acc6e1e58f355ae497900a2798929371f0545e274f6e6166"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d72b748acce4b4e3c7c9724e1eb33d033a1c26b08a698b393e0288060e0901"}, + {file = 
"serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2b8b6f205e8cc038d4d30dd0e70eece7bbecc816eb2f3787c330dc2218e232d"}, + {file = "serpyco_rs-1.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:038d748bfff31f150f0c3edab2766b8843edb952cb1bd3bf547886beb0912dae"}, + {file = "serpyco_rs-1.11.0-cp39-none-win_amd64.whl", hash = "sha256:0fee1c89ec2cb013dc232e4ebef88e2844357ce8631063b56639dbfb83762f20"}, + {file = "serpyco_rs-1.11.0.tar.gz", hash = "sha256:70a844615ffb229e6e89c204b3ab7404aacaf2838911814c7d847969b8da2e3a"}, +] + +[package.dependencies] +attributes-doc = "*" +typing-extensions = "*" + [[package]] name = "setuptools" -version = "74.0.0" +version = "74.1.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"}, - {file = "setuptools-74.0.0.tar.gz", hash = "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"}, + {file = "setuptools-74.1.2-py3-none-any.whl", hash = "sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308"}, + {file = "setuptools-74.1.2.tar.gz", hash = "sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6"}, ] [package.extras] @@ -1575,6 +1639,17 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + [[package]] name = 
"url-normalize" version = "1.4.3" @@ -1702,4 +1777,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.10,<3.12" -content-hash = "fa58aec9255618ae99b42b54933da906ca01c21d8fd519592dba40867d04947f" +content-hash = "6605e05cbdfd8379707277ab56130a2e45238e6135c654318e904d91c8d6470c" diff --git a/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml b/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml index 528855f2fe1f..770275086183 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml +++ b/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.0.2" +version = "4.1.0" name = "source-zendesk-support" description = "Source implementation for Zendesk Support." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_zendesk_support" [tool.poetry.dependencies] python = "^3.10,<3.12" -airbyte-cdk = "^4" +airbyte-cdk = "^5" pytz = "==2024.1" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index 71b4fb54a105..f1887a03442e 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -14,7 +14,7 @@ import pytz import requests from airbyte_cdk import BackoffStrategy -from airbyte_cdk.models import SyncMode +from airbyte_cdk.models import FailureType, SyncMode from airbyte_cdk.sources.streams.core import StreamData, package_name_from_class from airbyte_cdk.sources.streams.http import HttpStream, HttpSubStream from airbyte_cdk.sources.streams.http.error_handlers import ErrorHandler, ErrorResolution, HttpStatusErrorHandler, ResponseAction @@ -22,7 +22,6 @@ from 
airbyte_cdk.sources.utils.schema_helpers import ResourceSchemaLoader from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer from airbyte_cdk.utils import AirbyteTracedException -from airbyte_protocol.models import FailureType DATETIME_FORMAT: str = "%Y-%m-%dT%H:%M:%SZ" LAST_END_TIME_KEY: str = "_last_end_time" diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comment_votes.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comment_votes.py index 7f365dec20b2..ffe141cdfcb4 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comment_votes.py +++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comment_votes.py @@ -6,12 +6,12 @@ import freezegun import pendulum +from airbyte_cdk.models import AirbyteStateBlob +from airbyte_cdk.models import Level as LogLevel +from airbyte_cdk.models import SyncMode from airbyte_cdk.test.mock_http import HttpMocker from airbyte_cdk.test.mock_http.response_builder import FieldPath from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import AirbyteStateBlob -from airbyte_protocol.models import Level as LogLevel -from airbyte_protocol.models import SyncMode from .config import ConfigBuilder from .helpers import given_post_comments, given_posts, given_ticket_forms @@ -209,7 +209,7 @@ def test_given_no_state_and_successful_sync_when_read_then_set_state_to_now(self assert len(output.records) == 1 assert output.most_recent_state.stream_descriptor.name == "post_comment_votes" - assert output.most_recent_state.stream_state == AirbyteStateBlob.model_validate({"updated_at": post_comment_votes["updated_at"]}) + assert output.most_recent_state.stream_state == AirbyteStateBlob({"updated_at": post_comment_votes["updated_at"]}) @HttpMocker() def 
test_given_state_and_pagination_when_read_then_return_records(self, http_mocker): @@ -267,4 +267,4 @@ def test_given_state_and_pagination_when_read_then_return_records(self, http_moc assert len(output.records) == 2 assert output.most_recent_state.stream_descriptor.name == "post_comment_votes" - assert output.most_recent_state.stream_state == AirbyteStateBlob.model_validate({"updated_at": datetime_to_string(last_page_record_updated_at)}) + assert output.most_recent_state.stream_state == AirbyteStateBlob({"updated_at": datetime_to_string(last_page_record_updated_at)}) diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comments.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comments.py index 9f4200b44c05..a9d2d825c5bb 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comments.py +++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_comments.py @@ -6,12 +6,12 @@ import freezegun import pendulum +from airbyte_cdk.models import AirbyteStateBlob +from airbyte_cdk.models import Level as LogLevel +from airbyte_cdk.models import SyncMode from airbyte_cdk.test.mock_http import HttpMocker from airbyte_cdk.test.mock_http.response_builder import FieldPath from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import AirbyteStateBlob -from airbyte_protocol.models import Level as LogLevel -from airbyte_protocol.models import SyncMode from .config import ConfigBuilder from .helpers import given_posts, given_ticket_forms @@ -179,7 +179,7 @@ def test_given_no_state_and_successful_sync_when_read_then_set_state_to_now(self post_comment = post_comments_record_builder.build() assert output.most_recent_state.stream_descriptor.name == "post_comments" - assert output.most_recent_state.stream_state == AirbyteStateBlob.model_validate({"updated_at": 
post_comment["updated_at"]}) + assert output.most_recent_state.stream_state == AirbyteStateBlob({"updated_at": post_comment["updated_at"]}) @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker): @@ -234,4 +234,4 @@ def test_given_state_and_pagination_when_read_then_return_records(self, http_moc assert len(output.records) == 2 assert output.most_recent_state.stream_descriptor.name == "post_comments" - assert output.most_recent_state.stream_state == AirbyteStateBlob.model_validate({"updated_at": datetime_to_string(last_page_record_updated_at)}) + assert output.most_recent_state.stream_state == AirbyteStateBlob({"updated_at": datetime_to_string(last_page_record_updated_at)}) diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_votes.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_votes.py index 17596c46d89b..a2617b5a07df 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_votes.py +++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_post_votes.py @@ -6,12 +6,12 @@ import freezegun import pendulum +from airbyte_cdk.models import AirbyteStateBlob +from airbyte_cdk.models import Level as LogLevel +from airbyte_cdk.models import SyncMode from airbyte_cdk.test.mock_http import HttpMocker from airbyte_cdk.test.mock_http.response_builder import FieldPath from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import AirbyteStateBlob -from airbyte_protocol.models import Level as LogLevel -from airbyte_protocol.models import SyncMode from .config import ConfigBuilder from .helpers import given_posts, given_ticket_forms @@ -179,7 +179,7 @@ def test_given_no_state_and_successful_sync_when_read_then_set_state_to_now(self post_comment = post_comments_record_builder.build() assert output.most_recent_state.stream_descriptor.name 
== "post_votes" - assert output.most_recent_state.stream_state == AirbyteStateBlob.model_validate({"updated_at": post_comment["updated_at"]}) + assert output.most_recent_state.stream_state == AirbyteStateBlob({"updated_at": post_comment["updated_at"]}) @HttpMocker() def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker): @@ -231,4 +231,4 @@ def test_given_state_and_pagination_when_read_then_return_records(self, http_moc assert len(output.records) == 2 assert output.most_recent_state.stream_descriptor.name == "post_votes" - assert output.most_recent_state.stream_state == AirbyteStateBlob.model_validate({"updated_at": datetime_to_string(last_page_record_updated_at)}) + assert output.most_recent_state.stream_state == AirbyteStateBlob({"updated_at": datetime_to_string(last_page_record_updated_at)}) diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_ticket_metrics.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_ticket_metrics.py index 131c808ab8ad..3a1eaf71dccd 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_ticket_metrics.py +++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/test_ticket_metrics.py @@ -5,10 +5,10 @@ import freezegun import pendulum +from airbyte_cdk.models import AirbyteStateBlob, SyncMode from airbyte_cdk.test.mock_http import HttpMocker from airbyte_cdk.test.mock_http.response_builder import FieldPath from airbyte_cdk.test.state_builder import StateBuilder -from airbyte_protocol.models import AirbyteStateBlob, SyncMode from .config import ConfigBuilder from .helpers import given_tickets, given_tickets_with_state @@ -56,7 +56,7 @@ def test_given_no_state_and_successful_sync_when_read_then_set_state_to_most_rec assert len(output.records) == 1 assert output.most_recent_state.stream_descriptor.name == "ticket_metrics" - assert 
output.most_recent_state.stream_state == AirbyteStateBlob.model_validate({ + assert output.most_recent_state.stream_state == AirbyteStateBlob({ "generated_timestamp": ticket["generated_timestamp"] }) @@ -84,6 +84,6 @@ def test_given_state_and_successful_sync_when_read_then_return_record(self, http assert len(output.records) == 1 assert output.most_recent_state.stream_descriptor.name == "ticket_metrics" - assert output.most_recent_state.stream_state == AirbyteStateBlob.model_validate({ + assert output.most_recent_state.stream_state == AirbyteStateBlob({ "generated_timestamp": record_cursor_value.int_timestamp }) diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/utils.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/utils.py index 50fed20d4074..6d78dac568ce 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/utils.py +++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/integrations/utils.py @@ -6,9 +6,9 @@ import pendulum from airbyte_cdk.models import AirbyteMessage from airbyte_cdk.models import Level as LogLevel +from airbyte_cdk.models import SyncMode from airbyte_cdk.test.catalog_builder import CatalogBuilder from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read -from airbyte_protocol.models import SyncMode from pendulum.datetime import DateTime from source_zendesk_support import SourceZendeskSupport diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py index 04394cd72b09..9a33e3802b34 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py @@ -16,8 +16,8 @@ import pytest import pytz import requests +from airbyte_cdk.models import SyncMode from 
airbyte_cdk.sources.streams.http.error_handlers import ResponseAction -from airbyte_protocol.models import SyncMode from source_zendesk_support.source import BasicApiTokenAuthenticator, SourceZendeskSupport from source_zendesk_support.streams import ( DATETIME_FORMAT, diff --git a/docs/integrations/sources/zendesk-support.md b/docs/integrations/sources/zendesk-support.md index 68cf25d9e8d5..bf294c1063b0 100644 --- a/docs/integrations/sources/zendesk-support.md +++ b/docs/integrations/sources/zendesk-support.md @@ -182,109 +182,110 @@ The Zendesk connector ideally should not run into Zendesk API limitations under | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 4.0.2 | 2024-08-31 | [44965](https://github.com/airbytehq/airbyte/pull/44965) | Update dependencies | -| 4.0.1 | 2024-08-24 | [44692](https://github.com/airbytehq/airbyte/pull/44692) | Update dependencies | -| 4.0.0 | 2024-08-19 | [44096](https://github.com/airbytehq/airbyte/pull/44096) | Stream `Tags`: use cursor based pagination | -| 3.0.1 | 2024-08-17 | [44324](https://github.com/airbytehq/airbyte/pull/44324) | Update dependencies | -| 3.0.0 | 2024-08-13 | [43446](https://github.com/airbytehq/airbyte/pull/43446) | `TicketMetrics` stream: updates cursor field to `generated_timestamp` | -| 2.7.3 | 2024-08-12 | [43900](https://github.com/airbytehq/airbyte/pull/43900) | Update dependencies | -| 2.7.2 | 2024-08-10 | [43614](https://github.com/airbytehq/airbyte/pull/43614) | Update dependencies | -| 2.7.1 | 2024-08-03 | [41799](https://github.com/airbytehq/airbyte/pull/41799) | Update dependencies | -| 2.7.0 | 2024-08-02 | 
[42975](https://github.com/airbytehq/airbyte/pull/42975) | Migrate to CDK v4.3.0 | -| 2.6.13 | 2024-07-31 | [42892](https://github.com/airbytehq/airbyte/pull/42892) | Update BackoffStrategy interface to be up-to-date with latest parent interface. | -| 2.6.12 | 2024-07-25 | [42519](https://github.com/airbytehq/airbyte/pull/42519) | Update error message for permission issue. | -| 2.6.11 | 2024-07-18 | [42100](https://github.com/airbytehq/airbyte/pull/42100) | Raise config error on 403/404 status code. | -| 2.6.10 | 2024-07-10 | [41436](https://github.com/airbytehq/airbyte/pull/41436) | Fix unit test | -| 2.6.9 | 2024-07-10 | [41390](https://github.com/airbytehq/airbyte/pull/41390) | Update dependencies | -| 2.6.8 | 2024-07-09 | [40025](https://github.com/airbytehq/airbyte/pull/40025) | Update dependencies | -| 2.6.7 | 2024-07-09 | [41032](https://github.com/airbytehq/airbyte/pull/41032) | Use latest `CDK`: 3.0.0 | -| 2.6.6 | 2024-06-27 | [40592](https://github.com/airbytehq/airbyte/pull/40592) | Updated to use latest `CDK` version, fixed `cursor pagination` logic | -| 2.6.5 | 2024-05-23 | [38607](https://github.com/airbytehq/airbyte/pull/38607) | Migrate to cursor based pagination in stream `Organization memberships` | -| 2.6.4 | 2024-05-20 | [38310](https://github.com/airbytehq/airbyte/pull/38310) | Fix record filter for `Ticket Metrics` stream | -| 2.6.3 | 2024-05-02 | [36669](https://github.com/airbytehq/airbyte/pull/36669) | Schema descriptions | -| 2.6.2 | 2024-02-05 | [37761](https://github.com/airbytehq/airbyte/pull/37761) | Add stop condition for `Ticket Audits` when recieved old records; Ignore 403 and 404 status codes. 
| -| 2.6.1 | 2024-04-30 | [37723](https://github.com/airbytehq/airbyte/pull/37723) | Add %Y-%m-%dT%H:%M:%S%z to cursor_datetime_formats | -| 2.6.0 | 2024-04-29 | [36823](https://github.com/airbytehq/airbyte/pull/36823) | Migrate to low code; Add new stream `Ticket Activities` | -| 2.5.0 | 2024-04-25 | [36388](https://github.com/airbytehq/airbyte/pull/36388) | Fix data type of field in `Tickets` stream schema stream. | -| 2.4.1 | 2024-04-20 | [37450](https://github.com/airbytehq/airbyte/pull/37450) | Fix parsing response for `Ticket Metrics` stream. | -| 2.4.0 | 2024-04-09 | [36897](https://github.com/airbytehq/airbyte/pull/36897) | Fix long-running syncs for `Ticket Metrics`, `Ticket Audits` and `Satisfaction Ratings` streams. | -| 2.3.0 | 2024-03-26 | [36403](https://github.com/airbytehq/airbyte/pull/36403) | Unpin CDK version, add record counts to state messages | -| 2.2.8 | 2024-02-09 | [35083](https://github.com/airbytehq/airbyte/pull/35083) | Manage dependencies with Poetry. 
| -| 2.2.7 | 2024-02-05 | [34840](https://github.com/airbytehq/airbyte/pull/34840) | Fix missing fields in schema | -| 2.2.6 | 2024-01-11 | [34064](https://github.com/airbytehq/airbyte/pull/34064) | Skip 504 Error for stream `Ticket Audits` | -| 2.2.5 | 2024-01-08 | [34010](https://github.com/airbytehq/airbyte/pull/34010) | Prepare for airbyte-lib | -| 2.2.4 | 2023-12-20 | [33680](https://github.com/airbytehq/airbyte/pull/33680) | Fix pagination issue for streams related to incremental export sync | -| 2.2.3 | 2023-12-14 | [33435](https://github.com/airbytehq/airbyte/pull/33435) | Fix 504 Error for stream Ticket Audits | -| 2.2.2 | 2023-12-01 | [33012](https://github.com/airbytehq/airbyte/pull/33012) | Increase number of retries for backoff policy to 10 | -| 2.2.1 | 2023-11-10 | [32440](https://github.com/airbytehq/airbyte/pull/32440) | Made refactoring to improve code maintainability | -| 2.2.0 | 2023-10-31 | [31999](https://github.com/airbytehq/airbyte/pull/31999) | Extended the `CustomRoles` stream schema | -| 2.1.1 | 2023-10-23 | [31702](https://github.com/airbytehq/airbyte/pull/31702) | Base image migration: remove Dockerfile and use the python-connector-base image | -| 2.1.0 | 2023-10-19 | [31606](https://github.com/airbytehq/airbyte/pull/31606) | Added new field `reply_time_in_seconds` to the `Ticket Metrics` stream schema | -| 2.0.0 | 2023-09-15 | [30440](https://github.com/airbytehq/airbyte/pull/30440) | Remove stream `Deleted Tickets` | -| 1.7.0 | 2023-09-11 | [30259](https://github.com/airbytehq/airbyte/pull/30259) | Add stream `Deleted Tickets` | -| 1.6.0 | 2023-09-09 | [30168](https://github.com/airbytehq/airbyte/pull/30168) | Make `start_date` field optional | -| 1.5.1 | 2023-09-05 | [30142](https://github.com/airbytehq/airbyte/pull/30142) | Handle non-JSON Response | -| 1.5.0 | 2023-09-04 | [30138](https://github.com/airbytehq/airbyte/pull/30138) | Add new Streams: `Article Votes`, `Article 
Comments`, `Article Comment Votes` | -| 1.4.0 | 2023-09-04 | [30134](https://github.com/airbytehq/airbyte/pull/30134) | Add incremental support for streams: `custom Roles`, `Schedules`, `SLA Policies` | -| 1.3.0 | 2023-08-30 | [30031](https://github.com/airbytehq/airbyte/pull/30031) | Add new streams: `Articles`, `Organization Fields` | -| 1.2.2 | 2023-08-30 | [29998](https://github.com/airbytehq/airbyte/pull/29998) | Fix typo in stream `AttributeDefinitions`: field condition | -| 1.2.1 | 2023-08-30 | [29991](https://github.com/airbytehq/airbyte/pull/29991) | Remove Custom availability strategy | -| 1.2.0 | 2023-08-29 | [29940](https://github.com/airbytehq/airbyte/pull/29940) | Add undeclared fields to schemas | -| 1.1.1 | 2023-08-29 | [29904](https://github.com/airbytehq/airbyte/pull/29904) | Make `Organizations` stream incremental | -| 1.1.0 | 2023-08-28 | [29891](https://github.com/airbytehq/airbyte/pull/29891) | Add stream `UserFields` | -| 1.0.0 | 2023-07-27 | [28774](https://github.com/airbytehq/airbyte/pull/28774) | Fix retry logic & update cursor for `Tickets` stream | -| 0.11.0 | 2023-08-10 | [27208](https://github.com/airbytehq/airbyte/pull/27208) | Add stream `Topics` | -| 0.10.7 | 2023-08-09 | [29256](https://github.com/airbytehq/airbyte/pull/29256) | Update tooltip descriptions in spec | -| 0.10.6 | 2023-08-04 | [29031](https://github.com/airbytehq/airbyte/pull/29031) | Reverted `advancedAuth` spec changes | -| 0.10.5 | 2023-08-01 | [28910](https://github.com/airbytehq/airbyte/pull/28910) | Updated `advancedAuth` broken references | -| 0.10.4 | 2023-07-25 | [28397](https://github.com/airbytehq/airbyte/pull/28397) | Handle 404 Error | -| 0.10.3 | 2023-07-24 | [28612](https://github.com/airbytehq/airbyte/pull/28612) | Fix pagination for stream `TicketMetricEvents` | -| 0.10.2 | 2023-07-19 | [28487](https://github.com/airbytehq/airbyte/pull/28487) | Remove extra page from params | -| 0.10.1 | 
2023-07-10 | [28096](https://github.com/airbytehq/airbyte/pull/28096) | Replace `offset` pagination with `cursor` pagination | -| 0.10.0 | 2023-07-06 | [27991](https://github.com/airbytehq/airbyte/pull/27991) | Add streams: `PostVotes`, `PostCommentVotes` | -| 0.9.0 | 2023-07-05 | [27961](https://github.com/airbytehq/airbyte/pull/27961) | Add stream: `Post Comments` | -| 0.8.1 | 2023-06-27 | [27765](https://github.com/airbytehq/airbyte/pull/27765) | Bugfix: Nonetype error while syncing more then 100000 organizations | -| 0.8.0 | 2023-06-09 | [27156](https://github.com/airbytehq/airbyte/pull/27156) | Add stream `Posts` | -| 0.7.0 | 2023-06-27 | [27436](https://github.com/airbytehq/airbyte/pull/27436) | Add Ticket Skips stream | -| 0.6.0 | 2023-06-27 | [27450](https://github.com/airbytehq/airbyte/pull/27450) | Add Skill Based Routing streams | -| 0.5.0 | 2023-06-26 | [27735](https://github.com/airbytehq/airbyte/pull/27735) | License Update: Elv2 stream stream | -| 0.4.0 | 2023-06-16 | [27431](https://github.com/airbytehq/airbyte/pull/27431) | Add Organization Memberships stream | -| 0.3.1 | 2023-06-02 | [26945](https://github.com/airbytehq/airbyte/pull/26945) | Make `Ticket Metrics` stream to use cursor pagination | -| 0.3.0 | 2023-05-23 | [26347](https://github.com/airbytehq/airbyte/pull/26347) | Add stream `Audit Logs` logs` | -| 0.2.30 | 2023-05-23 | [26414](https://github.com/airbytehq/airbyte/pull/26414) | Added missing handlers when `empty json` or `JSONDecodeError` is received | -| 0.2.29 | 2023-04-18 | [25214](https://github.com/airbytehq/airbyte/pull/25214) | Add missing fields to `Tickets` stream | -| 0.2.28 | 2023-03-21 | [24053](https://github.com/airbytehq/airbyte/pull/24053) | Fix stream `sla_policies` schema data type error (events.value) | -| 0.2.27 | 2023-03-22 | [22817](https://github.com/airbytehq/airbyte/pull/22817) | Specified date formatting in specification | -| 0.2.26 | 2023-03-20 | 
[24252](https://github.com/airbytehq/airbyte/pull/24252) | Handle invalid `start_date` when checking connection | -| 0.2.25 | 2023-02-28 | [22308](https://github.com/airbytehq/airbyte/pull/22308) | Add `AvailabilityStrategy` for all streams | -| 0.2.24 | 2023-02-17 | [23246](https://github.com/airbytehq/airbyte/pull/23246) | Handle `StartTimeTooRecent` error for Tickets stream | -| 0.2.23 | 2023-02-15 | [23035](https://github.com/airbytehq/airbyte/pull/23035) | Handle 403 Error | -| 0.2.22 | 2023-02-14 | [22483](https://github.com/airbytehq/airbyte/pull/22483) | Fix test; handle 400 error | -| 0.2.21 | 2023-01-27 | [22027](https://github.com/airbytehq/airbyte/pull/22027) | Set `AvailabilityStrategy` for streams explicitly to `None` | -| 0.2.20 | 2022-12-28 | [20900](https://github.com/airbytehq/airbyte/pull/20900) | Remove synchronous time.sleep, add logging, reduce backoff time | -| 0.2.19 | 2022-12-09 | [19967](https://github.com/airbytehq/airbyte/pull/19967) | Fix reading response for more than 100k records | -| 0.2.18 | 2022-11-29 | [19432](https://github.com/airbytehq/airbyte/pull/19432) | Revert changes from version 0.2.15, use a test read instead | -| 0.2.17 | 2022-11-24 | [19792](https://github.com/airbytehq/airbyte/pull/19792) | Transform `ticket_comments.via` "-" to null | -| 0.2.16 | 2022-09-28 | [17326](https://github.com/airbytehq/airbyte/pull/17326) | Migrate to per-stream states. 
| -| 0.2.15 | 2022-08-03 | [15233](https://github.com/airbytehq/airbyte/pull/15233) | Added `subscription plan` check on `streams discovery` step to remove streams that are not accessible for fetch due to subscription plan restrictions | -| 0.2.14 | 2022-07-27 | [15036](https://github.com/airbytehq/airbyte/pull/15036) | Convert `ticket_audits.previous_value` values to string | -| 0.2.13 | 2022-07-21 | [14829](https://github.com/airbytehq/airbyte/pull/14829) | Convert `tickets.custom_fields` values to string | -| 0.2.12 | 2022-06-30 | [14304](https://github.com/airbytehq/airbyte/pull/14304) | Fixed Pagination for Group Membership stream | -| 0.2.11 | 2022-06-24 | [14112](https://github.com/airbytehq/airbyte/pull/14112) | Fixed "Retry-After" non integer value | -| 0.2.10 | 2022-06-14 | [13757](https://github.com/airbytehq/airbyte/pull/13757) | Fixed the bug with `TicketMetrics` stream, HTTP Error 429, caused by lots of API requests | -| 0.2.9 | 2022-05-27 | [13261](https://github.com/airbytehq/airbyte/pull/13261) | Bugfix for the unhandled [ChunkedEncodingError](https://github.com/airbytehq/airbyte/issues/12591) and [ConnectionError](https://github.com/airbytehq/airbyte/issues/12155) | -| 0.2.8 | 2022-05-20 | [13055](https://github.com/airbytehq/airbyte/pull/13055) | Fixed minor issue for stream `ticket_audits` schema | -| 0.2.7 | 2022-04-27 | [12335](https://github.com/airbytehq/airbyte/pull/12335) | Adding fixtures to mock time.sleep for connectors that explicitly sleep | -| 0.2.6 | 2022-04-19 | [12122](https://github.com/airbytehq/airbyte/pull/12122) | Fixed the bug when only 100,000 Users are synced [11895](https://github.com/airbytehq/airbyte/issues/11895) and fixed bug when `start_date` is not used on user stream [12059](https://github.com/airbytehq/airbyte/issues/12059). 
| -| 0.2.5 | 2022-04-05 | [11727](https://github.com/airbytehq/airbyte/pull/11727) | Fixed the bug when state was not parsed correctly | -| 0.2.4 | 2022-04-04 | [11688](https://github.com/airbytehq/airbyte/pull/11688) | Small documentation corrections | -| 0.2.3 | 2022-03-23 | [11349](https://github.com/airbytehq/airbyte/pull/11349) | Fixed the bug when Tickets stream didn't return deleted records | -| 0.2.2 | 2022-03-17 | [11237](https://github.com/airbytehq/airbyte/pull/11237) | Fixed the bug when TicketComments stream didn't return all records | -| 0.2.1 | 2022-03-15 | [11162](https://github.com/airbytehq/airbyte/pull/11162) | Added support of OAuth2.0 authentication method | -| 0.2.0 | 2022-03-01 | [9456](https://github.com/airbytehq/airbyte/pull/9456) | Update source to use future requests | -| 0.1.12 | 2022-01-25 | [9785](https://github.com/airbytehq/airbyte/pull/9785) | Add additional log messages | -| 0.1.11 | 2021-12-21 | [8987](https://github.com/airbytehq/airbyte/pull/8987) | Update connector fields title/description | -| 0.1.9 | 2021-12-16 | [8616](https://github.com/airbytehq/airbyte/pull/8616) | Adds Brands, CustomRoles and Schedules streams | +| 4.1.0 | 2024-09-06 | [45187](https://github.com/airbytehq/airbyte/pull/45187) | Migrate to CDK v5 | +| 4.0.2 | 2024-08-31 | [44965](https://github.com/airbytehq/airbyte/pull/44965) | Update dependencies | +| 4.0.1 | 2024-08-24 | [44692](https://github.com/airbytehq/airbyte/pull/44692) | Update dependencies | +| 4.0.0 | 2024-08-19 | [44096](https://github.com/airbytehq/airbyte/pull/44096) | Stream `Tags`: use cursor based pagination | +| 3.0.1 | 2024-08-17 | [44324](https://github.com/airbytehq/airbyte/pull/44324) | Update dependencies | +| 3.0.0 | 2024-08-13 | [43446](https://github.com/airbytehq/airbyte/pull/43446) | `TicketMetrics` stream: updates cursor field to `generated_timestamp` | +| 2.7.3 | 2024-08-12 | 
[43900](https://github.com/airbytehq/airbyte/pull/43900) | Update dependencies | +| 2.7.2 | 2024-08-10 | [43614](https://github.com/airbytehq/airbyte/pull/43614) | Update dependencies | +| 2.7.1 | 2024-08-03 | [41799](https://github.com/airbytehq/airbyte/pull/41799) | Update dependencies | +| 2.7.0 | 2024-08-02 | [42975](https://github.com/airbytehq/airbyte/pull/42975) | Migrate to CDK v4.3.0 | +| 2.6.13 | 2024-07-31 | [42892](https://github.com/airbytehq/airbyte/pull/42892) | Update BackoffStrategy interface to be up-to-date with latest parent interface. | +| 2.6.12 | 2024-07-25 | [42519](https://github.com/airbytehq/airbyte/pull/42519) | Update error message for permission issue. | +| 2.6.11 | 2024-07-18 | [42100](https://github.com/airbytehq/airbyte/pull/42100) | Raise config error on 403/404 status code. | +| 2.6.10 | 2024-07-10 | [41436](https://github.com/airbytehq/airbyte/pull/41436) | Fix unit test | +| 2.6.9 | 2024-07-10 | [41390](https://github.com/airbytehq/airbyte/pull/41390) | Update dependencies | +| 2.6.8 | 2024-07-09 | [40025](https://github.com/airbytehq/airbyte/pull/40025) | Update dependencies | +| 2.6.7 | 2024-07-09 | [41032](https://github.com/airbytehq/airbyte/pull/41032) | Use latest `CDK`: 3.0.0 | +| 2.6.6 | 2024-06-27 | [40592](https://github.com/airbytehq/airbyte/pull/40592) | Updated to use latest `CDK` version, fixed `cursor pagination` logic | +| 2.6.5 | 2024-05-23 | [38607](https://github.com/airbytehq/airbyte/pull/38607) | Migrate to cursor based pagination in stream `Organization memberships` | +| 2.6.4 | 2024-05-20 | [38310](https://github.com/airbytehq/airbyte/pull/38310) | Fix record filter for `Ticket Metrics` stream | +| 2.6.3 | 2024-05-02 | [36669](https://github.com/airbytehq/airbyte/pull/36669) | Schema descriptions | +| 2.6.2 | 2024-02-05 | [37761](https://github.com/airbytehq/airbyte/pull/37761) | Add stop condition for `Ticket Audits` when recieved old 
records; Ignore 403 and 404 status codes. | +| 2.6.1 | 2024-04-30 | [37723](https://github.com/airbytehq/airbyte/pull/37723) | Add %Y-%m-%dT%H:%M:%S%z to cursor_datetime_formats | +| 2.6.0 | 2024-04-29 | [36823](https://github.com/airbytehq/airbyte/pull/36823) | Migrate to low code; Add new stream `Ticket Activities` | +| 2.5.0 | 2024-04-25 | [36388](https://github.com/airbytehq/airbyte/pull/36388) | Fix data type of field in `Tickets` stream schema stream. | +| 2.4.1 | 2024-04-20 | [37450](https://github.com/airbytehq/airbyte/pull/37450) | Fix parsing response for `Ticket Metrics` stream. | +| 2.4.0 | 2024-04-09 | [36897](https://github.com/airbytehq/airbyte/pull/36897) | Fix long-running syncs for `Ticket Metrics`, `Ticket Audits` and `Satisfaction Ratings` streams. | +| 2.3.0 | 2024-03-26 | [36403](https://github.com/airbytehq/airbyte/pull/36403) | Unpin CDK version, add record counts to state messages | +| 2.2.8 | 2024-02-09 | [35083](https://github.com/airbytehq/airbyte/pull/35083) | Manage dependencies with Poetry. 
| +| 2.2.7 | 2024-02-05 | [34840](https://github.com/airbytehq/airbyte/pull/34840) | Fix missing fields in schema | +| 2.2.6 | 2024-01-11 | [34064](https://github.com/airbytehq/airbyte/pull/34064) | Skip 504 Error for stream `Ticket Audits` | +| 2.2.5 | 2024-01-08 | [34010](https://github.com/airbytehq/airbyte/pull/34010) | Prepare for airbyte-lib | +| 2.2.4 | 2023-12-20 | [33680](https://github.com/airbytehq/airbyte/pull/33680) | Fix pagination issue for streams related to incremental export sync | +| 2.2.3 | 2023-12-14 | [33435](https://github.com/airbytehq/airbyte/pull/33435) | Fix 504 Error for stream Ticket Audits | +| 2.2.2 | 2023-12-01 | [33012](https://github.com/airbytehq/airbyte/pull/33012) | Increase number of retries for backoff policy to 10 | +| 2.2.1 | 2023-11-10 | [32440](https://github.com/airbytehq/airbyte/pull/32440) | Made refactoring to improve code maintainability | +| 2.2.0 | 2023-10-31 | [31999](https://github.com/airbytehq/airbyte/pull/31999) | Extended the `CustomRoles` stream schema | +| 2.1.1 | 2023-10-23 | [31702](https://github.com/airbytehq/airbyte/pull/31702) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 2.1.0 | 2023-10-19 | [31606](https://github.com/airbytehq/airbyte/pull/31606) | Added new field `reply_time_in_seconds` to the `Ticket Metrics` stream schema | +| 2.0.0 | 2023-09-15 | [30440](https://github.com/airbytehq/airbyte/pull/30440) | Remove stream `Deleted Tickets` | +| 1.7.0 | 2023-09-11 | [30259](https://github.com/airbytehq/airbyte/pull/30259) | Add stream `Deleted Tickets` | +| 1.6.0 | 2023-09-09 | [30168](https://github.com/airbytehq/airbyte/pull/30168) | Make `start_date` field optional | +| 1.5.1 | 2023-09-05 | [30142](https://github.com/airbytehq/airbyte/pull/30142) | Handle non-JSON Response | +| 1.5.0 | 2023-09-04 | [30138](https://github.com/airbytehq/airbyte/pull/30138) | Add new Streams: `Article Votes`, `Article 
Comments`, `Article Comment Votes` | +| 1.4.0 | 2023-09-04 | [30134](https://github.com/airbytehq/airbyte/pull/30134) | Add incremental support for streams: `custom Roles`, `Schedules`, `SLA Policies` | +| 1.3.0 | 2023-08-30 | [30031](https://github.com/airbytehq/airbyte/pull/30031) | Add new streams: `Articles`, `Organization Fields` | +| 1.2.2 | 2023-08-30 | [29998](https://github.com/airbytehq/airbyte/pull/29998) | Fix typo in stream `AttributeDefinitions`: field condition | +| 1.2.1 | 2023-08-30 | [29991](https://github.com/airbytehq/airbyte/pull/29991) | Remove Custom availability strategy | +| 1.2.0 | 2023-08-29 | [29940](https://github.com/airbytehq/airbyte/pull/29940) | Add undeclared fields to schemas | +| 1.1.1 | 2023-08-29 | [29904](https://github.com/airbytehq/airbyte/pull/29904) | Make `Organizations` stream incremental | +| 1.1.0 | 2023-08-28 | [29891](https://github.com/airbytehq/airbyte/pull/29891) | Add stream `UserFields` | +| 1.0.0 | 2023-07-27 | [28774](https://github.com/airbytehq/airbyte/pull/28774) | Fix retry logic & update cursor for `Tickets` stream | +| 0.11.0 | 2023-08-10 | [27208](https://github.com/airbytehq/airbyte/pull/27208) | Add stream `Topics` | +| 0.10.7 | 2023-08-09 | [29256](https://github.com/airbytehq/airbyte/pull/29256) | Update tooltip descriptions in spec | +| 0.10.6 | 2023-08-04 | [29031](https://github.com/airbytehq/airbyte/pull/29031) | Reverted `advancedAuth` spec changes | +| 0.10.5 | 2023-08-01 | [28910](https://github.com/airbytehq/airbyte/pull/28910) | Updated `advancedAuth` broken references | +| 0.10.4 | 2023-07-25 | [28397](https://github.com/airbytehq/airbyte/pull/28397) | Handle 404 Error | +| 0.10.3 | 2023-07-24 | [28612](https://github.com/airbytehq/airbyte/pull/28612) | Fix pagination for stream `TicketMetricEvents` | +| 0.10.2 | 2023-07-19 | [28487](https://github.com/airbytehq/airbyte/pull/28487) | Remove extra page from params | +| 0.10.1 | 
2023-07-10 | [28096](https://github.com/airbytehq/airbyte/pull/28096) | Replace `offset` pagination with `cursor` pagination | +| 0.10.0 | 2023-07-06 | [27991](https://github.com/airbytehq/airbyte/pull/27991) | Add streams: `PostVotes`, `PostCommentVotes` | +| 0.9.0 | 2023-07-05 | [27961](https://github.com/airbytehq/airbyte/pull/27961) | Add stream: `Post Comments` | +| 0.8.1 | 2023-06-27 | [27765](https://github.com/airbytehq/airbyte/pull/27765) | Bugfix: Nonetype error while syncing more then 100000 organizations | +| 0.8.0 | 2023-06-09 | [27156](https://github.com/airbytehq/airbyte/pull/27156) | Add stream `Posts` | +| 0.7.0 | 2023-06-27 | [27436](https://github.com/airbytehq/airbyte/pull/27436) | Add Ticket Skips stream | +| 0.6.0 | 2023-06-27 | [27450](https://github.com/airbytehq/airbyte/pull/27450) | Add Skill Based Routing streams | +| 0.5.0 | 2023-06-26 | [27735](https://github.com/airbytehq/airbyte/pull/27735) | License Update: Elv2 stream stream | +| 0.4.0 | 2023-06-16 | [27431](https://github.com/airbytehq/airbyte/pull/27431) | Add Organization Memberships stream | +| 0.3.1 | 2023-06-02 | [26945](https://github.com/airbytehq/airbyte/pull/26945) | Make `Ticket Metrics` stream to use cursor pagination | +| 0.3.0 | 2023-05-23 | [26347](https://github.com/airbytehq/airbyte/pull/26347) | Add stream `Audit Logs` logs` | +| 0.2.30 | 2023-05-23 | [26414](https://github.com/airbytehq/airbyte/pull/26414) | Added missing handlers when `empty json` or `JSONDecodeError` is received | +| 0.2.29 | 2023-04-18 | [25214](https://github.com/airbytehq/airbyte/pull/25214) | Add missing fields to `Tickets` stream | +| 0.2.28 | 2023-03-21 | [24053](https://github.com/airbytehq/airbyte/pull/24053) | Fix stream `sla_policies` schema data type error (events.value) | +| 0.2.27 | 2023-03-22 | [22817](https://github.com/airbytehq/airbyte/pull/22817) | Specified date formatting in specification | +| 0.2.26 | 2023-03-20 | 
[24252](https://github.com/airbytehq/airbyte/pull/24252) | Handle invalid `start_date` when checking connection | +| 0.2.25 | 2023-02-28 | [22308](https://github.com/airbytehq/airbyte/pull/22308) | Add `AvailabilityStrategy` for all streams | +| 0.2.24 | 2023-02-17 | [23246](https://github.com/airbytehq/airbyte/pull/23246) | Handle `StartTimeTooRecent` error for Tickets stream | +| 0.2.23 | 2023-02-15 | [23035](https://github.com/airbytehq/airbyte/pull/23035) | Handle 403 Error | +| 0.2.22 | 2023-02-14 | [22483](https://github.com/airbytehq/airbyte/pull/22483) | Fix test; handle 400 error | +| 0.2.21 | 2023-01-27 | [22027](https://github.com/airbytehq/airbyte/pull/22027) | Set `AvailabilityStrategy` for streams explicitly to `None` | +| 0.2.20 | 2022-12-28 | [20900](https://github.com/airbytehq/airbyte/pull/20900) | Remove synchronous time.sleep, add logging, reduce backoff time | +| 0.2.19 | 2022-12-09 | [19967](https://github.com/airbytehq/airbyte/pull/19967) | Fix reading response for more than 100k records | +| 0.2.18 | 2022-11-29 | [19432](https://github.com/airbytehq/airbyte/pull/19432) | Revert changes from version 0.2.15, use a test read instead | +| 0.2.17 | 2022-11-24 | [19792](https://github.com/airbytehq/airbyte/pull/19792) | Transform `ticket_comments.via` "-" to null | +| 0.2.16 | 2022-09-28 | [17326](https://github.com/airbytehq/airbyte/pull/17326) | Migrate to per-stream states. 
| +| 0.2.15 | 2022-08-03 | [15233](https://github.com/airbytehq/airbyte/pull/15233) | Added `subscription plan` check on `streams discovery` step to remove streams that are not accessible for fetch due to subscription plan restrictions | +| 0.2.14 | 2022-07-27 | [15036](https://github.com/airbytehq/airbyte/pull/15036) | Convert `ticket_audits.previous_value` values to string | +| 0.2.13 | 2022-07-21 | [14829](https://github.com/airbytehq/airbyte/pull/14829) | Convert `tickets.custom_fields` values to string | +| 0.2.12 | 2022-06-30 | [14304](https://github.com/airbytehq/airbyte/pull/14304) | Fixed Pagination for Group Membership stream | +| 0.2.11 | 2022-06-24 | [14112](https://github.com/airbytehq/airbyte/pull/14112) | Fixed "Retry-After" non integer value | +| 0.2.10 | 2022-06-14 | [13757](https://github.com/airbytehq/airbyte/pull/13757) | Fixed the bug with `TicketMetrics` stream, HTTP Error 429, caused by lots of API requests | +| 0.2.9 | 2022-05-27 | [13261](https://github.com/airbytehq/airbyte/pull/13261) | Bugfix for the unhandled [ChunkedEncodingError](https://github.com/airbytehq/airbyte/issues/12591) and [ConnectionError](https://github.com/airbytehq/airbyte/issues/12155) | +| 0.2.8 | 2022-05-20 | [13055](https://github.com/airbytehq/airbyte/pull/13055) | Fixed minor issue for stream `ticket_audits` schema | +| 0.2.7 | 2022-04-27 | [12335](https://github.com/airbytehq/airbyte/pull/12335) | Adding fixtures to mock time.sleep for connectors that explicitly sleep | +| 0.2.6 | 2022-04-19 | [12122](https://github.com/airbytehq/airbyte/pull/12122) | Fixed the bug when only 100,000 Users are synced [11895](https://github.com/airbytehq/airbyte/issues/11895) and fixed bug when `start_date` is not used on user stream [12059](https://github.com/airbytehq/airbyte/issues/12059). 
| +| 0.2.5 | 2022-04-05 | [11727](https://github.com/airbytehq/airbyte/pull/11727) | Fixed the bug when state was not parsed correctly | +| 0.2.4 | 2022-04-04 | [11688](https://github.com/airbytehq/airbyte/pull/11688) | Small documentation corrections | +| 0.2.3 | 2022-03-23 | [11349](https://github.com/airbytehq/airbyte/pull/11349) | Fixed the bug when Tickets stream didn't return deleted records | +| 0.2.2 | 2022-03-17 | [11237](https://github.com/airbytehq/airbyte/pull/11237) | Fixed the bug when TicketComments stream didn't return all records | +| 0.2.1 | 2022-03-15 | [11162](https://github.com/airbytehq/airbyte/pull/11162) | Added support of OAuth2.0 authentication method | +| 0.2.0 | 2022-03-01 | [9456](https://github.com/airbytehq/airbyte/pull/9456) | Update source to use future requests | +| 0.1.12 | 2022-01-25 | [9785](https://github.com/airbytehq/airbyte/pull/9785) | Add additional log messages | +| 0.1.11 | 2021-12-21 | [8987](https://github.com/airbytehq/airbyte/pull/8987) | Update connector fields title/description | +| 0.1.9 | 2021-12-16 | [8616](https://github.com/airbytehq/airbyte/pull/8616) | Adds Brands, CustomRoles and Schedules streams | | 0.1.8 | 2021-11-23 | [8050](https://github.com/airbytehq/airbyte/pull/8168) | Adds TicketMetricEvents stream | | 0.1.7 | 2021-11-23 | [8058](https://github.com/airbytehq/airbyte/pull/8058) | Added support of AccessToken authentication | | 0.1.6 | 2021-11-18 | [8050](https://github.com/airbytehq/airbyte/pull/8050) | Fix wrong types for schemas, add TypeTransformer | From 04f40f903c1e901748d7dcdbb753188346a6e2a9 Mon Sep 17 00:00:00 2001 From: Augustin Date: Fri, 6 Sep 2024 15:01:53 +0200 Subject: [PATCH 41/51] metadata-service[orchestrator]: fix stale metadata detection job (#45188) --- .../metadata_service/orchestrator/orchestrator/assets/github.py | 2 +- .../connectors/metadata_service/orchestrator/pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 
deletions(-) diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/github.py b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/github.py index 137a7d7a7222..a1adfc772c70 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/github.py +++ b/airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/assets/github.py @@ -123,7 +123,7 @@ def entry_should_be_on_gcs(metadata_entry: LatestMetadataEntry) -> bool: """ if metadata_entry.metadata_definition.data.supportLevel == "archived": return False - if getattr(metadata_entry.metadata_definition.releases, "isReleaseCandidate", False): + if getattr(metadata_entry.metadata_definition.data.releases, "isReleaseCandidate", False): return False if entry_is_younger_than_grace_period(metadata_entry): return False diff --git a/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml b/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml index a8957712a764..ef4e82ae0f35 100644 --- a/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml +++ b/airbyte-ci/connectors/metadata_service/orchestrator/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "orchestrator" -version = "0.5.1" +version = "0.5.2" description = "" authors = ["Ben Church "] readme = "README.md" From 2fa35ab30bfe80be6b3ef4763e5e540101724462 Mon Sep 17 00:00:00 2001 From: Anatolii Yatsuk <35109939+tolik0@users.noreply.github.com> Date: Fri, 6 Sep 2024 16:44:34 +0300 Subject: [PATCH 42/51] feat(airbyte-cdk): Add Global Parent State Cursor (#39593) --- .../declarative_component_schema.yaml | 5 + .../declarative/extractors/record_filter.py | 11 +- .../declarative/incremental/__init__.py | 11 +- .../incremental/datetime_based_cursor.py | 17 +- .../incremental/global_substream_cursor.py | 281 ++++++++++ .../models/declarative_component_schema.py | 5 + .../parsers/model_to_component_factory.py | 22 +- .../test_per_partition_cursor_integration.py | 5 
+- .../test_parent_state_stream.py | 497 ++++++++++++++++++ .../incremental-syncs.md | 60 +++ .../partition-router.md | 1 + 11 files changed, 903 insertions(+), 12 deletions(-) create mode 100644 airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml index bfcf1f29774e..f05c342fa68c 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml @@ -815,6 +815,11 @@ definitions: description: Set to True if the target API does not accept queries where the start time equal the end time. type: boolean default: False + global_substream_cursor: + title: Whether to store cursor as one value instead of per partition + description: This setting optimizes performance when the parent stream has thousands of partitions by storing the cursor as a single value rather than per partition. Notably, the substream state is updated only at the end of the sync, which helps prevent data loss in case of a sync failure. See more info in the [docs](https://docs.airbyte.com/connector-development/config-based/understanding-the-yaml-file/incremental-syncs). + type: boolean + default: false lookback_window: title: Lookback Window description: Time interval before the start_datetime to read data for, e.g. P1M for looking back one month. 
diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_filter.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_filter.py index 21022589bbe9..c74f3d0e6148 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_filter.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_filter.py @@ -48,11 +48,16 @@ class ClientSideIncrementalRecordFilterDecorator(RecordFilter): """ def __init__( - self, date_time_based_cursor: DatetimeBasedCursor, per_partition_cursor: Optional[PerPartitionCursor] = None, **kwargs: Any + self, + date_time_based_cursor: DatetimeBasedCursor, + per_partition_cursor: Optional[PerPartitionCursor] = None, + is_global_substream_cursor: bool = False, + **kwargs: Any, ): super().__init__(**kwargs) self._date_time_based_cursor = date_time_based_cursor self._per_partition_cursor = per_partition_cursor + self.is_global_substream_cursor = is_global_substream_cursor @property def _cursor_field(self) -> str: @@ -102,6 +107,10 @@ def _get_state_value(self, stream_state: StreamState, stream_slice: StreamSlice) # self._per_partition_cursor is the same object that DeclarativeStream uses to save/update stream_state partition_state = self._per_partition_cursor.select_state(stream_slice=stream_slice) return partition_state.get(self._cursor_field) if partition_state else None + + if self.is_global_substream_cursor: + return stream_state.get("state", {}).get(self._cursor_field) # type: ignore # state is inside a dict for GlobalSubstreamCursor + return stream_state.get(self._cursor_field) def _get_filter_date(self, state_value: Optional[str]) -> datetime.datetime: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/__init__.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/__init__.py index b3a4fbb7e0bc..5699000bd9f4 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/__init__.py +++ 
b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/__init__.py @@ -4,7 +4,16 @@ from airbyte_cdk.sources.declarative.incremental.datetime_based_cursor import DatetimeBasedCursor from airbyte_cdk.sources.declarative.incremental.declarative_cursor import DeclarativeCursor +from airbyte_cdk.sources.declarative.incremental.global_substream_cursor import GlobalSubstreamCursor from airbyte_cdk.sources.declarative.incremental.per_partition_cursor import CursorFactory, PerPartitionCursor from airbyte_cdk.sources.declarative.incremental.resumable_full_refresh_cursor import ResumableFullRefreshCursor, ChildPartitionResumableFullRefreshCursor -__all__ = ["CursorFactory", "DatetimeBasedCursor", "DeclarativeCursor", "PerPartitionCursor", "ResumableFullRefreshCursor", "ChildPartitionResumableFullRefreshCursor"] +__all__ = [ + "CursorFactory", + "DatetimeBasedCursor", + "DeclarativeCursor", + "GlobalSubstreamCursor", + "PerPartitionCursor", + "ResumableFullRefreshCursor", + "ChildPartitionResumableFullRefreshCursor" +] diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py index 1811ca181628..fafdabae03b1 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py @@ -4,6 +4,7 @@ import datetime from dataclasses import InitVar, dataclass, field +from datetime import timedelta from typing import Any, Callable, Iterable, List, Mapping, MutableMapping, Optional, Union from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, Level, Type @@ -15,7 +16,7 @@ from airbyte_cdk.sources.declarative.requesters.request_option import RequestOption, RequestOptionType from airbyte_cdk.sources.message import MessageRepository from airbyte_cdk.sources.types import Config, Record, StreamSlice, StreamState -from 
isodate import Duration, parse_duration +from isodate import Duration, duration_isoformat, parse_duration @dataclass @@ -363,3 +364,17 @@ def is_greater_than_or_equal(self, first: Record, second: Record) -> bool: return True else: return False + + def set_runtime_lookback_window(self, lookback_window_in_seconds: int) -> None: + """ + Updates the lookback window based on a given number of seconds if the new duration + is greater than the currently configured lookback window. + + :param lookback_window_in_seconds: The lookback duration in seconds to potentially update to. + """ + runtime_lookback_window = duration_isoformat(timedelta(seconds=lookback_window_in_seconds)) + config_lookback = parse_duration(self._lookback_window.eval(self.config) if self._lookback_window else "P0D") + + # Check if the new runtime lookback window is greater than the current config lookback + if parse_duration(runtime_lookback_window) > config_lookback: + self._lookback_window = InterpolatedString.create(runtime_lookback_window, parameters={}) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py new file mode 100644 index 000000000000..9e37bc34385e --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py @@ -0,0 +1,281 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import threading +import time +from typing import Any, Iterable, Mapping, Optional, Union + +from airbyte_cdk.sources.declarative.incremental.datetime_based_cursor import DatetimeBasedCursor +from airbyte_cdk.sources.declarative.incremental.declarative_cursor import DeclarativeCursor +from airbyte_cdk.sources.declarative.partition_routers.partition_router import PartitionRouter +from airbyte_cdk.sources.types import Record, StreamSlice, StreamState + + +class Timer: + """ + A simple timer class that measures elapsed time in seconds using a high-resolution performance counter. + """ + + def __init__(self) -> None: + self._start: Optional[int] = None + + def start(self) -> None: + self._start = time.perf_counter_ns() + + def finish(self) -> int: + if self._start: + return int((time.perf_counter_ns() - self._start) // 1e9) + else: + raise RuntimeError("Global substream cursor timer not started") + + +class GlobalSubstreamCursor(DeclarativeCursor): + """ + The GlobalSubstreamCursor is designed to track the state of substreams using a single global cursor. + This class is beneficial for streams with many partitions, as it allows the state to be managed globally + instead of per partition, simplifying state management and reducing the size of state messages. + + This cursor is activated by setting the `global_substream_cursor` parameter for incremental sync. + + Warnings: + - This class enforces a minimal lookback window for substream based on the duration of the previous sync to avoid losing records. This lookback ensures that any records added or updated during the sync are captured in subsequent syncs. + - The global cursor is updated only at the end of the sync. If the sync ends prematurely (e.g., due to an exception), the state will not be updated. + - When using the `incremental_dependency` option, the sync will progress through parent records, preventing the sync from getting infinitely stuck. 
However, it is crucial to understand the requirements for both the `global_substream_cursor` and `incremental_dependency` options to avoid data loss. + """ + + def __init__(self, stream_cursor: DatetimeBasedCursor, partition_router: PartitionRouter): + self._stream_cursor = stream_cursor + self._partition_router = partition_router + self._timer = Timer() + self._lock = threading.Lock() + self._slice_semaphore = threading.Semaphore(0) # Start with 0, indicating no slices being tracked + self._all_slices_yielded = False + self._lookback_window: Optional[int] = None + + def stream_slices(self) -> Iterable[StreamSlice]: + """ + Generates stream slices, ensuring the last slice is properly flagged and processed. + + This method creates a sequence of stream slices by iterating over partitions and cursor slices. + It holds onto one slice in memory to set `_all_slices_yielded` to `True` before yielding the + final slice. A semaphore is used to track the processing of slices, ensuring that `close_slice` + is called only after all slices have been processed. + + We expect the following events: + * Yields all the slices except the last one. At this point, `close_slice` won't actually close the global slice as `self._all_slices_yielded == False` + * Release the semaphore one last time before setting `self._all_slices_yielded = True`. This will cause `close_slice` to know about all the slices before we indicate that all slices have been yielded so the left side of `if self._all_slices_yielded and self._slice_semaphore._value == 0` will be false if not everything is closed + * Setting `self._all_slices_yielded = True`. We do that before actually yielding the last slice as the caller of `stream_slices` might stop iterating at any point and hence the code after `yield` might not be executed + * Yield the last slice. 
At that point, once there are as many slices yielded as closes, the global slice will be closed too + """ + previous_slice = None + + slice_generator = ( + StreamSlice(partition=partition, cursor_slice=cursor_slice) + for partition in self._partition_router.stream_slices() + for cursor_slice in self._stream_cursor.stream_slices() + ) + self._timer.start() + + for slice in slice_generator: + if previous_slice is not None: + # Release the semaphore to indicate that a slice has been yielded + self._slice_semaphore.release() + yield previous_slice + + # Store the current slice as the previous slice for the next iteration + previous_slice = slice + + # After all slices have been generated, release the semaphore one final time + # and flag that all slices have been yielded + self._slice_semaphore.release() + self._all_slices_yielded = True + + # Yield the last slice + if previous_slice is not None: + yield previous_slice + + def set_initial_state(self, stream_state: StreamState) -> None: + """ + Set the initial state for the cursors. + + This method initializes the state for the global cursor using the provided stream state. + + Additionally, it sets the parent state for partition routers that are based on parent streams. If a partition router + does not have parent streams, this step will be skipped due to the default PartitionRouter implementation. + + Args: + stream_state (StreamState): The state of the streams to be set. 
The format of the stream state should be: + { + "state": { + "last_updated": "2023-05-27T00:00:00Z" + }, + "parent_state": { + "parent_stream_name": { + "last_updated": "2023-05-27T00:00:00Z" + } + }, + "lookback_window": 132 + } + """ + if not stream_state: + return + + if "lookback_window" in stream_state: + self._lookback_window = stream_state["lookback_window"] + self._inject_lookback_into_stream_cursor(stream_state["lookback_window"]) + + self._stream_cursor.set_initial_state(stream_state["state"]) + + # Set parent state for partition routers based on parent streams + self._partition_router.set_initial_state(stream_state) + + def _inject_lookback_into_stream_cursor(self, lookback_window: int) -> None: + """ + Modifies the stream cursor's lookback window based on the duration of the previous sync. + This adjustment ensures the cursor is set to the minimal lookback window necessary for + avoiding missing data. + + Parameters: + lookback_window (int): The lookback duration in seconds to be set, derived from + the previous sync. + + Raises: + ValueError: If the cursor does not support dynamic lookback window adjustments. + """ + if hasattr(self._stream_cursor, "set_runtime_lookback_window"): + self._stream_cursor.set_runtime_lookback_window(lookback_window) + else: + raise ValueError("The cursor class for Global Substream Cursor does not have a set_runtime_lookback_window method") + + def observe(self, stream_slice: StreamSlice, record: Record) -> None: + self._stream_cursor.observe(StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), record) + + def close_slice(self, stream_slice: StreamSlice, *args: Any) -> None: + """ + Close the current stream slice. + + This method is called when a stream slice is completed. For the global parent cursor, we close the child cursor + only after reading all slices. 
This ensures that we do not miss any child records from a later parent record + if the child cursor is earlier than a record from the first parent record. + + Args: + stream_slice (StreamSlice): The stream slice to be closed. + *args (Any): Additional arguments. + """ + with self._lock: + self._slice_semaphore.acquire() + if self._all_slices_yielded and self._slice_semaphore._value == 0: + self._lookback_window = self._timer.finish() + self._stream_cursor.close_slice(StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), *args) + + def get_stream_state(self) -> StreamState: + state: dict[str, Any] = {"state": self._stream_cursor.get_stream_state()} + + parent_state = self._partition_router.get_stream_state() + if parent_state: + state["parent_state"] = parent_state + + if self._lookback_window is not None: + state["lookback_window"] = self._lookback_window + + return state + + def select_state(self, stream_slice: Optional[StreamSlice] = None) -> Optional[StreamState]: + # stream_slice is ignored as cursor is global + return self._stream_cursor.get_stream_state() + + def get_request_params( + self, + *, + stream_state: Optional[StreamState] = None, + stream_slice: Optional[StreamSlice] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> Mapping[str, Any]: + if stream_slice: + return self._partition_router.get_request_params( # type: ignore # this always returns a mapping + stream_state=stream_state, + stream_slice=StreamSlice(partition=stream_slice.partition, cursor_slice={}), + next_page_token=next_page_token, + ) | self._stream_cursor.get_request_params( + stream_state=stream_state, + stream_slice=StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), + next_page_token=next_page_token, + ) + else: + raise ValueError("A partition needs to be provided in order to get request params") + + def get_request_headers( + self, + *, + stream_state: Optional[StreamState] = None, + stream_slice: Optional[StreamSlice] = None, + 
next_page_token: Optional[Mapping[str, Any]] = None, + ) -> Mapping[str, Any]: + if stream_slice: + return self._partition_router.get_request_headers( # type: ignore # this always returns a mapping + stream_state=stream_state, + stream_slice=StreamSlice(partition=stream_slice.partition, cursor_slice={}), + next_page_token=next_page_token, + ) | self._stream_cursor.get_request_headers( + stream_state=stream_state, + stream_slice=StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), + next_page_token=next_page_token, + ) + else: + raise ValueError("A partition needs to be provided in order to get request headers") + + def get_request_body_data( + self, + *, + stream_state: Optional[StreamState] = None, + stream_slice: Optional[StreamSlice] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> Union[Mapping[str, Any], str]: + if stream_slice: + return self._partition_router.get_request_body_data( # type: ignore # this always returns a mapping + stream_state=stream_state, + stream_slice=StreamSlice(partition=stream_slice.partition, cursor_slice={}), + next_page_token=next_page_token, + ) | self._stream_cursor.get_request_body_data( + stream_state=stream_state, + stream_slice=StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), + next_page_token=next_page_token, + ) + else: + raise ValueError("A partition needs to be provided in order to get request body data") + + def get_request_body_json( + self, + *, + stream_state: Optional[StreamState] = None, + stream_slice: Optional[StreamSlice] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> Mapping[str, Any]: + if stream_slice: + return self._partition_router.get_request_body_json( # type: ignore # this always returns a mapping + stream_state=stream_state, + stream_slice=StreamSlice(partition=stream_slice.partition, cursor_slice={}), + next_page_token=next_page_token, + ) | self._stream_cursor.get_request_body_json( + stream_state=stream_state, + 
stream_slice=StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), + next_page_token=next_page_token, + ) + else: + raise ValueError("A partition needs to be provided in order to get request body json") + + def should_be_synced(self, record: Record) -> bool: + return self._stream_cursor.should_be_synced(self._convert_record_to_cursor_record(record)) + + def is_greater_than_or_equal(self, first: Record, second: Record) -> bool: + return self._stream_cursor.is_greater_than_or_equal( + self._convert_record_to_cursor_record(first), self._convert_record_to_cursor_record(second) + ) + + @staticmethod + def _convert_record_to_cursor_record(record: Record) -> Record: + return Record( + record.data, + StreamSlice(partition={}, cursor_slice=record.associated_slice.cursor_slice) if record.associated_slice else None, + ) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index f9e4856402b0..2af4b5a0ac26 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -1107,6 +1107,11 @@ class DatetimeBasedCursor(BaseModel): description='Set to True if the target API does not accept queries where the start time equal the end time.', title='Whether to skip requests if the start time equals the end time', ) + global_substream_cursor: Optional[bool] = Field( + False, + description='This setting optimizes performance when the parent stream has thousands of partitions by storing the cursor as a single value rather than per partition. Notably, the substream state is updated only at the end of the sync, which helps prevent data loss in case of a sync failure. 
See more info in the [docs](https://docs.airbyte.com/connector-development/config-based/understanding-the-yaml-file/incremental-syncs).', + title='Whether to store cursor as one value instead of per partition', + ) lookback_window: Optional[str] = Field( None, description='Time interval before the start_datetime to read data for, e.g. P1M for looking back one month.', diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py index 686eb5d3f9d6..c0e66d1ff43d 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py @@ -35,6 +35,7 @@ CursorFactory, DatetimeBasedCursor, DeclarativeCursor, + GlobalSubstreamCursor, PerPartitionCursor, ResumableFullRefreshCursor, ) @@ -629,11 +630,13 @@ def create_declarative_stream(self, model: DeclarativeStreamModel, config: Confi and hasattr(model.incremental_sync, "is_client_side_incremental") and model.incremental_sync.is_client_side_incremental ): - if combined_slicers and not isinstance(combined_slicers, (DatetimeBasedCursor, PerPartitionCursor)): + supported_slicers = (DatetimeBasedCursor, GlobalSubstreamCursor, PerPartitionCursor) + if combined_slicers and not isinstance(combined_slicers, supported_slicers): raise ValueError("Unsupported Slicer is used. 
PerPartitionCursor should be used here instead") client_side_incremental_sync = { "date_time_based_cursor": self._create_component_from_model(model=model.incremental_sync, config=config), "per_partition_cursor": combined_slicers if isinstance(combined_slicers, PerPartitionCursor) else None, + "is_global_substream_cursor": isinstance(combined_slicers, GlobalSubstreamCursor), } transformations = [] if model.transformations: @@ -686,6 +689,7 @@ def _merge_stream_slicers(self, model: DeclarativeStreamModel, config: Config) - and model.retriever.partition_router ): stream_slicer_model = model.retriever.partition_router + if isinstance(stream_slicer_model, list): stream_slicer = CartesianProductStreamSlicer( [self._create_component_from_model(model=slicer, config=config) for slicer in stream_slicer_model], parameters={} @@ -695,12 +699,16 @@ def _merge_stream_slicers(self, model: DeclarativeStreamModel, config: Config) - if model.incremental_sync and stream_slicer: incremental_sync_model = model.incremental_sync - return PerPartitionCursor( - cursor_factory=CursorFactory( - lambda: self._create_component_from_model(model=incremental_sync_model, config=config), - ), - partition_router=stream_slicer, - ) + if hasattr(incremental_sync_model, "global_substream_cursor") and incremental_sync_model.global_substream_cursor: + cursor_component = self._create_component_from_model(model=incremental_sync_model, config=config) + return GlobalSubstreamCursor(stream_cursor=cursor_component, partition_router=stream_slicer) + else: + return PerPartitionCursor( + cursor_factory=CursorFactory( + lambda: self._create_component_from_model(model=incremental_sync_model, config=config), + ), + partition_router=stream_slicer, + ) elif model.incremental_sync: return self._create_component_from_model(model=model.incremental_sync, config=config) if model.incremental_sync else None elif stream_slicer: diff --git 
a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py index ae99a433c42f..3e2c7b77f0f8 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py @@ -20,6 +20,7 @@ from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource from airbyte_cdk.sources.declarative.retrievers.simple_retriever import SimpleRetriever from airbyte_cdk.sources.types import Record +from orjson import orjson CURSOR_FIELD = "cursor_field" SYNC_MODE = SyncMode.incremental @@ -327,7 +328,7 @@ def test_partition_limitation(): type=AirbyteStateType.STREAM, stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="post_comment_votes", namespace=None), - stream_state=AirbyteStateBlob.parse_obj( + stream_state=AirbyteStateBlob( { "states": [ { @@ -356,7 +357,7 @@ def test_partition_limitation(): output = list(source.read(logger, {}, catalog, initial_state)) # assert output_data == expected_records - final_state = [message.state.stream.stream_state.dict() for message in output if message.state] + final_state = [orjson.loads(orjson.dumps(message.state.stream.stream_state)) for message in output if message.state] assert final_state[-1] == { "states": [ { diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py index 9ced561742f6..a99909fefa0d 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py @@ -1,5 +1,6 @@ # Copyright (c) 2024 
Airbyte, Inc., all rights reserved. +import copy from typing import Any, List, Mapping, MutableMapping, Optional, Union from unittest.mock import MagicMock @@ -627,3 +628,499 @@ def test_incremental_parent_state_no_incremental_dependency( assert output_data == expected_records final_state = [orjson.loads(orjson.dumps(message.state.stream.stream_state)) for message in output if message.state] assert final_state[-1] == expected_state + + +SUBSTREAM_MANIFEST_GLOBAL_PARENT_CURSOR: MutableMapping[str, Any] = { + "version": "0.51.42", + "type": "DeclarativeSource", + "check": {"type": "CheckStream", "stream_names": ["post_comment_votes"]}, + "definitions": { + "basic_authenticator": { + "type": "BasicHttpAuthenticator", + "username": "{{ config['credentials']['email'] + '/token' }}", + "password": "{{ config['credentials']['api_token'] }}", + }, + "retriever": { + "type": "SimpleRetriever", + "requester": { + "type": "HttpRequester", + "url_base": "https://api.example.com", + "http_method": "GET", + "authenticator": "#/definitions/basic_authenticator", + }, + "record_selector": { + "type": "RecordSelector", + "extractor": { + "type": "DpathExtractor", + "field_path": ["{{ parameters.get('data_path') or parameters['name'] }}"], + }, + "schema_normalization": "Default", + }, + "paginator": { + "type": "DefaultPaginator", + "page_size_option": {"type": "RequestOption", "field_name": "per_page", "inject_into": "request_parameter"}, + "pagination_strategy": { + "type": "CursorPagination", + "page_size": 100, + "cursor_value": "{{ response.get('next_page', {}) }}", + "stop_condition": "{{ not response.get('next_page', {}) }}", + }, + "page_token_option": {"type": "RequestPath"}, + }, + }, + "cursor_incremental_sync": { + "type": "DatetimeBasedCursor", + "cursor_datetime_formats": ["%Y-%m-%dT%H:%M:%SZ", "%Y-%m-%dT%H:%M:%S%z"], + "datetime_format": "%Y-%m-%dT%H:%M:%SZ", + "cursor_field": "{{ parameters.get('cursor_field', 'updated_at') }}", + "start_datetime": {"datetime": "{{ 
config.get('start_date')}}"}, + "start_time_option": {"inject_into": "request_parameter", "field_name": "start_time", "type": "RequestOption"}, + }, + "posts_stream": { + "type": "DeclarativeStream", + "name": "posts", + "primary_key": ["id"], + "schema_loader": { + "type": "InlineSchemaLoader", + "schema": { + "$schema": "http://json-schema.org/schema#", + "properties": { + "id": {"type": "integer"}, + "updated_at": {"type": "string", "format": "date-time"}, + "title": {"type": "string"}, + "content": {"type": "string"}, + }, + "type": "object", + }, + }, + "retriever": { + "type": "SimpleRetriever", + "requester": { + "type": "HttpRequester", + "url_base": "https://api.example.com", + "path": "/community/posts", + "http_method": "GET", + "authenticator": "#/definitions/basic_authenticator", + }, + "record_selector": "#/definitions/retriever/record_selector", + "paginator": "#/definitions/retriever/paginator", + }, + "incremental_sync": "#/definitions/cursor_incremental_sync", + "$parameters": { + "name": "posts", + "path": "community/posts", + "data_path": "posts", + "cursor_field": "updated_at", + "primary_key": "id", + }, + }, + "post_comments_stream": { + "type": "DeclarativeStream", + "name": "post_comments", + "primary_key": ["id"], + "schema_loader": { + "type": "InlineSchemaLoader", + "schema": { + "$schema": "http://json-schema.org/schema#", + "properties": { + "id": {"type": "integer"}, + "updated_at": {"type": "string", "format": "date-time"}, + "post_id": {"type": "integer"}, + "comment": {"type": "string"}, + }, + "type": "object", + }, + }, + "retriever": { + "type": "SimpleRetriever", + "requester": { + "type": "HttpRequester", + "url_base": "https://api.example.com", + "path": "/community/posts/{{ stream_slice.id }}/comments", + "http_method": "GET", + "authenticator": "#/definitions/basic_authenticator", + }, + "record_selector": { + "type": "RecordSelector", + "extractor": {"type": "DpathExtractor", "field_path": ["comments"]}, + "record_filter": 
{ + "condition": "{{ record['updated_at'] >= stream_state.get('updated_at', config.get('start_date')) }}" + }, + }, + "paginator": "#/definitions/retriever/paginator", + "partition_router": { + "type": "SubstreamPartitionRouter", + "parent_stream_configs": [ + { + "stream": "#/definitions/posts_stream", + "parent_key": "id", + "partition_field": "id", + "incremental_dependency": True, + } + ], + }, + }, + "incremental_sync": { + "type": "DatetimeBasedCursor", + "cursor_datetime_formats": ["%Y-%m-%dT%H:%M:%SZ", "%Y-%m-%dT%H:%M:%S%z"], + "datetime_format": "%Y-%m-%dT%H:%M:%SZ", + "cursor_field": "{{ parameters.get('cursor_field', 'updated_at') }}", + "start_datetime": {"datetime": "{{ config.get('start_date') }}"}, + }, + "$parameters": { + "name": "post_comments", + "path": "community/posts/{{ stream_slice.id }}/comments", + "data_path": "comments", + "cursor_field": "updated_at", + "primary_key": "id", + }, + }, + "post_comment_votes_stream": { + "type": "DeclarativeStream", + "name": "post_comment_votes", + "primary_key": ["id"], + "schema_loader": { + "type": "InlineSchemaLoader", + "schema": { + "$schema": "http://json-schema.org/schema#", + "properties": { + "id": {"type": "integer"}, + "created_at": {"type": "string", "format": "date-time"}, + "comment_id": {"type": "integer"}, + "vote": {"type": "number"}, + }, + "type": "object", + }, + }, + "retriever": { + "type": "SimpleRetriever", + "requester": { + "type": "HttpRequester", + "url_base": "https://api.example.com", + "path": "/community/posts/{{ stream_slice.parent_slice.id }}/comments/{{ stream_slice.id }}/votes", + "http_method": "GET", + "authenticator": "#/definitions/basic_authenticator", + }, + "record_selector": "#/definitions/retriever/record_selector", + "paginator": "#/definitions/retriever/paginator", + "partition_router": { + "type": "SubstreamPartitionRouter", + "parent_stream_configs": [ + { + "stream": "#/definitions/post_comments_stream", + "parent_key": "id", + "partition_field": "id", + 
"incremental_dependency": True, + } + ], + }, + }, + "incremental_sync": { + "type": "DatetimeBasedCursor", + "cursor_datetime_formats": ["%Y-%m-%dT%H:%M:%SZ", "%Y-%m-%dT%H:%M:%S%z"], + "datetime_format": "%Y-%m-%dT%H:%M:%SZ", + "cursor_field": "{{ parameters.get('cursor_field', 'updated_at') }}", + "start_datetime": {"datetime": "{{ config.get('start_date')}}"}, + "start_time_option": {"inject_into": "request_parameter", "field_name": "start_time", "type": "RequestOption"}, + "global_substream_cursor": True, + }, + "$parameters": { + "name": "post_comment_votes", + "path": "community/posts/{{ stream_slice.parent_slice.id }}/comments/{{ stream_slice.id }}/votes", + "data_path": "votes", + "cursor_field": "created_at", + "primary_key": "id", + }, + }, + }, + "streams": [ + {"$ref": "#/definitions/posts_stream"}, + {"$ref": "#/definitions/post_comments_stream"}, + {"$ref": "#/definitions/post_comment_votes_stream"}, + ], +} +SUBSTREAM_MANIFEST_GLOBAL_PARENT_CURSOR_NO_DEPENDENCY = copy.deepcopy(SUBSTREAM_MANIFEST_GLOBAL_PARENT_CURSOR) +SUBSTREAM_MANIFEST_GLOBAL_PARENT_CURSOR_NO_DEPENDENCY["definitions"]["post_comment_votes_stream"]["retriever"]["partition_router"][ + "parent_stream_configs" +][0]["incremental_dependency"] = False + + +@pytest.mark.parametrize( + "test_name, manifest, mock_requests, expected_records, initial_state, expected_state", + [ + ( + "test_global_substream_cursor", + SUBSTREAM_MANIFEST_GLOBAL_PARENT_CURSOR, + [ + # Fetch the first page of posts + ( + "https://api.example.com/community/posts?per_page=100&start_time=2024-01-05T00:00:00Z", + { + "posts": [{"id": 1, "updated_at": "2024-01-30T00:00:00Z"}, {"id": 2, "updated_at": "2024-01-29T00:00:00Z"}], + "next_page": "https://api.example.com/community/posts?per_page=100&start_time=2024-01-05T00:00:00Z&page=2", + }, + ), + # Fetch the second page of posts + ( + "https://api.example.com/community/posts?per_page=100&start_time=2024-01-05T00:00:00Z&page=2", + {"posts": [{"id": 3, "updated_at": 
"2024-01-28T00:00:00Z"}]}, + ), + # Fetch the first page of comments for post 1 + ( + "https://api.example.com/community/posts/1/comments?per_page=100", + { + "comments": [ + {"id": 9, "post_id": 1, "updated_at": "2023-01-01T00:00:00Z"}, + {"id": 10, "post_id": 1, "updated_at": "2024-01-25T00:00:00Z"}, + {"id": 11, "post_id": 1, "updated_at": "2024-01-24T00:00:00Z"}, + ], + "next_page": "https://api.example.com/community/posts/1/comments?per_page=100&page=2", + }, + ), + # Fetch the second page of comments for post 1 + ( + "https://api.example.com/community/posts/1/comments?per_page=100&page=2", + {"comments": [{"id": 12, "post_id": 1, "updated_at": "2024-01-23T00:00:00Z"}]}, + ), + # Fetch the first page of votes for comment 10 of post 1 + ( + "https://api.example.com/community/posts/1/comments/10/votes?per_page=100&start_time=2024-01-03T00:00:00Z", + { + "votes": [{"id": 100, "comment_id": 10, "created_at": "2024-01-15T00:00:00Z"}], + "next_page": "https://api.example.com/community/posts/1/comments/10/votes?per_page=100&page=2&start_time=2024-01-03T00:00:01Z", + }, + ), + # Fetch the second page of votes for comment 10 of post 1 + ( + "https://api.example.com/community/posts/1/comments/10/votes?per_page=100&page=2&start_time=2024-01-03T00:00:01Z", + {"votes": [{"id": 101, "comment_id": 10, "created_at": "2024-01-14T00:00:00Z"}]}, + ), + # Fetch the first page of votes for comment 11 of post 1 + ( + "https://api.example.com/community/posts/1/comments/11/votes?per_page=100&start_time=2024-01-03T00:00:00Z", + {"votes": [{"id": 102, "comment_id": 11, "created_at": "2024-01-13T00:00:00Z"}]}, + ), + # Fetch the first page of votes for comment 12 of post 1 + ("https://api.example.com/community/posts/1/comments/12/votes?per_page=100&start_time=2024-01-03T00:00:00Z", {"votes": []}), + # Fetch the first page of comments for post 2 + ( + "https://api.example.com/community/posts/2/comments?per_page=100", + { + "comments": [{"id": 20, "post_id": 2, "updated_at": 
"2024-01-22T00:00:00Z"}], + "next_page": "https://api.example.com/community/posts/2/comments?per_page=100&page=2", + }, + ), + # Fetch the second page of comments for post 2 + ( + "https://api.example.com/community/posts/2/comments?per_page=100&page=2", + {"comments": [{"id": 21, "post_id": 2, "updated_at": "2024-01-21T00:00:00Z"}]}, + ), + # Fetch the first page of votes for comment 20 of post 2 + ( + "https://api.example.com/community/posts/2/comments/20/votes?per_page=100&start_time=2024-01-03T00:00:00Z", + {"votes": [{"id": 200, "comment_id": 20, "created_at": "2024-01-12T00:00:00Z"}]}, + ), + # Fetch the first page of votes for comment 21 of post 2 + ( + "https://api.example.com/community/posts/2/comments/21/votes?per_page=100&start_time=2024-01-03T00:00:00Z", + {"votes": [{"id": 201, "comment_id": 21, "created_at": "2024-01-12T00:00:15Z"}]}, + ), + # Fetch the first page of comments for post 3 + ( + "https://api.example.com/community/posts/3/comments?per_page=100", + {"comments": [{"id": 30, "post_id": 3, "updated_at": "2024-01-09T00:00:00Z"}]}, + ), + # Fetch the first page of votes for comment 30 of post 3 + ( + "https://api.example.com/community/posts/3/comments/30/votes?per_page=100", + {"votes": [{"id": 300, "comment_id": 30, "created_at": "2024-01-10T00:00:00Z"}]}, + ), + ], + # Expected records + [ + {"id": 100, "comment_id": 10, "created_at": "2024-01-15T00:00:00Z"}, + {"id": 101, "comment_id": 10, "created_at": "2024-01-14T00:00:00Z"}, + {"id": 102, "comment_id": 11, "created_at": "2024-01-13T00:00:00Z"}, + {"id": 200, "comment_id": 20, "created_at": "2024-01-12T00:00:00Z"}, + {"id": 201, "comment_id": 21, "created_at": "2024-01-12T00:00:15Z"}, + {"id": 300, "comment_id": 30, "created_at": "2024-01-10T00:00:00Z"}, + ], + # Initial state + [ + AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="post_comment_votes", namespace=None), + stream_state=AirbyteStateBlob( + { + 
"parent_state": { + "post_comments": { + "states": [ + {"partition": {"id": 1, "parent_slice": {}}, "cursor": {"updated_at": "2023-01-04T00:00:00Z"}} + ], + "parent_state": {"posts": {"updated_at": "2024-01-05T00:00:00Z"}}, + } + }, + "state": {"created_at": "2024-01-04T02:03:04Z"}, + "lookback_window": 93784, + } + ), + ), + ) + ], + # Expected state + { + "state": {"created_at": "2024-01-15T00:00:00Z"}, + "parent_state": { + "post_comments": { + "states": [ + {"partition": {"id": 1, "parent_slice": {}}, "cursor": {"updated_at": "2024-01-25T00:00:00Z"}}, + {"partition": {"id": 2, "parent_slice": {}}, "cursor": {"updated_at": "2024-01-22T00:00:00Z"}}, + {"partition": {"id": 3, "parent_slice": {}}, "cursor": {"updated_at": "2024-01-09T00:00:00Z"}}, + ], + "parent_state": {"posts": {"updated_at": "2024-01-30T00:00:00Z"}}, + } + }, + }, + ), + ( + "test_global_substream_cursor_no_dependency", + SUBSTREAM_MANIFEST_GLOBAL_PARENT_CURSOR_NO_DEPENDENCY, + [ + # Fetch the first page of posts + ( + "https://api.example.com/community/posts?per_page=100&start_time=2024-01-01T00:00:01Z", + { + "posts": [{"id": 1, "updated_at": "2024-01-30T00:00:00Z"}, {"id": 2, "updated_at": "2024-01-29T00:00:00Z"}], + "next_page": "https://api.example.com/community/posts?per_page=100&start_time=2024-01-01T00:00:01Z&page=2", + }, + ), + # Fetch the second page of posts + ( + "https://api.example.com/community/posts?per_page=100&start_time=2024-01-01T00:00:01Z&page=2", + {"posts": [{"id": 3, "updated_at": "2024-01-28T00:00:00Z"}]}, + ), + # Fetch the first page of comments for post 1 + ( + "https://api.example.com/community/posts/1/comments?per_page=100", + { + "comments": [ + {"id": 9, "post_id": 1, "updated_at": "2023-01-01T00:00:00Z"}, + {"id": 10, "post_id": 1, "updated_at": "2024-01-25T00:00:00Z"}, + {"id": 11, "post_id": 1, "updated_at": "2024-01-24T00:00:00Z"}, + ], + "next_page": "https://api.example.com/community/posts/1/comments?per_page=100&page=2", + }, + ), + # Fetch the second page 
of comments for post 1 + ( + "https://api.example.com/community/posts/1/comments?per_page=100&page=2", + {"comments": [{"id": 12, "post_id": 1, "updated_at": "2024-01-23T00:00:00Z"}]}, + ), + # Fetch the first page of votes for comment 10 of post 1 + ( + "https://api.example.com/community/posts/1/comments/10/votes?per_page=100&start_time=2024-01-03T00:00:00Z", + { + "votes": [{"id": 100, "comment_id": 10, "created_at": "2024-01-15T00:00:00Z"}], + "next_page": "https://api.example.com/community/posts/1/comments/10/votes?per_page=100&page=2&start_time=2024-01-03T00:00:00Z", + }, + ), + # Fetch the second page of votes for comment 10 of post 1 + ( + "https://api.example.com/community/posts/1/comments/10/votes?per_page=100&page=2&start_time=2024-01-03T00:00:00Z", + {"votes": [{"id": 101, "comment_id": 10, "created_at": "2024-01-14T00:00:00Z"}]}, + ), + # Fetch the first page of votes for comment 11 of post 1 + ( + "https://api.example.com/community/posts/1/comments/11/votes?per_page=100&start_time=2024-01-03T00:00:00Z", + {"votes": [{"id": 102, "comment_id": 11, "created_at": "2024-01-13T00:00:00Z"}]}, + ), + # Fetch the first page of votes for comment 12 of post 1 + ("https://api.example.com/community/posts/1/comments/12/votes?per_page=100&start_time=2024-01-03T00:00:00Z", {"votes": []}), + # Fetch the first page of comments for post 2 + ( + "https://api.example.com/community/posts/2/comments?per_page=100", + { + "comments": [{"id": 20, "post_id": 2, "updated_at": "2024-01-22T00:00:00Z"}], + "next_page": "https://api.example.com/community/posts/2/comments?per_page=100&page=2", + }, + ), + # Fetch the second page of comments for post 2 + ( + "https://api.example.com/community/posts/2/comments?per_page=100&page=2", + {"comments": [{"id": 21, "post_id": 2, "updated_at": "2024-01-21T00:00:00Z"}]}, + ), + # Fetch the first page of votes for comment 20 of post 2 + ( + "https://api.example.com/community/posts/2/comments/20/votes?per_page=100&start_time=2024-01-03T00:00:00Z", 
+ {"votes": [{"id": 200, "comment_id": 20, "created_at": "2024-01-12T00:00:00Z"}]}, + ), + # Fetch the first page of votes for comment 21 of post 2 + ( + "https://api.example.com/community/posts/2/comments/21/votes?per_page=100&start_time=2024-01-03T00:00:00Z", + {"votes": [{"id": 201, "comment_id": 21, "created_at": "2024-01-12T00:00:15Z"}]}, + ), + # Fetch the first page of comments for post 3 + ( + "https://api.example.com/community/posts/3/comments?per_page=100", + {"comments": [{"id": 30, "post_id": 3, "updated_at": "2024-01-09T00:00:00Z"}]}, + ), + # Fetch the first page of votes for comment 30 of post 3 + ( + "https://api.example.com/community/posts/3/comments/30/votes?per_page=100", + {"votes": [{"id": 300, "comment_id": 30, "created_at": "2024-01-10T00:00:00Z"}]}, + ), + ], + # Expected records + [ + {"id": 100, "comment_id": 10, "created_at": "2024-01-15T00:00:00Z"}, + {"id": 101, "comment_id": 10, "created_at": "2024-01-14T00:00:00Z"}, + {"id": 102, "comment_id": 11, "created_at": "2024-01-13T00:00:00Z"}, + {"id": 200, "comment_id": 20, "created_at": "2024-01-12T00:00:00Z"}, + {"id": 201, "comment_id": 21, "created_at": "2024-01-12T00:00:15Z"}, + {"id": 300, "comment_id": 30, "created_at": "2024-01-10T00:00:00Z"}, + ], + # Initial state + [ + AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="post_comment_votes", namespace=None), + stream_state=AirbyteStateBlob( + { + "parent_state": { + "post_comments": { + "states": [ + {"partition": {"id": 1, "parent_slice": {}}, "cursor": {"updated_at": "2023-01-04T00:00:00Z"}} + ], + "parent_state": {"posts": {"updated_at": "2024-01-05T00:00:00Z"}}, + } + }, + "state": {"created_at": "2024-01-04T02:03:04Z"}, + "lookback_window": 93784, + } + ), + ), + ) + ], + # Expected state + {"state": {"created_at": "2024-01-15T00:00:00Z"}}, + ), + ], +) +def test_incremental_global_parent_state(test_name, manifest, mock_requests, expected_records, 
initial_state, expected_state): + _stream_name = "post_comment_votes" + config = {"start_date": "2024-01-01T00:00:01Z", "credentials": {"email": "email", "api_token": "api_token"}} + + with requests_mock.Mocker() as m: + for url, response in mock_requests: + m.get(url, json=response) + + output = _run_read(manifest, config, _stream_name, initial_state) + output_data = [message.record.data for message in output if message.record] + + assert output_data == expected_records + final_state = [orjson.loads(orjson.dumps(message.state.stream.stream_state)) for message in output if message.state][-1] + assert "lookback_window" in final_state + final_state.pop("lookback_window") + assert final_state == expected_state diff --git a/docs/connector-development/config-based/understanding-the-yaml-file/incremental-syncs.md b/docs/connector-development/config-based/understanding-the-yaml-file/incremental-syncs.md index 7616f3822fc4..85a408027ccb 100644 --- a/docs/connector-development/config-based/understanding-the-yaml-file/incremental-syncs.md +++ b/docs/connector-development/config-based/understanding-the-yaml-file/incremental-syncs.md @@ -70,6 +70,11 @@ DatetimeBasedCursor: lookback_window: description: How many days before start_datetime to read data for (ISO8601 duration) type: string + global_substream_cursor: + title: Whether to store cursor as one value instead of per partition + description: If the parent stream has thousands of partitions, it can be more efficient to store the cursor as one value instead of per partition. A lookback window should be used to avoid missing records that were added during the sync. 
+ type: boolean + default: false start_time_option: description: Request option for start time "$ref": "#/definitions/RequestOption" @@ -166,6 +171,61 @@ incremental_sync: inject_into: "request_parameter" ``` +### Nested Streams + +Nested streams, subresources, or streams that depend on other streams can be implemented using a [`SubstreamPartitionRouter`](#SubstreamPartitionRouter) + +The default state format is **per partition**, but there are options to enhance efficiency depending on your use case: **incremental_dependency** and **global_substream_cursor**. Here's when and how to use each option, with examples: + +#### Per Partition (Default) +- **Description**: This is the default state format, where each partition has its own cursor. +- **Limitation**: The per partition state has a limit of 10,000 partitions. When this limit is exceeded, the oldest partitions are deleted. During the next sync, deleted partitions will be read in full refresh, which can be inefficient. +- **When to Use**: Use this option if the number of partitions is manageable (under 10,000). + +- **Example State**: + ```json + [ + { "partition": "A", "timestamp": "2024-08-01T00:00:00" }, + { "partition": "B", "timestamp": "2024-08-01T01:00:00" }, + { "partition": "C", "timestamp": "2024-08-01T02:00:00" } + ] + ``` + +#### Incremental Dependency +- **Description**: This option allows the parent stream to be read incrementally, ensuring that only new data is synced. +- **Requirement**: The API must ensure that the parent record's cursor is updated whenever child records are added or updated. If this requirement is not met, child records added to older parent records will be lost. +- **When to Use**: Use this option if the parent stream is incremental and you want to read it with the state. The parent state is updated after processing all the child records for the parent record. 
+- **Example State**: + ```json + { + "parent_state": { + "parent_stream": { "timestamp": "2024-08-01T00:00:00" } + }, + "child_state": [ + { "partition": "A", "timestamp": "2024-08-01T00:00:00" }, + { "partition": "B", "timestamp": "2024-08-01T01:00:00" } + ] + } + ``` + +#### Global Substream Cursor +- **Description**: This option uses a single global cursor for all partitions, significantly reducing the state size. It enforces a minimal lookback window for substream based on the duration of the previous sync to avoid losing records. This lookback ensures that any records added or updated during the sync are captured in subsequent syncs. +- **When to Use**: Use this option if the number of partitions in the parent stream is significantly higher than the 10,000 partition limit (e.g., millions of records per sync). This prevents the inefficiency of reading most partitions in full refresh and avoids duplicates during the next sync. +- **Operational Detail**: The global cursor's value is updated only at the end of the sync. If the sync fails, only the parent state is updated if the incremental dependency is enabled. +- **Example State**: + ```json + [ + { "timestamp": "2024-08-01"} + ] + ``` + +### Summary +- **Per Partition**: Default, use for manageable partitions (<10k). +- **Incremental Dependency**: Use for incremental parent streams with a dependent child cursor. Ensure API updates parent cursor with child records. +- **Global Substream Cursor**: Ideal for large-scale parent streams with many partitions to optimize performance. + +Choose the option that best fits your data structure and sync requirements to optimize performance and data integrity. 
+ ## More readings - [Incremental reads](../../cdk-python/incremental-stream.md) diff --git a/docs/connector-development/config-based/understanding-the-yaml-file/partition-router.md b/docs/connector-development/config-based/understanding-the-yaml-file/partition-router.md index 062dd1a0e544..357918cbab79 100644 --- a/docs/connector-development/config-based/understanding-the-yaml-file/partition-router.md +++ b/docs/connector-development/config-based/understanding-the-yaml-file/partition-router.md @@ -146,6 +146,7 @@ retriever: - stream: "#/repositories_stream" parent_key: "id" partition_field: "repository" + incremental_dependency: true ``` ## Nested streams From 9700e4b94fdb50a294088103fb7ce940b5329ecd Mon Sep 17 00:00:00 2001 From: tolik0 Date: Fri, 6 Sep 2024 13:48:24 +0000 Subject: [PATCH 43/51] =?UTF-8?q?=F0=9F=A4=96=20minor=20bump=20Python=20CD?= =?UTF-8?q?K=20to=20version=205.2.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 77e1b66da0c4..a3fd46e94550 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 5.2.0 +Add Global Parent State Cursor + ## 5.1.0 Add limitation for number of partitions to PerPartitionCursor diff --git a/airbyte-cdk/python/pyproject.toml b/airbyte-cdk/python/pyproject.toml index 684a196e7bca..50ead471967c 100644 --- a/airbyte-cdk/python/pyproject.toml +++ b/airbyte-cdk/python/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "airbyte-cdk" -version = "5.1.0" +version = "5.2.0" description = "A framework for writing Airbyte Connectors." 
authors = ["Airbyte "] license = "MIT" From 950ea0692efaa5ec7ee4dfd59265aefb8ac692c0 Mon Sep 17 00:00:00 2001 From: tolik0 Date: Fri, 6 Sep 2024 13:54:40 +0000 Subject: [PATCH 44/51] =?UTF-8?q?=F0=9F=A4=96=20Cut=20version=205.2.0=20of?= =?UTF-8?q?=20source-declarative-manifest?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connectors/source-declarative-manifest/metadata.yaml | 2 +- .../connectors/source-declarative-manifest/poetry.lock | 8 ++++---- .../connectors/source-declarative-manifest/pyproject.toml | 4 ++-- docs/integrations/sources/low-code.md | 1 + 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml index eea5d5ae636f..30b93e15d0f7 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml +++ b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml @@ -8,7 +8,7 @@ data: connectorType: source definitionId: 64a2f99c-542f-4af8-9a6f-355f1217b436 # This version should not be updated manually - it is updated by the CDK release workflow. - dockerImageTag: 5.1.0 + dockerImageTag: 5.2.0 dockerRepository: airbyte/source-declarative-manifest # This page is hidden from the docs for now, since the connector is not in any Airbyte registries. documentationUrl: https://docs.airbyte.com/integrations/sources/low-code diff --git a/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock index adf82cd66082..5effc4cb55dd 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock +++ b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-cdk" -version = "5.1.0" +version = "5.2.0" description = "A framework for writing Airbyte Connectors." 
optional = false python-versions = "<4.0,>=3.10" files = [ - {file = "airbyte_cdk-5.1.0-py3-none-any.whl", hash = "sha256:98cef0398459f8ccc35335f486a30d7e13f8b929879730dbc18296d4df6e4e23"}, - {file = "airbyte_cdk-5.1.0.tar.gz", hash = "sha256:e8c039f392bb2d17ddb3ea8e191642e231e92d9b097e0705a2e69a26e44a97b5"}, + {file = "airbyte_cdk-5.2.0-py3-none-any.whl", hash = "sha256:1d4373e422e84e2eb37853baeabfb55e32e5c0159287dfa042c7d41381b33f7f"}, + {file = "airbyte_cdk-5.2.0.tar.gz", hash = "sha256:c76be0701427e2b2d99934558369004e2008b6778c8adb09494945f1784ea206"}, ] [package.dependencies] @@ -1605,4 +1605,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.10,<3.12" -content-hash = "5b8febed2b745b6298948425c022b91d7c33fe9e60fce2aa1a7c5a2e081ed1ad" +content-hash = "8b9677225336fa3548aa6d40746b75fee57fd0a29c46ceee7b00a9e13f98bd84" diff --git a/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml index 74345ba37e4e..ca36d851e678 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml +++ b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml @@ -3,7 +3,7 @@ requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "5.1.0" +version = "5.2.0" name = "source-declarative-manifest" description = "Base source implementation for low-code sources." 
authors = ["Airbyte "] @@ -17,7 +17,7 @@ include = "source_declarative_manifest" [tool.poetry.dependencies] python = "^3.10,<3.12" -airbyte-cdk = "5.1.0" +airbyte-cdk = "5.2.0" [tool.poetry.scripts] source-declarative-manifest = "source_declarative_manifest.run:run" diff --git a/docs/integrations/sources/low-code.md b/docs/integrations/sources/low-code.md index 09f9ea660f6b..26e63e9dc7a3 100644 --- a/docs/integrations/sources/low-code.md +++ b/docs/integrations/sources/low-code.md @@ -9,6 +9,7 @@ The changelog below is automatically updated by the `bump_version` command as pa | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------- | +| 5.2.0 | 2024-09-06 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.2.0 | | 5.1.0 | 2024-09-06 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.1.0 | | 5.0.1 | 2024-09-03 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.0.1 | | 5.0.0 | 2024-09-02 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.0.0 | From caa12ab0c7e1b1e2c2a08fe5feefd92d0cc04c6e Mon Sep 17 00:00:00 2001 From: Anatolii Yatsuk <35109939+tolik0@users.noreply.github.com> Date: Fri, 6 Sep 2024 17:39:35 +0300 Subject: [PATCH 45/51] docs(airbyte-cdk): Fix error in incremental sync docs (#45194) --- .../understanding-the-yaml-file/incremental-syncs.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/connector-development/config-based/understanding-the-yaml-file/incremental-syncs.md b/docs/connector-development/config-based/understanding-the-yaml-file/incremental-syncs.md index 85a408027ccb..1ba21a4f9065 100644 --- a/docs/connector-development/config-based/understanding-the-yaml-file/incremental-syncs.md +++ 
b/docs/connector-development/config-based/understanding-the-yaml-file/incremental-syncs.md @@ -220,7 +220,7 @@ The default state format is **per partition**, but there are options to enhance ``` ### Summary -- **Per Partition**: Default, use for manageable partitions (<10k). +- **Per Partition**: Default, use for manageable partitions (\<10k). - **Incremental Dependency**: Use for incremental parent streams with a dependent child cursor. Ensure API updates parent cursor with child records. - **Global Substream Cursor**: Ideal for large-scale parent streams with many partitions to optimize performance. From d7f85ed2468c2c92278b7432f6d805ec9543b33f Mon Sep 17 00:00:00 2001 From: tolik0 Date: Fri, 6 Sep 2024 14:44:01 +0000 Subject: [PATCH 46/51] =?UTF-8?q?=F0=9F=A4=96=20patch=20bump=20Python=20CD?= =?UTF-8?q?K=20to=20version=205.2.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index a3fd46e94550..129ebc999eb3 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 5.2.1 +Fix error in incremental sync docs + ## 5.2.0 Add Global Parent State Cursor diff --git a/airbyte-cdk/python/pyproject.toml b/airbyte-cdk/python/pyproject.toml index 50ead471967c..b2898e71d5ee 100644 --- a/airbyte-cdk/python/pyproject.toml +++ b/airbyte-cdk/python/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "airbyte-cdk" -version = "5.2.0" +version = "5.2.1" description = "A framework for writing Airbyte Connectors." 
authors = ["Airbyte "] license = "MIT" From 2d94c539689c39bf149e0665ba5fe462da2bec61 Mon Sep 17 00:00:00 2001 From: tolik0 Date: Fri, 6 Sep 2024 14:50:23 +0000 Subject: [PATCH 47/51] =?UTF-8?q?=F0=9F=A4=96=20Cut=20version=205.2.1=20of?= =?UTF-8?q?=20source-declarative-manifest?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connectors/source-declarative-manifest/metadata.yaml | 2 +- .../connectors/source-declarative-manifest/poetry.lock | 8 ++++---- .../connectors/source-declarative-manifest/pyproject.toml | 4 ++-- docs/integrations/sources/low-code.md | 1 + 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml index 30b93e15d0f7..80d0dad57c49 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml +++ b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml @@ -8,7 +8,7 @@ data: connectorType: source definitionId: 64a2f99c-542f-4af8-9a6f-355f1217b436 # This version should not be updated manually - it is updated by the CDK release workflow. - dockerImageTag: 5.2.0 + dockerImageTag: 5.2.1 dockerRepository: airbyte/source-declarative-manifest # This page is hidden from the docs for now, since the connector is not in any Airbyte registries. documentationUrl: https://docs.airbyte.com/integrations/sources/low-code diff --git a/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock index 5effc4cb55dd..94b436b90189 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock +++ b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-cdk" -version = "5.2.0" +version = "5.2.1" description = "A framework for writing Airbyte Connectors." 
optional = false python-versions = "<4.0,>=3.10" files = [ - {file = "airbyte_cdk-5.2.0-py3-none-any.whl", hash = "sha256:1d4373e422e84e2eb37853baeabfb55e32e5c0159287dfa042c7d41381b33f7f"}, - {file = "airbyte_cdk-5.2.0.tar.gz", hash = "sha256:c76be0701427e2b2d99934558369004e2008b6778c8adb09494945f1784ea206"}, + {file = "airbyte_cdk-5.2.1-py3-none-any.whl", hash = "sha256:b6d19a36e35c5002c248e0c09beb8d5d23ef045ad0bfa8fecbad8ad227641890"}, + {file = "airbyte_cdk-5.2.1.tar.gz", hash = "sha256:940c1b1f94d476584ba49d2d126ec77da14104b7ca1e4d7e25e0836f15cd25cc"}, ] [package.dependencies] @@ -1605,4 +1605,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.10,<3.12" -content-hash = "8b9677225336fa3548aa6d40746b75fee57fd0a29c46ceee7b00a9e13f98bd84" +content-hash = "ebea97fabc6fd3b9a36cd0b764ccd6dcaa59a5a6502f35700ef60ebe441c5b57" diff --git a/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml index ca36d851e678..98de6f2a7125 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml +++ b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml @@ -3,7 +3,7 @@ requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "5.2.0" +version = "5.2.1" name = "source-declarative-manifest" description = "Base source implementation for low-code sources." 
authors = ["Airbyte "] @@ -17,7 +17,7 @@ include = "source_declarative_manifest" [tool.poetry.dependencies] python = "^3.10,<3.12" -airbyte-cdk = "5.2.0" +airbyte-cdk = "5.2.1" [tool.poetry.scripts] source-declarative-manifest = "source_declarative_manifest.run:run" diff --git a/docs/integrations/sources/low-code.md b/docs/integrations/sources/low-code.md index 26e63e9dc7a3..3f9fa36f6c17 100644 --- a/docs/integrations/sources/low-code.md +++ b/docs/integrations/sources/low-code.md @@ -9,6 +9,7 @@ The changelog below is automatically updated by the `bump_version` command as pa | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------- | +| 5.2.1 | 2024-09-06 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.2.1 | | 5.2.0 | 2024-09-06 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.2.0 | | 5.1.0 | 2024-09-06 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.1.0 | | 5.0.1 | 2024-09-03 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 5.0.1 | From 425d7d1897cfe5295d69aa3aff7de8102b3056c0 Mon Sep 17 00:00:00 2001 From: btkcodedev Date: Fri, 6 Sep 2024 21:09:36 +0530 Subject: [PATCH 48/51] Source Gong: Add new stream `/calls/extensive` (#45117) --- .../integration_tests/configured_catalog.json | 9 + .../connectors/source-gong/manifest.yaml | 1677 +++++++---------- .../connectors/source-gong/metadata.yaml | 4 +- docs/integrations/sources/gong.md | 2 + 4 files changed, 716 insertions(+), 976 deletions(-) diff --git a/airbyte-integrations/connectors/source-gong/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-gong/integration_tests/configured_catalog.json index aca1e16d42f7..3013e0b0d6f0 100644 --- 
a/airbyte-integrations/connectors/source-gong/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-gong/integration_tests/configured_catalog.json @@ -18,6 +18,15 @@ "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" }, + { + "stream": { + "name": "extensiveCalls", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, { "stream": { "name": "scorecards", diff --git a/airbyte-integrations/connectors/source-gong/manifest.yaml b/airbyte-integrations/connectors/source-gong/manifest.yaml index af586be3cb63..5c357b1f3dc7 100644 --- a/airbyte-integrations/connectors/source-gong/manifest.yaml +++ b/airbyte-integrations/connectors/source-gong/manifest.yaml @@ -1,9 +1,12 @@ -version: 4.3.0 +version: 4.6.2 + type: DeclarativeSource + check: type: CheckStream stream_names: - users + definitions: streams: users: @@ -14,12 +17,7 @@ definitions: retriever: type: SimpleRetriever requester: - type: HttpRequester - url_base: https://api.gong.io/v2/ - authenticator: - type: BasicHttpAuthenticator - username: "{{ config['access_key'] }}" - password: "{{ config['access_key_secret'] }}" + $ref: "#/definitions/base_requester" path: /users http_method: GET request_parameters: @@ -48,93 +46,7 @@ definitions: schema_loader: type: InlineSchemaLoader schema: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - active: - type: - - "null" - - boolean - description: Indicates if the user is currently active or not - created: - type: - - "null" - - string - description: The timestamp denoting when the user account was created - format: date-time - emailAddress: - type: - - "null" - - string - description: The primary email address associated with the user - emailAliases: - type: - - "null" - - array - description: - Additional email addresses that can be used to reach the - user - extension: - type: - - 
"null" - - string - description: The phone extension number for the user - firstName: - type: - - "null" - - string - description: The first name of the user - id: - type: - - "null" - - string - description: Unique identifier for the user - lastName: - type: - - "null" - - string - description: The last name of the user - managerId: - type: - - "null" - - string - description: The ID of the user's manager - meetingConsentPageUrl: - type: - - "null" - - string - description: URL for the consent page related to meetings - personalMeetingUrls: - type: - - "null" - - array - description: URLs for personal meeting rooms assigned to the user - phoneNumber: - type: - - "null" - - string - description: The phone number associated with the user - settings: - type: - - "null" - - object - description: User-specific settings and configurations - spokenLanguages: - type: - - "null" - - array - description: Languages spoken by the user - title: - type: - - "null" - - string - description: The job title or position of the user - trustedEmailAddress: - type: - - "null" - - string - description: An email address that is considered trusted for the user + $ref: "#/schemas/users" calls: type: DeclarativeStream name: calls @@ -143,12 +55,7 @@ definitions: retriever: type: SimpleRetriever requester: - type: HttpRequester - url_base: https://api.gong.io/v2/ - authenticator: - type: BasicHttpAuthenticator - username: "{{ config['access_key'] }}" - password: "{{ config['access_key_secret'] }}" + $ref: "#/definitions/base_requester" path: /calls http_method: GET request_parameters: @@ -172,116 +79,74 @@ definitions: schema_loader: type: InlineSchemaLoader schema: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - calendarEventId: - type: - - "null" - - string - description: - Unique identifier for the calendar event associated with - the call. 
- clientUniqueId: - type: - - "null" - - string - description: Unique identifier for the client related to the call. - customData: - type: - - "null" - - string - description: Custom data associated with the call. - direction: - type: - - "null" - - string - description: Direction of the call (inbound/outbound). - duration: - type: - - "null" - - integer - description: Duration of the call in seconds. - id: - type: - - "null" - - string - description: Unique identifier for the call. - isPrivate: - type: - - "null" - - boolean - description: Indicates if the call is private or not. - language: - type: - - "null" - - string - description: Language used in the call. - media: - type: - - "null" - - string - description: Media type used for communication (voice, video, etc.). - meetingUrl: - type: - - "null" - - string - description: URL for accessing the meeting associated with the call. - primaryUserId: - type: - - "null" - - string - description: - Unique identifier for the primary user involved in the - call. - purpose: - type: - - "null" - - string - description: Purpose or topic of the call. - scheduled: - type: - - "null" - - string - description: Scheduled date and time of the call. - format: date-time - scope: - type: - - "null" - - string - description: Scope or extent of the call. - sdrDisposition: - type: - - "null" - - string - description: Disposition set by the sales development representative. - started: - type: - - "null" - - string - description: Start date and time of the call. - format: date-time - system: - type: - - "null" - - string - description: System information related to the call. - title: - type: - - "null" - - string - description: Title or headline of the call. - url: - type: - - "null" - - string - description: URL associated with the call. - workspaceId: - type: - - "null" - - string - description: Identifier for the workspace to which the call belongs. 
+ $ref: "#/schemas/calls" + extensiveCalls: + type: DeclarativeStream + name: extensiveCalls + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: calls/extensive + http_method: POST + request_body_json: + filter: + fromDateTime: "{{ config['start_date'] }}" + contentSelector: + exposedFields: + media: true + content: + brief: true + topics: true + outline: true + trackers: true + keyPoints: true + structure: true + highlights: true + callOutcome: true + pointsOfInterest: true + trackerOccurrences: true + parties: true + interaction: + video: true + speakers: true + questions: true + personInteractionStats: true + collaboration: + publicComments: true + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - calls + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + inject_into: body_json + field_name: cursor + page_size_option: + type: RequestOption + field_name: limit + inject_into: body_json + pagination_strategy: + type: CursorPagination + page_size: 100 + cursor_value: "{{ response.records.cursor }}" + stop_condition: "{{ 'records' not in response }}" + transformations: + - type: AddFields + fields: + - path: + - id + value: "{{ record['metaData']['id'] }}" + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/extensiveCalls" scorecards: type: DeclarativeStream name: scorecards @@ -290,12 +155,7 @@ definitions: retriever: type: SimpleRetriever requester: - type: HttpRequester - url_base: https://api.gong.io/v2/ - authenticator: - type: BasicHttpAuthenticator - username: "{{ config['access_key'] }}" - password: "{{ config['access_key_secret'] }}" + $ref: "#/definitions/base_requester" path: /settings/scorecards http_method: GET request_parameters: @@ -324,98 +184,7 @@ definitions: schema_loader: type: InlineSchemaLoader schema: - type: object - $schema: http://json-schema.org/draft-07/schema# - 
additionalProperties: true - properties: - created: - type: - - "null" - - string - description: The timestamp when the scorecard was created - format: date-time - enabled: - type: - - "null" - - boolean - description: Indicates if the scorecard is enabled or disabled - questions: - type: - - "null" - - array - description: An array of questions related to the scorecard - items: - type: - - "null" - - object - properties: - created: - type: - - "null" - - string - description: The timestamp when the question was created - format: date-time - isOverall: - type: - - "null" - - boolean - description: - Indicates if the question is an overall score or - not - questionId: - type: - - "null" - - string - description: The unique identifier of the question - questionRevisionId: - type: - - "null" - - string - description: The revision identifier of the question - questionText: - type: - - "null" - - string - description: The text of the question - updated: - type: - - "null" - - string - description: The timestamp when the question was last updated - format: date-time - updaterUserId: - type: - - "null" - - string - description: The user ID of the person who last updated the question - scorecardId: - type: - - "null" - - string - description: The unique identifier of the scorecard - scorecardName: - type: - - "null" - - string - description: The name of the scorecard - updated: - type: - - "null" - - string - description: The timestamp when the scorecard was last updated - format: date-time - updaterUserId: - type: - - "null" - - string - description: The user ID of the person who last updated the scorecard - workspaceId: - type: - - "null" - - string - description: - The unique identifier of the workspace associated with - the scorecard + $ref: "#/schemas/scorecards" answeredScorecards: type: DeclarativeStream name: answeredScorecards @@ -424,12 +193,7 @@ definitions: retriever: type: SimpleRetriever requester: - type: HttpRequester - url_base: https://api.gong.io/v2/ - 
authenticator: - type: BasicHttpAuthenticator - username: "{{ config['access_key'] }}" - password: "{{ config['access_key_secret'] }}" + $ref: "#/definitions/base_requester" path: /stats/activity/scorecards http_method: POST request_parameters: @@ -460,662 +224,22 @@ definitions: schema_loader: type: InlineSchemaLoader schema: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - answeredScorecardId: - type: - - "null" - - string - description: Unique identifier for the answered scorecard instance. - answers: - type: - - "null" - - array - description: Contains the answered questions in the scorecards - items: - type: - - "null" - - object - properties: - answerText: - type: - - "null" - - string - description: Text containing the answer given. - isOverall: - type: - - "null" - - boolean - description: >- - Boolean flag indicating if the answer is for an overall - evaluation. - notApplicable: - type: - - "null" - - boolean - description: >- - Boolean flag indicating if the question is marked as not - applicable. - questionId: - type: - - "null" - - string - description: Unique identifier for the question answered. - questionRevisionId: - type: - - "null" - - string - description: - Unique identifier for the revision of the question - answered. - score: - type: - - "null" - - integer - description: Numeric score assigned to the answer. - callId: - type: - - "null" - - string - description: - Unique identifier for the call associated with the answered - scorecard. - callStartTime: - type: - - "null" - - string - description: Timestamp indicating the start time of the call. - format: date-time - reviewTime: - type: - - "null" - - string - description: >- - Timestamp indicating when the review of the answered scorecard was - completed. - format: date-time - reviewedUserId: - type: - - "null" - - string - description: Unique identifier for the user whose performance was reviewed. 
- reviewerUserId: - type: - - "null" - - string - description: Unique identifier for the user who performed the review. - scorecardId: - type: - - "null" - - string - description: Unique identifier for the scorecard template used. - scorecardName: - type: - - "null" - - string - description: Name or title of the scorecard template used. - visibilityType: - type: - - "null" - - string - description: - Type indicating the visibility permissions for the answered - scorecard. + $ref: "#/schemas/answeredScorecards" base_requester: type: HttpRequester url_base: https://api.gong.io/v2/ authenticator: type: BasicHttpAuthenticator - username: "{{ config['access_key'] }}" - password: "{{ config['access_key_secret'] }}" + username: '{{ config["access_key"] }}' + password: '{{ config["access_key_secret"] }}' + streams: - - type: DeclarativeStream - name: users - primary_key: - - id - retriever: - type: SimpleRetriever - requester: - type: HttpRequester - url_base: https://api.gong.io/v2/ - authenticator: - type: BasicHttpAuthenticator - username: "{{ config['access_key'] }}" - password: "{{ config['access_key_secret'] }}" - path: /users - http_method: GET - request_parameters: - fromDateTime: "{{ config['start_date'] }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - users - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: cursor - page_size_option: - type: RequestOption - field_name: limit - inject_into: request_parameter - pagination_strategy: - type: CursorPagination - page_size: 100 - cursor_value: "{{ response.records.cursor }}" - stop_condition: "{{ 'records' not in response }}" - schema_loader: - type: InlineSchemaLoader - schema: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - active: - type: - - "null" - - boolean - description: Indicates if the user is currently active or not - 
created: - type: - - "null" - - string - description: The timestamp denoting when the user account was created - format: date-time - emailAddress: - type: - - "null" - - string - description: The primary email address associated with the user - emailAliases: - type: - - "null" - - array - description: Additional email addresses that can be used to reach the user - extension: - type: - - "null" - - string - description: The phone extension number for the user - firstName: - type: - - "null" - - string - description: The first name of the user - id: - type: - - "null" - - string - description: Unique identifier for the user - lastName: - type: - - "null" - - string - description: The last name of the user - managerId: - type: - - "null" - - string - description: The ID of the user's manager - meetingConsentPageUrl: - type: - - "null" - - string - description: URL for the consent page related to meetings - personalMeetingUrls: - type: - - "null" - - array - description: URLs for personal meeting rooms assigned to the user - phoneNumber: - type: - - "null" - - string - description: The phone number associated with the user - settings: - type: - - "null" - - object - description: User-specific settings and configurations - spokenLanguages: - type: - - "null" - - array - description: Languages spoken by the user - title: - type: - - "null" - - string - description: The job title or position of the user - trustedEmailAddress: - type: - - "null" - - string - description: An email address that is considered trusted for the user - - type: DeclarativeStream - name: calls - primary_key: - - id - retriever: - type: SimpleRetriever - requester: - type: HttpRequester - url_base: https://api.gong.io/v2/ - authenticator: - type: BasicHttpAuthenticator - username: "{{ config['access_key'] }}" - password: "{{ config['access_key_secret'] }}" - path: /calls - http_method: GET - request_parameters: - fromDateTime: "{{ config['start_date'] }}" - record_selector: - type: RecordSelector - 
extractor: - type: DpathExtractor - field_path: - - calls - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: cursor - pagination_strategy: - type: CursorPagination - cursor_value: "{{ response.records.cursor }}" - stop_condition: "{{ 'records' not in response }}" - schema_loader: - type: InlineSchemaLoader - schema: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - calendarEventId: - type: - - "null" - - string - description: - Unique identifier for the calendar event associated with the - call. - clientUniqueId: - type: - - "null" - - string - description: Unique identifier for the client related to the call. - customData: - type: - - "null" - - string - description: Custom data associated with the call. - direction: - type: - - "null" - - string - description: Direction of the call (inbound/outbound). - duration: - type: - - "null" - - integer - description: Duration of the call in seconds. - id: - type: - - "null" - - string - description: Unique identifier for the call. - isPrivate: - type: - - "null" - - boolean - description: Indicates if the call is private or not. - language: - type: - - "null" - - string - description: Language used in the call. - media: - type: - - "null" - - string - description: Media type used for communication (voice, video, etc.). - meetingUrl: - type: - - "null" - - string - description: URL for accessing the meeting associated with the call. - primaryUserId: - type: - - "null" - - string - description: Unique identifier for the primary user involved in the call. - purpose: - type: - - "null" - - string - description: Purpose or topic of the call. - scheduled: - type: - - "null" - - string - description: Scheduled date and time of the call. - format: date-time - scope: - type: - - "null" - - string - description: Scope or extent of the call. 
- sdrDisposition: - type: - - "null" - - string - description: Disposition set by the sales development representative. - started: - type: - - "null" - - string - description: Start date and time of the call. - format: date-time - system: - type: - - "null" - - string - description: System information related to the call. - title: - type: - - "null" - - string - description: Title or headline of the call. - url: - type: - - "null" - - string - description: URL associated with the call. - workspaceId: - type: - - "null" - - string - description: Identifier for the workspace to which the call belongs. - - type: DeclarativeStream - name: scorecards - primary_key: - - scorecardId - retriever: - type: SimpleRetriever - requester: - type: HttpRequester - url_base: https://api.gong.io/v2/ - authenticator: - type: BasicHttpAuthenticator - username: "{{ config['access_key'] }}" - password: "{{ config['access_key_secret'] }}" - path: /settings/scorecards - http_method: GET - request_parameters: - fromDateTime: "{{ config['start_date'] }}" - record_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - scorecards - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: request_parameter - field_name: cursor - page_size_option: - type: RequestOption - field_name: limit - inject_into: request_parameter - pagination_strategy: - type: CursorPagination - page_size: 100 - cursor_value: "{{ response.records.cursor }}" - stop_condition: "{{ 'records' not in response }}" - schema_loader: - type: InlineSchemaLoader - schema: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - created: - type: - - "null" - - string - description: The timestamp when the scorecard was created - format: date-time - enabled: - type: - - "null" - - boolean - description: Indicates if the scorecard is enabled or disabled - questions: - type: - - "null" - - array - description: An 
array of questions related to the scorecard - items: - type: - - "null" - - object - properties: - created: - type: - - "null" - - string - description: The timestamp when the question was created - format: date-time - isOverall: - type: - - "null" - - boolean - description: Indicates if the question is an overall score or not - questionId: - type: - - "null" - - string - description: The unique identifier of the question - questionRevisionId: - type: - - "null" - - string - description: The revision identifier of the question - questionText: - type: - - "null" - - string - description: The text of the question - updated: - type: - - "null" - - string - description: The timestamp when the question was last updated - format: date-time - updaterUserId: - type: - - "null" - - string - description: The user ID of the person who last updated the question - scorecardId: - type: - - "null" - - string - description: The unique identifier of the scorecard - scorecardName: - type: - - "null" - - string - description: The name of the scorecard - updated: - type: - - "null" - - string - description: The timestamp when the scorecard was last updated - format: date-time - updaterUserId: - type: - - "null" - - string - description: The user ID of the person who last updated the scorecard - workspaceId: - type: - - "null" - - string - description: - The unique identifier of the workspace associated with the - scorecard - - type: DeclarativeStream - name: answeredScorecards - primary_key: - - answeredScorecardId - retriever: - type: SimpleRetriever - requester: - type: HttpRequester - url_base: https://api.gong.io/v2/ - authenticator: - type: BasicHttpAuthenticator - username: "{{ config['access_key'] }}" - password: "{{ config['access_key_secret'] }}" - path: /stats/activity/scorecards - http_method: POST - request_parameters: - fromDateTime: "{{ config['start_date'] }}" - request_body_json: - filter: '{"callFromDate": "{{ config["start_date"] }}"}' - record_selector: - type: 
RecordSelector - extractor: - type: DpathExtractor - field_path: - - answeredScorecards - paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - inject_into: body_json - field_name: cursor - page_size_option: - type: RequestOption - field_name: limit - inject_into: body_json - pagination_strategy: - type: CursorPagination - page_size: 100 - cursor_value: "{{ response.records.cursor }}" - stop_condition: "{{ 'records' not in response }}" - schema_loader: - type: InlineSchemaLoader - schema: - type: object - $schema: http://json-schema.org/draft-07/schema# - additionalProperties: true - properties: - answeredScorecardId: - type: - - "null" - - string - description: Unique identifier for the answered scorecard instance. - answers: - type: - - "null" - - array - description: Contains the answered questions in the scorecards - items: - type: - - "null" - - object - properties: - answerText: - type: - - "null" - - string - description: Text containing the answer given. - isOverall: - type: - - "null" - - boolean - description: >- - Boolean flag indicating if the answer is for an overall - evaluation. - notApplicable: - type: - - "null" - - boolean - description: >- - Boolean flag indicating if the question is marked as not - applicable. - questionId: - type: - - "null" - - string - description: Unique identifier for the question answered. - questionRevisionId: - type: - - "null" - - string - description: Unique identifier for the revision of the question answered. - score: - type: - - "null" - - integer - description: Numeric score assigned to the answer. - callId: - type: - - "null" - - string - description: - Unique identifier for the call associated with the answered - scorecard. - callStartTime: - type: - - "null" - - string - description: Timestamp indicating the start time of the call. 
- format: date-time - reviewTime: - type: - - "null" - - string - description: >- - Timestamp indicating when the review of the answered scorecard was - completed. - format: date-time - reviewedUserId: - type: - - "null" - - string - description: Unique identifier for the user whose performance was reviewed. - reviewerUserId: - type: - - "null" - - string - description: Unique identifier for the user who performed the review. - scorecardId: - type: - - "null" - - string - description: Unique identifier for the scorecard template used. - scorecardName: - type: - - "null" - - string - description: Name or title of the scorecard template used. - visibilityType: - type: - - "null" - - string - description: - Type indicating the visibility permissions for the answered - scorecard. + - $ref: "#/definitions/streams/users" + - $ref: "#/definitions/streams/calls" + - $ref: "#/definitions/streams/extensiveCalls" + - $ref: "#/definitions/streams/scorecards" + - $ref: "#/definitions/streams/answeredScorecards" + spec: type: Spec connection_specification: @@ -1127,35 +251,75 @@ spec: properties: access_key: type: string - title: Gong Access Key description: Gong Access Key + title: Gong Access Key airbyte_secret: true order: 0 access_key_secret: type: string - title: Gong Access Key Secret description: Gong Access Key Secret + title: Gong Access Key Secret airbyte_secret: true order: 1 start_date: type: string - title: Start date - pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ description: >- The date from which to list calls, in the ISO-8601 format; if not specified, the calls start with the earliest recorded call. For web-conference calls recorded by Gong, the date denotes its scheduled time, otherwise, it denotes its actual start time. 
+ title: Start date + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ examples: - "2018-02-18T08:00:00Z" order: 2 additionalProperties: true + metadata: autoImportSchema: users: false calls: false + extensiveCalls: false scorecards: false answeredScorecards: false + testedStreams: + scorecards: + streamHash: b4a5aa11ee2cb96554fa359ffec46d50641b0434 + hasResponse: true + responsesAreSuccessful: true + hasRecords: true + primaryKeysArePresent: true + primaryKeysAreUnique: true + users: + streamHash: c6b2bab08397024790447b2ea909e5e2056b3778 + hasResponse: true + responsesAreSuccessful: true + hasRecords: true + primaryKeysArePresent: true + primaryKeysAreUnique: true + answeredScorecards: + streamHash: 42e030943265c512ecbca24e953416c157210edf + hasResponse: true + responsesAreSuccessful: true + hasRecords: true + primaryKeysArePresent: true + primaryKeysAreUnique: true + extensiveCalls: + streamHash: 99ac4098c8bba3989f73dff4c7b4b85963c90acd + hasResponse: true + responsesAreSuccessful: true + hasRecords: true + primaryKeysArePresent: true + primaryKeysAreUnique: true + calls: + streamHash: 4152ed041830ae0728cd87629fcc46f5ab42a394 + hasResponse: true + responsesAreSuccessful: true + hasRecords: true + primaryKeysArePresent: true + primaryKeysAreUnique: true + assist: {} + schemas: users: type: object @@ -1252,9 +416,7 @@ schemas: type: - "null" - string - description: - Unique identifier for the calendar event associated with the - call. + description: Unique identifier for the calendar event associated with the call. clientUniqueId: type: - "null" @@ -1352,6 +514,570 @@ schemas: - "null" - string description: Identifier for the workspace to which the call belongs. 
+ extensiveCalls: + type: object + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + collaboration: + type: + - object + - "null" + description: Collaboration information added to the call + properties: + brief: + type: + - string + - "null" + content: + type: + - object + - "null" + description: Analysis of the interaction content. + properties: + brief: + type: + - string + - "null" + highlights: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + items: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + startTimes: + type: + - array + - "null" + items: + type: + - number + - "null" + text: + type: + - string + - "null" + title: + type: + - string + - "null" + keyPoints: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + text: + type: + - string + - "null" + outline: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + duration: + type: + - number + - "null" + items: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + startTime: + type: + - number + - "null" + text: + type: + - string + - "null" + section: + type: + - string + - "null" + startTime: + type: + - number + - "null" + pointsOfInterest: + type: + - object + - "null" + properties: + actionItems: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + snippet: + type: + - string + - "null" + snippetEndTime: + type: + - number + - "null" + snippetStartTime: + type: + - number + - "null" + speakerID: + type: + - string + - "null" + topics: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + duration: + type: + - number + - "null" + name: + type: + - string + - "null" + trackers: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + type: + type: + - string + - "null" + count: + type: + - number + - "null" + id: + 
type: + - string + - "null" + name: + type: + - string + - "null" + occurrences: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + speakerId: + type: + - string + - "null" + startTime: + type: + - number + - "null" + phrases: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + count: + type: + - number + - "null" + occurrences: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + speakerId: + type: + - string + - "null" + startTime: + type: + - number + - "null" + phrase: + type: + - string + - "null" + context: + description: A list of the agenda of each part of the call. + properties: + objects: + type: array + description: List of objects within the external system. + items: + type: object + properties: + fields: + type: array + description: array. + items: + type: object + properties: + name: + type: + - "null" + - string + description: >- + Field name. For Account supported fields are: Name, + Website, Industry and all the custom fields. For + Opportunity supported fields are Name, LeadSource, + Type, StageName, Probability, Amount, CloseDate and + all the custom fields. + value: + type: + - "null" + - object + description: Field value. + objectId: + type: + - "null" + - string + description: Object ID. + objectType: + type: + - "null" + - string + description: "Object Type. Allowed: Opportunity, Account" + timing: + type: + - "null" + - string + description: "Timing of object. Allowed: Now, TimeOfCall." + system: + type: + - "null" + - string + description: >- + External system name. Allowed: Salesforce, HubSpot, + MicrosoftDynamic, Generic. + id: + type: + - "null" + - integer + description: Unique identifier for the call (from metaData.id). + interaction: + type: + - object + - "null" + description: Metrics collected around the interaction during the call. 
+ properties: + interactionStats: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + name: + type: + - string + - "null" + value: + type: + - number + - "null" + questions: + type: + - object + - "null" + properties: + companyCount: + type: + - number + - "null" + nonCompanyCount: + type: + - number + - "null" + speakers: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + id: + type: + - string + - "null" + talkTime: + type: + - number + - "null" + userId: + type: + - string + - "null" + video: + type: + - array + - "null" + items: + type: + - object + - "null" + properties: + duration: + type: + - number + - "null" + name: + type: + - string + - "null" + media: + description: The media urls of the call. + properties: + audioUrl: + type: + - "null" + - string + description: >- + If true and exist, add audio url of the call. The url will be + available for 8 hours. + videoUrl: + type: + - "null" + - string + description: >- + If true and exist, add video url of the call. The url will be + available for 8 hours. + metaData: + type: + - object + - "null" + description: call's metadata. + properties: + calendarEventId: + type: + - "null" + - string + description: Unique identifier for the calendar event associated with the call. + clientUniqueId: + type: + - "null" + - string + description: Unique identifier for the client related to the call. + customData: + type: + - "null" + - string + description: Custom data associated with the call. + direction: + type: + - "null" + - string + description: Direction of the call (inbound/outbound). + duration: + type: + - "null" + - integer + description: Duration of the call in seconds. + id: + type: + - "null" + - string + description: Unique identifier for the call. + isPrivate: + type: + - "null" + - boolean + description: Indicates if the call is private or not. + language: + type: + - "null" + - string + description: Language used in the call. 
+ media: + type: + - "null" + - string + description: Media type used for communication (voice, video, etc.). + meetingUrl: + type: + - "null" + - string + description: URL for accessing the meeting associated with the call. + primaryUserId: + type: + - "null" + - string + description: Unique identifier for the primary user involved in the call. + purpose: + type: + - "null" + - string + description: Purpose or topic of the call. + scheduled: + type: + - "null" + - string + description: Scheduled date and time of the call. + format: date-time + scope: + type: + - "null" + - string + description: Scope or extent of the call. + sdrDisposition: + type: + - "null" + - string + description: Disposition set by the sales development representative. + started: + type: + - "null" + - string + description: Start date and time of the call. + format: date-time + system: + type: + - "null" + - string + description: System information related to the call. + title: + type: + - "null" + - string + description: Title or headline of the call. + url: + type: + - "null" + - string + description: URL associated with the call. + workspaceId: + type: + - "null" + - string + description: Identifier for the workspace to which the call belongs. + parties: + type: + - array + - "null" + description: A list of the call's participants + items: + type: + - object + - "null" + properties: + affiliation: + type: + - string + - "null" + description: >- + Whether the participant is from the company or not. Allowed: + Internal, External, Unknown + context: + type: + - object + - "null" + description: >- + A list of links to external systems such as CRM, Dialer, Case + Management, etc. + emailAddress: + type: + - string + - "null" + description: Email address. + id: + type: + - string + - "null" + description: Unique ID of the participant in the call. + methods: + type: + - array + - "null" + description: >- + Whether the participant was invited to the meeting or only + attended the call. 
Allowed: Invitee, Attendee. + items: + type: + - string + - "null" + name: + type: + - string + - "null" + description: The name of the participant. + phoneNumber: + type: + - string + - "null" + description: The phone number of the participant. + speakerId: + type: + - string + - "null" + description: >- + Unique ID of a participant that spoke in the call. References to + this id will appear in the '/v2/calls/transcript' endpoint + response. + title: + type: + - string + - "null" + description: The job title of the participant. + userId: + type: + - string + - "null" + description: >- + The user ID of the participant within the Gong system, if the + participant exists in the system. scorecards: type: object $schema: http://json-schema.org/draft-07/schema# @@ -1489,7 +1215,8 @@ schemas: type: - "null" - string - description: Unique identifier for the revision of the question answered. + description: Unique identifier for the revision of the question + answered. score: type: - "null" @@ -1499,7 +1226,8 @@ schemas: type: - "null" - string - description: Unique identifier for the call associated with the answered scorecard. + description: Unique identifier for the call associated with the answered + scorecard. callStartTime: type: - "null" @@ -1538,4 +1266,5 @@ schemas: type: - "null" - string - description: Type indicating the visibility permissions for the answered scorecard. + description: Type indicating the visibility permissions for the answered + scorecard. 
diff --git a/airbyte-integrations/connectors/source-gong/metadata.yaml b/airbyte-integrations/connectors/source-gong/metadata.yaml index aa98f729fc6b..418d2dfb9a76 100644 --- a/airbyte-integrations/connectors/source-gong/metadata.yaml +++ b/airbyte-integrations/connectors/source-gong/metadata.yaml @@ -3,11 +3,11 @@ data: ql: 100 sl: 100 connectorBuildOptions: - baseImage: docker.io/airbyte/source-declarative-manifest:4.4.3@sha256:8937b693c7e01087f6e86e683826ac20f160f7952b8f0a13cbf4f9bfdd7af570 + baseImage: docker.io/airbyte/source-declarative-manifest:4.6.2@sha256:f5fcd3d4703b7590b6166a7853c5ed1686731607cd30a159a8c24e2fe2c1ee98 connectorSubtype: api connectorType: source definitionId: 32382e40-3b49-4b99-9c5c-4076501914e7 - dockerImageTag: 0.2.1 + dockerImageTag: 0.3.0 dockerRepository: airbyte/source-gong documentationUrl: https://docs.airbyte.com/integrations/sources/gong githubIssueLabel: source-gong diff --git a/docs/integrations/sources/gong.md b/docs/integrations/sources/gong.md index b04fcee1d961..acb73195184a 100644 --- a/docs/integrations/sources/gong.md +++ b/docs/integrations/sources/gong.md @@ -12,6 +12,7 @@ This Source is capable of syncing the following core Streams: - [answered scorecards](https://us-14321.app.gong.io/settings/api/documentation#post-/v2/stats/activity/scorecards) - [calls](https://us-14321.app.gong.io/settings/api/documentation#get-/v2/calls) +- [extensive calls](https://us-56804.app.gong.io/settings/api/documentation#post-/v2/calls/extensive) - [scorecards](https://us-14321.app.gong.io/settings/api/documentation#get-/v2/settings/scorecards) - [users](https://us-14321.app.gong.io/settings/api/documentation#get-/v2/users) @@ -39,6 +40,7 @@ By default Gong limits your company's access to the service to 3 API calls per s | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------------ | +| 0.3.0 | 
2024-09-04 | [45117](https://github.com/airbytehq/airbyte/pull/45117) | Add new stream `extensive calls` | | 0.2.1 | 2024-08-16 | [44196](https://github.com/airbytehq/airbyte/pull/44196) | Bump source-declarative-manifest version | | 0.2.0 | 2024-08-15 | [44144](https://github.com/airbytehq/airbyte/pull/44144) | Refactor connector to manifest-only format | | 0.1.17 | 2024-08-10 | [43481](https://github.com/airbytehq/airbyte/pull/43481) | Update dependencies | From 25313363ad85523c38d7bce37198625108dbaf09 Mon Sep 17 00:00:00 2001 From: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Date: Fri, 6 Sep 2024 11:44:35 -0400 Subject: [PATCH 49/51] Source The Guardian API: Migrate to Manifest Only (#45195) --- .../source-the-guardian-api/README.md | 123 +- .../source-the-guardian-api/__init__.py | 3 - .../acceptance-test-config.yml | 2 +- .../source-the-guardian-api/bootstrap.md | 46 - .../custom_page_strategy.py => components.py} | 3 +- .../source-the-guardian-api/main.py | 8 - .../source-the-guardian-api/manifest.yaml | 376 ++++++ .../source-the-guardian-api/metadata.yaml | 8 +- .../source-the-guardian-api/poetry.lock | 1038 ----------------- .../source-the-guardian-api/pyproject.toml | 29 - .../source_the_guardian_api/__init__.py | 8 - .../source_the_guardian_api/manifest.yaml | 73 -- .../source_the_guardian_api/run.py | 14 - .../schemas/content.json | 52 - .../source_the_guardian_api/source.py | 18 - .../source_the_guardian_api/spec.yaml | 54 - docs/integrations/sources/the-guardian-api.md | 25 +- 17 files changed, 427 insertions(+), 1453 deletions(-) delete mode 100644 airbyte-integrations/connectors/source-the-guardian-api/__init__.py delete mode 100644 airbyte-integrations/connectors/source-the-guardian-api/bootstrap.md rename airbyte-integrations/connectors/source-the-guardian-api/{source_the_guardian_api/custom_page_strategy.py => components.py} (87%) delete mode 100644 
airbyte-integrations/connectors/source-the-guardian-api/main.py create mode 100644 airbyte-integrations/connectors/source-the-guardian-api/manifest.yaml delete mode 100644 airbyte-integrations/connectors/source-the-guardian-api/poetry.lock delete mode 100644 airbyte-integrations/connectors/source-the-guardian-api/pyproject.toml delete mode 100644 airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/__init__.py delete mode 100644 airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/manifest.yaml delete mode 100644 airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/run.py delete mode 100644 airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/schemas/content.json delete mode 100644 airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/source.py delete mode 100644 airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/spec.yaml diff --git a/airbyte-integrations/connectors/source-the-guardian-api/README.md b/airbyte-integrations/connectors/source-the-guardian-api/README.md index 9b2af0b867e0..30e1bb6c8d1e 100644 --- a/airbyte-integrations/connectors/source-the-guardian-api/README.md +++ b/airbyte-integrations/connectors/source-the-guardian-api/README.md @@ -1,98 +1,49 @@ -# The Guardian Api Source +# The Guardian API source connector -This is the repository for the The Guardian Api configuration based source connector. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/the-guardian-api). +This directory contains the manifest-only connector for `source-the-guardian-api`. +This _manifest-only_ connector is not a Python package on its own, as it runs inside of the base `source-declarative-manifest` image. 
-## Local development - -#### Create credentials +For information about how to configure and use this connector within Airbyte, see [the connector's full documentation](https://docs.airbyte.com/integrations/sources/the-guardian-api). -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/the-guardian-api) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_the_guardian_api/spec.yaml` file. -Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +## Local development -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source the-guardian-api test creds` -and place them into `secrets/config.json`. +We recommend using the Connector Builder to edit this connector. +Using either Airbyte Cloud or your local Airbyte OSS instance, navigate to the **Builder** tab and select **Import a YAML**. +Then select the connector's `manifest.yaml` file to load the connector into the Builder. You're now ready to make changes to the connector! -### Locally running the connector docker image +If you prefer to develop locally, you can follow the instructions below. +### Building the docker image +You can build any manifest-only connector with `airbyte-ci`: -#### Use `airbyte-ci` to build your connector -The Airbyte way of building this connector is to use our `airbyte-ci` tool. -You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). -Then running the following command will build your connector: +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. 
Run the following command to build the docker image: ```bash -airbyte-ci connectors --name source-the-guardian-api build -``` -Once the command is done, you will find your connector image in your local docker registry: `airbyte/source-the-guardian-api:dev`. - -##### Customizing our build process -When contributing on our connector you might need to customize the build process to add a system dependency or set an env var. -You can customize our build process by adding a `build_customization.py` module to your connector. -This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively. -It will be imported at runtime by our build process and the functions will be called if they exist. - -Here is an example of a `build_customization.py` module: -```python -from __future__ import annotations - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - # Feel free to check the dagger documentation for more information on the Container object and its methods. - # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/ - from dagger import Container - - -async def pre_connector_install(base_image_container: Container) -> Container: - return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") - -async def post_connector_install(connector_container: Container) -> Container: - return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") +airbyte-ci connectors --name=source-the-guardian-api build ``` -#### Build your own connector image -This connector is built using our dynamic built process in `airbyte-ci`. -The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`. 
-The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py). -It does not rely on a Dockerfile. +An image will be available on your host with the tag `airbyte/source-the-guardian-api:dev`. -If you would like to patch our connector and build your own a simple approach would be to: +### Creating credentials -1. Create your own Dockerfile based on the latest version of the connector image. -```Dockerfile -FROM airbyte/source-the-guardian-api:latest +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/the-guardian-api) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `spec` object in the connector's `manifest.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -COPY . ./airbyte/integration_code -RUN pip install ./airbyte/integration_code +### Running as a docker container -# The entrypoint and default env vars are already set in the base image -# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -``` -Please use this as an example. This is not optimized. +Then run any of the standard source connector commands: -2. Build your image: ```bash -docker build -t airbyte/source-the-guardian-api:dev . 
-# Running the spec command against your patched connector -docker run airbyte/source-the-guardian-api:dev spec -``` -#### Run - -Then run any of the connector commands as follows: - -``` docker run --rm airbyte/source-the-guardian-api:dev spec docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-the-guardian-api:dev check --config /secrets/config.json docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-the-guardian-api:dev discover --config /secrets/config.json docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-the-guardian-api:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing +### Running the CI test suite You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): @@ -100,27 +51,15 @@ You can run our full test suite locally using [`airbyte-ci`](https://github.com/ airbyte-ci connectors --name=source-the-guardian-api test ``` -### Customizing acceptance Tests - -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. - -## Dependency Management - -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: - -- required for your connector to work need to go to `MAIN_REQUIREMENTS` list. 
-- required for the testing need to go to `TEST_REQUIREMENTS` list - -### Publishing a new version of the connector - -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +## Publishing a new version of the connector -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-the-guardian-api test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/the-guardian-api.md`). +If you want to contribute changes to `source-the-guardian-api`, here's how you can do that: +1. Make your changes locally, or load the connector's manifest into Connector Builder and make changes there. +2. Make sure your changes are passing our test suite with `airbyte-ci connectors --name=source-the-guardian-api test` +3. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/the-guardian-api.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. \ No newline at end of file +7. 
Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. diff --git a/airbyte-integrations/connectors/source-the-guardian-api/__init__.py b/airbyte-integrations/connectors/source-the-guardian-api/__init__.py deleted file mode 100644 index c941b3045795..000000000000 --- a/airbyte-integrations/connectors/source-the-guardian-api/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# diff --git a/airbyte-integrations/connectors/source-the-guardian-api/acceptance-test-config.yml b/airbyte-integrations/connectors/source-the-guardian-api/acceptance-test-config.yml index f533f081d319..d7418f621722 100644 --- a/airbyte-integrations/connectors/source-the-guardian-api/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-the-guardian-api/acceptance-test-config.yml @@ -4,7 +4,7 @@ connector_image: airbyte/source-the-guardian-api:dev acceptance_tests: spec: tests: - - spec_path: "source_the_guardian_api/spec.yaml" + - spec_path: "manifest.yaml" connection: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-the-guardian-api/bootstrap.md b/airbyte-integrations/connectors/source-the-guardian-api/bootstrap.md deleted file mode 100644 index 816bdb38ab47..000000000000 --- a/airbyte-integrations/connectors/source-the-guardian-api/bootstrap.md +++ /dev/null @@ -1,46 +0,0 @@ -# The Guardian API - -## Overview - -[The Guardian Open Platform](https://open-platform.theguardian.com/) is a public web service for accessing all the content the Guardian creates, categorised by tags and section. To get started, You need a key to successfully authenticate against the API. 
The Guardian API Connector is implemented with the [Airbyte Low-Code CDK](https://docs.airbyte.com/connector-development/config-based/low-code-cdk-overview). - -## Output Format - -#### Each content item has the following structure:- - -```yaml -{ - "id": "string", - "type": "string" - "sectionId": "string" - "sectionName": "string" - "webPublicationDate": "string" - "webTitle": "string" - "webUrl": "string" - "apiUrl": "string" - "isHosted": "boolean" - "pillarId": "string" - "pillarName": "string" -} -``` - -**Description:-** - -**webPublicationDate**: The combined date and time of publication -**webUrl**: The URL of the html content -**apiUrl**: The URL of the raw content - -## Core Streams - -Connector supports the `content` stream that returns all pieces of content in the API. - -## Rate Limiting - -The key that you are assigned is rate-limited and as such any applications that depend on making large numbers of requests on a polling basis are likely to exceed their daily quota and thus be prevented from making further requests until the next period begins. - -## Authentication and Permissions - -To access the API, you will need to sign up for an API key, which should be sent with every request. Visit [this](https://open-platform.theguardian.com/access) link to get an API key. -The easiest way to see what data is included is to explore the data. You can build complex queries quickly and browse the results. Visit [this](https://open-platform.theguardian.com/explore) link to explore the data. - -See [this](https://docs.airbyte.io/integrations/sources/the-guardian-api) link for the connector docs. 
diff --git a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/custom_page_strategy.py b/airbyte-integrations/connectors/source-the-guardian-api/components.py similarity index 87% rename from airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/custom_page_strategy.py rename to airbyte-integrations/connectors/source-the-guardian-api/components.py index b1570f2738a0..998187314780 100644 --- a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/custom_page_strategy.py +++ b/airbyte-integrations/connectors/source-the-guardian-api/components.py @@ -15,7 +15,7 @@ class CustomPageIncrement(PageIncrement): Starts page from 1 instead of the default value that is 0. Stops Pagination when currentPage is equal to totalPages. """ - def next_page_token(self, response: requests.Response, last_records: List[Mapping[str, Any]]) -> Optional[Any]: + def next_page_token(self, response: requests.Response, *args) -> Optional[Any]: res = response.json().get("response") currPage = res.get("currentPage") totalPages = res.get("pages") @@ -26,6 +26,7 @@ def next_page_token(self, response: requests.Response, last_records: List[Mappin return None def __post_init__(self, parameters: Mapping[str, Any]): + super().__post_init__(parameters) self._page = 1 def reset(self): diff --git a/airbyte-integrations/connectors/source-the-guardian-api/main.py b/airbyte-integrations/connectors/source-the-guardian-api/main.py deleted file mode 100644 index 50182e0c2da0..000000000000 --- a/airbyte-integrations/connectors/source-the-guardian-api/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -from source_the_guardian_api.run import run - -if __name__ == "__main__": - run() diff --git a/airbyte-integrations/connectors/source-the-guardian-api/manifest.yaml b/airbyte-integrations/connectors/source-the-guardian-api/manifest.yaml new file mode 100644 index 000000000000..7b440631f138 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/manifest.yaml @@ -0,0 +1,376 @@ +version: "4.3.2" +definitions: + selector: + extractor: + field_path: + - response + - results + requester: + url_base: "https://content.guardianapis.com" + http_method: "GET" + request_parameters: + api-key: "{{ config['api_key'] }}" + q: "{{ config['query'] }}" + tag: "{{ config['tag'] }}" + section: "{{ config['section'] }}" + order-by: "oldest" + incremental_sync: + type: DatetimeBasedCursor + start_datetime: + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%d" + end_datetime: + datetime: "{{ config['end_date'] or now_utc().strftime('%Y-%m-%d') }}" + datetime_format: "%Y-%m-%d" + step: "P7D" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + cursor_granularity: "PT1S" + cursor_field: "webPublicationDate" + start_time_option: + field_name: "from-date" + inject_into: "request_parameter" + end_time_option: + field_name: "to-date" + inject_into: "request_parameter" + retriever: + record_selector: + extractor: + field_path: + - response + - results + paginator: + type: DefaultPaginator + pagination_strategy: + type: CustomPaginationStrategy + class_name: "source_declarative_manifest.components.CustomPageIncrement" + page_size: 10 + page_token_option: + type: RequestOption + inject_into: "request_parameter" + field_name: "page" + page_size_option: + inject_into: "body_data" + field_name: "page_size" + requester: + url_base: "https://content.guardianapis.com" + http_method: "GET" + request_parameters: + api-key: "{{ config['api_key'] }}" + q: "{{ config['query'] }}" + tag: "{{ config['tag'] }}" + section: "{{ config['section'] }}" + order-by: "oldest" + 
base_stream: + incremental_sync: + type: DatetimeBasedCursor + start_datetime: + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%d" + end_datetime: + datetime: "{{ config['end_date'] or now_utc().strftime('%Y-%m-%d') }}" + datetime_format: "%Y-%m-%d" + step: "P7D" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + cursor_granularity: "PT1S" + cursor_field: "webPublicationDate" + start_time_option: + field_name: "from-date" + inject_into: "request_parameter" + end_time_option: + field_name: "to-date" + inject_into: "request_parameter" + retriever: + record_selector: + extractor: + field_path: + - response + - results + paginator: + type: DefaultPaginator + pagination_strategy: + type: CustomPaginationStrategy + class_name: "source_declarative_manifest.components.CustomPageIncrement" + page_size: 10 + page_token_option: + type: RequestOption + inject_into: "request_parameter" + field_name: "page" + page_size_option: + inject_into: "body_data" + field_name: "page_size" + requester: + url_base: "https://content.guardianapis.com" + http_method: "GET" + request_parameters: + api-key: "{{ config['api_key'] }}" + q: "{{ config['query'] }}" + tag: "{{ config['tag'] }}" + section: "{{ config['section'] }}" + order-by: "oldest" + content_stream: + incremental_sync: + type: DatetimeBasedCursor + start_datetime: + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%d" + end_datetime: + datetime: "{{ config['end_date'] or now_utc().strftime('%Y-%m-%d') }}" + datetime_format: "%Y-%m-%d" + step: "P7D" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + cursor_granularity: "PT1S" + cursor_field: "webPublicationDate" + start_time_option: + field_name: "from-date" + inject_into: "request_parameter" + end_time_option: + field_name: "to-date" + inject_into: "request_parameter" + retriever: + record_selector: + extractor: + field_path: + - response + - results + paginator: + type: "DefaultPaginator" + pagination_strategy: + type: CustomPaginationStrategy + class_name: 
"source_declarative_manifest.components.CustomPageIncrement" + page_size: 10 + page_token_option: + type: RequestOption + inject_into: "request_parameter" + field_name: "page" + page_size_option: + inject_into: "body_data" + field_name: "page_size" + requester: + url_base: "https://content.guardianapis.com" + http_method: "GET" + request_parameters: + api-key: "{{ config['api_key'] }}" + q: "{{ config['query'] }}" + tag: "{{ config['tag'] }}" + section: "{{ config['section'] }}" + order-by: "oldest" + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-04/schema# + type: object + properties: + id: + type: string + type: + type: string + sectionId: + type: string + sectionName: + type: string + webPublicationDate: + type: string + webTitle: + type: string + webUrl: + type: string + apiUrl: + type: string + isHosted: + type: boolean + pillarId: + type: string + pillarName: + type: string + required: + - id + - type + - sectionId + - sectionName + - webPublicationDate + - webTitle + - webUrl + - apiUrl + - isHosted + - pillarId + - pillarName +streams: + - incremental_sync: + type: DatetimeBasedCursor + start_datetime: + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%d" + type: MinMaxDatetime + end_datetime: + datetime: "{{ config['end_date'] or now_utc().strftime('%Y-%m-%d') }}" + datetime_format: "%Y-%m-%d" + type: MinMaxDatetime + step: "P7D" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + cursor_granularity: "PT1S" + cursor_field: "webPublicationDate" + start_time_option: + field_name: "from-date" + inject_into: "request_parameter" + type: RequestOption + end_time_option: + field_name: "to-date" + inject_into: "request_parameter" + type: RequestOption + retriever: + record_selector: + extractor: + field_path: + - response + - results + type: DpathExtractor + type: RecordSelector + paginator: + type: "DefaultPaginator" + pagination_strategy: + class_name: source_declarative_manifest.components.CustomPageIncrement 
+ page_size: 10 + type: CustomPaginationStrategy + page_token_option: + type: RequestOption + inject_into: "request_parameter" + field_name: "page" + page_size_option: + inject_into: "body_data" + field_name: "page_size" + type: RequestOption + requester: + url_base: "https://content.guardianapis.com" + http_method: "GET" + request_parameters: + api-key: "{{ config['api_key'] }}" + q: "{{ config['query'] }}" + tag: "{{ config['tag'] }}" + section: "{{ config['section'] }}" + order-by: "oldest" + type: HttpRequester + path: "/search" + type: SimpleRetriever + schema_loader: + type: InlineSchemaLoader + schema: + $schema: http://json-schema.org/draft-04/schema# + type: object + properties: + id: + type: string + type: + type: string + sectionId: + type: string + sectionName: + type: string + webPublicationDate: + type: string + webTitle: + type: string + webUrl: + type: string + apiUrl: + type: string + isHosted: + type: boolean + pillarId: + type: string + pillarName: + type: string + required: + - id + - type + - sectionId + - sectionName + - webPublicationDate + - webTitle + - webUrl + - apiUrl + - isHosted + - pillarId + - pillarName + type: DeclarativeStream + name: "content" + primary_key: "id" +check: + stream_names: + - "content" + type: CheckStream +type: DeclarativeSource +spec: + type: Spec + documentation_url: https://docs.airbyte.com/integrations/sources/the-guardian-api + connection_specification: + $schema: http://json-schema.org/draft-07/schema# + title: The Guardian Api Spec + type: object + required: + - api_key + - start_date + additionalProperties: true + properties: + api_key: + title: API Key + type: string + description: + Your API Key. See here. + The key is case sensitive. + airbyte_secret: true + start_date: + title: Start Date + type: string + description: + Use this to set the minimum date (YYYY-MM-DD) of the results. + Results older than the start_date will not be shown. 
+ pattern: ^([1-9][0-9]{3})\-(0?[1-9]|1[012])\-(0?[1-9]|[12][0-9]|3[01])$ + examples: + - YYYY-MM-DD + query: + title: Query + type: string + description: + (Optional) The query (q) parameter filters the results to only + those that include that search term. The q parameter supports AND, OR and + NOT operators. + examples: + - environment AND NOT water + - environment AND political + - amusement park + - political + tag: + title: Tag + type: string + description: + (Optional) A tag is a piece of data that is used by The Guardian + to categorise content. Use this parameter to filter results by showing only + the ones matching the entered tag. See here + for a list of all tags, and here + for the tags endpoint documentation. + examples: + - environment/recycling + - environment/plasticbags + - environment/energyefficiency + section: + title: Section + type: string + description: + (Optional) Use this to filter the results by a particular section. + See here + for a list of all sections, and here + for the sections endpoint documentation. + examples: + - media + - technology + - housing-network + end_date: + title: End Date + type: string + description: + (Optional) Use this to set the maximum date (YYYY-MM-DD) of the + results. Results newer than the end_date will not be shown. Default is set + to the current date (today) for incremental syncs. 
+ pattern: ^([1-9][0-9]{3})\-(0?[1-9]|1[012])\-(0?[1-9]|[12][0-9]|3[01])$ + examples: + - YYYY-MM-DD diff --git a/airbyte-integrations/connectors/source-the-guardian-api/metadata.yaml b/airbyte-integrations/connectors/source-the-guardian-api/metadata.yaml index 742ce797acc1..40541ee50e37 100644 --- a/airbyte-integrations/connectors/source-the-guardian-api/metadata.yaml +++ b/airbyte-integrations/connectors/source-the-guardian-api/metadata.yaml @@ -3,7 +3,7 @@ data: ql: 100 sl: 100 connectorBuildOptions: - baseImage: docker.io/airbyte/python-connector-base:2.0.0@sha256:c44839ba84406116e8ba68722a0f30e8f6e7056c726f447681bb9e9ece8bd916 + baseImage: docker.io/airbyte/source-declarative-manifest:5.0.1@sha256:00e7e63244b57956f08f99ed45597e1710a269ef893722dffd841796ccdf3934 connectorSubtype: api connectorTestSuitesOptions: - suite: liveTests @@ -19,7 +19,7 @@ data: type: GSM connectorType: source definitionId: d42bd69f-6bf0-4d0b-9209-16231af07a92 - dockerImageTag: 0.1.9 + dockerImageTag: 0.2.0 dockerRepository: airbyte/source-the-guardian-api documentationUrl: https://docs.airbyte.com/integrations/sources/the-guardian-api githubIssueLabel: source-the-guardian-api @@ -34,10 +34,10 @@ data: releaseStage: alpha remoteRegistries: pypi: - enabled: true + enabled: false packageName: airbyte-source-the-guardian-api supportLevel: community tags: - - language:python - cdk:low-code + - language:manifest-only metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-the-guardian-api/poetry.lock b/airbyte-integrations/connectors/source-the-guardian-api/poetry.lock deleted file mode 100644 index 2d8cc7ef340b..000000000000 --- a/airbyte-integrations/connectors/source-the-guardian-api/poetry.lock +++ /dev/null @@ -1,1038 +0,0 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. - -[[package]] -name = "airbyte-cdk" -version = "0.60.0" -description = "A framework for writing Airbyte Connectors." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "airbyte-cdk-0.60.0.tar.gz", hash = "sha256:75ed80dd66d48248c71bd1a4809b9b46ffcbbefe8703ccc811044026820545f9"}, - {file = "airbyte_cdk-0.60.0-py3-none-any.whl", hash = "sha256:83997df59af7d8ef347dfce369388321a03bb21855f68692938d2a5df18a2487"}, -] - -[package.dependencies] -airbyte-protocol-models = "0.5.1" -backoff = "*" -cachetools = "*" -Deprecated = ">=1.2,<2.0" -dpath = ">=2.0.1,<2.1.0" -genson = "1.2.2" -isodate = ">=0.6.1,<0.7.0" -Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" -jsonschema = ">=3.2.0,<3.3.0" -pendulum = "<3.0.0" -pydantic = ">=1.10.8,<2.0.0" -pyrate-limiter = ">=3.1.0,<3.2.0" -python-dateutil = "*" -PyYAML = ">=6.0.1" -requests = "*" -requests-cache = "*" -wcmatch = "8.4" - -[package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] -vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] - -[[package]] -name = "airbyte-protocol-models" -version = "0.5.1" -description = "Declares the Airbyte Protocol." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, - {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, -] - -[package.dependencies] -pydantic = ">=1.9.2,<2.0.0" - -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - -[[package]] -name = "backoff" -version = "2.2.1" -description = "Function decoration for backoff and retry" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, - {file = "backoff-2.2.1.tar.gz", hash = 
"sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, -] - -[[package]] -name = "bracex" -version = "2.5" -description = "Bash style brace expander." -optional = false -python-versions = ">=3.8" -files = [ - {file = "bracex-2.5-py3-none-any.whl", hash = "sha256:d2fcf4b606a82ac325471affe1706dd9bbaa3536c91ef86a31f6b766f3dad1d0"}, - {file = "bracex-2.5.tar.gz", hash = "sha256:0725da5045e8d37ea9592ab3614d8b561e22c3c5fde3964699be672e072ab611"}, -] - -[[package]] -name = "cachetools" -version = "5.5.0" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, - {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, -] - -[[package]] -name = "cattrs" -version = "24.1.0" -description = "Composable complex class support for attrs and dataclasses." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cattrs-24.1.0-py3-none-any.whl", hash = "sha256:043bb8af72596432a7df63abcff0055ac0f198a4d2e95af8db5a936a7074a761"}, - {file = "cattrs-24.1.0.tar.gz", hash = "sha256:8274f18b253bf7674a43da851e3096370d67088165d23138b04a1c04c8eaf48e"}, -] - -[package.dependencies] -attrs = ">=23.1.0" -exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} - -[package.extras] -bson = ["pymongo (>=4.4.0)"] -cbor2 = ["cbor2 (>=5.4.6)"] -msgpack = ["msgpack (>=1.0.5)"] -msgspec = ["msgspec (>=0.18.5)"] -orjson = ["orjson (>=3.9.2)"] -pyyaml = ["pyyaml (>=6.0)"] -tomlkit = ["tomlkit (>=0.11.8)"] -ujson = ["ujson (>=5.7.0)"] - -[[package]] -name = "certifi" -version = "2024.8.30" -description = "Python package for providing Mozilla's CA Bundle." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = 
"charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - -[[package]] -name = "dpath" -version = "2.0.8" -description = "Filesystem-like pathing and searching for dictionaries" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, - {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "freezegun" -version = "1.5.1" -description = "Let your Python 
tests travel through time" -optional = false -python-versions = ">=3.7" -files = [ - {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, - {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, -] - -[package.dependencies] -python-dateutil = ">=2.7" - -[[package]] -name = "genson" -version = "1.2.2" -description = "GenSON is a powerful, user-friendly JSON Schema generator." -optional = false -python-versions = "*" -files = [ - {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, -] - -[[package]] -name = "idna" -version = "3.8" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.6" -files = [ - {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, - {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "isodate" -version = "0.6.1" -description = "An ISO 8601 date/time/duration parser and formatter" -optional = false -python-versions = "*" -files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = 
"jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." -optional = false -python-versions = ">=3.3,<4.0" -files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, -] - -[[package]] -name = "jsonschema" -version = "3.2.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = "*" -files = [ - {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, - {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, -] - -[package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0" -setuptools = "*" -six = ">=1.11.0" - -[package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "packaging" -version = "24.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = 
"sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, -] - -[[package]] -name = "pendulum" -version = "2.1.2" -description = "Python datetimes made easy" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, - {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, - {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, - {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, - {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, - {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, - {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, - {file = 
"pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, - {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, - {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, - {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, - {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, -] - -[package.dependencies] -python-dateutil = ">=2.6,<3.0" -pytzdata = ">=2020.1" - -[[package]] -name = "platformdirs" -version = "4.2.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pydantic" -version = "1.10.18" -description = "Data validation and settings management using python type hints" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydantic-1.10.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e405ffcc1254d76bb0e760db101ee8916b620893e6edfbfee563b3c6f7a67c02"}, - {file = "pydantic-1.10.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e306e280ebebc65040034bff1a0a81fd86b2f4f05daac0131f29541cafd80b80"}, - {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11d9d9b87b50338b1b7de4ebf34fd29fdb0d219dc07ade29effc74d3d2609c62"}, - {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b661ce52c7b5e5f600c0c3c5839e71918346af2ef20062705ae76b5c16914cab"}, - {file = 
"pydantic-1.10.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c20f682defc9ef81cd7eaa485879ab29a86a0ba58acf669a78ed868e72bb89e0"}, - {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5ae6b7c8483b1e0bf59e5f1843e4fd8fd405e11df7de217ee65b98eb5462861"}, - {file = "pydantic-1.10.18-cp310-cp310-win_amd64.whl", hash = "sha256:74fe19dda960b193b0eb82c1f4d2c8e5e26918d9cda858cbf3f41dd28549cb70"}, - {file = "pydantic-1.10.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72fa46abace0a7743cc697dbb830a41ee84c9db8456e8d77a46d79b537efd7ec"}, - {file = "pydantic-1.10.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef0fe7ad7cbdb5f372463d42e6ed4ca9c443a52ce544472d8842a0576d830da5"}, - {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00e63104346145389b8e8f500bc6a241e729feaf0559b88b8aa513dd2065481"}, - {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae6fa2008e1443c46b7b3a5eb03800121868d5ab6bc7cda20b5df3e133cde8b3"}, - {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9f463abafdc92635da4b38807f5b9972276be7c8c5121989768549fceb8d2588"}, - {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3445426da503c7e40baccefb2b2989a0c5ce6b163679dd75f55493b460f05a8f"}, - {file = "pydantic-1.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:467a14ee2183bc9c902579bb2f04c3d3dac00eff52e252850509a562255b2a33"}, - {file = "pydantic-1.10.18-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:efbc8a7f9cb5fe26122acba1852d8dcd1e125e723727c59dcd244da7bdaa54f2"}, - {file = "pydantic-1.10.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:24a4a159d0f7a8e26bf6463b0d3d60871d6a52eac5bb6a07a7df85c806f4c048"}, - {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b74be007703547dc52e3c37344d130a7bfacca7df112a9e5ceeb840a9ce195c7"}, - {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcb20d4cb355195c75000a49bb4a31d75e4295200df620f454bbc6bdf60ca890"}, - {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46f379b8cb8a3585e3f61bf9ae7d606c70d133943f339d38b76e041ec234953f"}, - {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbfbca662ed3729204090c4d09ee4beeecc1a7ecba5a159a94b5a4eb24e3759a"}, - {file = "pydantic-1.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:c6d0a9f9eccaf7f438671a64acf654ef0d045466e63f9f68a579e2383b63f357"}, - {file = "pydantic-1.10.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d5492dbf953d7d849751917e3b2433fb26010d977aa7a0765c37425a4026ff1"}, - {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe734914977eed33033b70bfc097e1baaffb589517863955430bf2e0846ac30f"}, - {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15fdbe568beaca9aacfccd5ceadfb5f1a235087a127e8af5e48df9d8a45ae85c"}, - {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c3e742f62198c9eb9201781fbebe64533a3bbf6a76a91b8d438d62b813079dbc"}, - {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19a3bd00b9dafc2cd7250d94d5b578edf7a0bd7daf102617153ff9a8fa37871c"}, - {file = "pydantic-1.10.18-cp37-cp37m-win_amd64.whl", hash = "sha256:2ce3fcf75b2bae99aa31bd4968de0474ebe8c8258a0110903478bd83dfee4e3b"}, - {file = "pydantic-1.10.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:335a32d72c51a313b33fa3a9b0fe283503272ef6467910338e123f90925f0f03"}, - {file = "pydantic-1.10.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:34a3613c7edb8c6fa578e58e9abe3c0f5e7430e0fc34a65a415a1683b9c32d9a"}, - {file = 
"pydantic-1.10.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9ee4e6ca1d9616797fa2e9c0bfb8815912c7d67aca96f77428e316741082a1b"}, - {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23e8ec1ce4e57b4f441fc91e3c12adba023fedd06868445a5b5f1d48f0ab3682"}, - {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:44ae8a3e35a54d2e8fa88ed65e1b08967a9ef8c320819a969bfa09ce5528fafe"}, - {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5389eb3b48a72da28c6e061a247ab224381435256eb541e175798483368fdd3"}, - {file = "pydantic-1.10.18-cp38-cp38-win_amd64.whl", hash = "sha256:069b9c9fc645474d5ea3653788b544a9e0ccd3dca3ad8c900c4c6eac844b4620"}, - {file = "pydantic-1.10.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:80b982d42515632eb51f60fa1d217dfe0729f008e81a82d1544cc392e0a50ddf"}, - {file = "pydantic-1.10.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:aad8771ec8dbf9139b01b56f66386537c6fe4e76c8f7a47c10261b69ad25c2c9"}, - {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941a2eb0a1509bd7f31e355912eb33b698eb0051730b2eaf9e70e2e1589cae1d"}, - {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65f7361a09b07915a98efd17fdec23103307a54db2000bb92095457ca758d485"}, - {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6951f3f47cb5ca4da536ab161ac0163cab31417d20c54c6de5ddcab8bc813c3f"}, - {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a4c5eec138a9b52c67f664c7d51d4c7234c5ad65dd8aacd919fb47445a62c86"}, - {file = "pydantic-1.10.18-cp39-cp39-win_amd64.whl", hash = "sha256:49e26c51ca854286bffc22b69787a8d4063a62bf7d83dc21d44d2ff426108518"}, - {file = "pydantic-1.10.18-py3-none-any.whl", hash = 
"sha256:06a189b81ffc52746ec9c8c007f16e5167c8b0a696e1a726369327e3db7b2a82"}, - {file = "pydantic-1.10.18.tar.gz", hash = "sha256:baebdff1907d1d96a139c25136a9bb7d17e118f133a76a2ef3b845e831e3403a"}, -] - -[package.dependencies] -typing-extensions = ">=4.2.0" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] - -[[package]] -name = "pyrate-limiter" -version = "3.1.1" -description = "Python Rate-Limiter using Leaky-Bucket Algorithm" -optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, - {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, -] - -[package.extras] -all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] -docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] - -[[package]] -name = "pyrsistent" -version = "0.20.0" -description = "Persistent/Functional/Immutable data structures" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, - {file = 
"pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, - {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, - {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, - {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, - {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, - {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, - {file = 
"pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, - {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, - {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, - {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, - {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, - {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, - {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = 
"sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, - {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, - {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, - {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, -] - -[[package]] -name = "pytest" -version = "8.3.2" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} - -[package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-mock" -version = "3.14.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, - {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, -] - -[package.dependencies] -pytest = ">=6.2.5" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the 
standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytzdata" -version = "2020.1" -description = "The Olson timezone database for Python." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, - {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = 
"PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash 
= "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = 
"PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-cache" -version = "1.2.1" -description = "A persistent cache for python requests" -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests_cache-1.2.1-py3-none-any.whl", hash = "sha256:1285151cddf5331067baa82598afe2d47c7495a1334bfe7a7d329b43e9fd3603"}, - {file = "requests_cache-1.2.1.tar.gz", hash = "sha256:68abc986fdc5b8d0911318fbb5f7c80eebcd4d01bfacc6685ecf8876052511d1"}, -] - -[package.dependencies] -attrs = ">=21.2" -cattrs = ">=22.2" -platformdirs = ">=2.5" -requests = ">=2.22" -url-normalize = ">=1.4" -urllib3 = ">=1.25.5" - -[package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] -bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] -dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] -json = ["ujson (>=5.4)"] -mongodb = ["pymongo (>=3)"] -redis = ["redis (>=3)"] -security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=6.0.1)"] - -[[package]] -name = "requests-mock" -version = "1.12.1" -description = "Mock out responses from the requests package" -optional = false 
-python-versions = ">=3.5" -files = [ - {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, - {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, -] - -[package.dependencies] -requests = ">=2.22,<3" - -[package.extras] -fixture = ["fixtures"] - -[[package]] -name = "setuptools" -version = "74.0.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"}, - {file = "setuptools-74.0.0.tar.gz", hash = "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = 
["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "url-normalize" -version = "1.4.3" -description = "URL normalization for Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, - {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "urllib3" -version = "2.2.2" -description = "HTTP library with thread-safe connection 
pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "wcmatch" -version = "8.4" -description = "Wildcard/glob file name matcher." -optional = false -python-versions = ">=3.7" -files = [ - {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, - {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, -] - -[package.dependencies] -bracex = ">=2.1.1" - -[[package]] -name = "wrapt" -version = "1.16.0" -description = "Module for decorators, wrappers and monkey patching." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = 
"sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, -] - -[metadata] -lock-version = "2.0" -python-versions = "^3.9,<3.12" -content-hash = "42e347e9e003b832fcf7ddb3f2f622afb51e9b77a870c448a3eb0f6cfffea3fd" diff --git a/airbyte-integrations/connectors/source-the-guardian-api/pyproject.toml b/airbyte-integrations/connectors/source-the-guardian-api/pyproject.toml deleted file mode 100644 index 0dbce00e4ac2..000000000000 --- a/airbyte-integrations/connectors/source-the-guardian-api/pyproject.toml +++ /dev/null @@ -1,29 +0,0 @@ -[build-system] -requires = [ "poetry-core>=1.0.0",] -build-backend = "poetry.core.masonry.api" - -[tool.poetry] -version = "0.1.9" -name = "source-the-guardian-api" -description = "Source implementation for the guardian api." 
-authors = [ "Airbyte ",] -license = "MIT" -readme = "README.md" -documentation = "https://docs.airbyte.com/integrations/sources/the-guardian-api" -homepage = "https://airbyte.com" -repository = "https://github.com/airbytehq/airbyte" -[[tool.poetry.packages]] -include = "source_the_guardian_api" - -[tool.poetry.dependencies] -python = "^3.9,<3.12" -airbyte-cdk = "0.60.0" - -[tool.poetry.scripts] -source-the-guardian-api = "source_the_guardian_api.run:run" - -[tool.poetry.group.dev.dependencies] -requests-mock = "^1.11.0" -pytest = "^8.0.0" -freezegun = "^1.4.0" -pytest-mock = "^3.6.1" diff --git a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/__init__.py b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/__init__.py deleted file mode 100644 index 21f857392e2a..000000000000 --- a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -from .source import SourceTheGuardianApi - -__all__ = ["SourceTheGuardianApi"] diff --git a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/manifest.yaml b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/manifest.yaml deleted file mode 100644 index 15c90ae95a7d..000000000000 --- a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/manifest.yaml +++ /dev/null @@ -1,73 +0,0 @@ -version: "0.29.0" - -definitions: - selector: - extractor: - field_path: - - response - - results - requester: - url_base: "https://content.guardianapis.com" - http_method: "GET" - request_parameters: - api-key: "{{ config['api_key'] }}" - q: "{{ config['query'] }}" - tag: "{{ config['tag'] }}" - section: "{{ config['section'] }}" - order-by: "oldest" - # from-date: "{{ config['start_date'] }}" - # to-date: "{{ config['end_date'] or now_utc().strftime('%Y-%m-%d') }}" - incremental_sync: - type: DatetimeBasedCursor - start_datetime: - datetime: "{{ config['start_date'] }}" - datetime_format: "%Y-%m-%d" - end_datetime: - datetime: "{{ config['end_date'] or now_utc().strftime('%Y-%m-%d') }}" - datetime_format: "%Y-%m-%d" - step: "P7D" - datetime_format: "%Y-%m-%dT%H:%M:%SZ" - cursor_granularity: "PT1S" - cursor_field: "webPublicationDate" - start_time_option: - field_name: "from-date" - inject_into: "request_parameter" - end_time_option: - field_name: "to-date" - inject_into: "request_parameter" - - retriever: - record_selector: - $ref: "#/definitions/selector" - paginator: - type: "DefaultPaginator" - pagination_strategy: - class_name: "source_the_guardian_api.custom_page_strategy.CustomPageIncrement" - page_size: 10 - page_token_option: - type: RequestOption - inject_into: "request_parameter" - field_name: "page" - page_size_option: - inject_into: "body_data" - field_name: "page_size" - requester: - $ref: "#/definitions/requester" - base_stream: - incremental_sync: - $ref: 
"#/definitions/incremental_sync" - retriever: - $ref: "#/definitions/retriever" - content_stream: - $ref: "#/definitions/base_stream" - $parameters: - name: "content" - primary_key: "id" - path: "/search" - -streams: - - "#/definitions/content_stream" - -check: - stream_names: - - "content" diff --git a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/run.py b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/run.py deleted file mode 100644 index eafbdfd35301..000000000000 --- a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/run.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from airbyte_cdk.entrypoint import launch -from source_the_guardian_api import SourceTheGuardianApi - - -def run(): - source = SourceTheGuardianApi() - launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/schemas/content.json b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/schemas/content.json deleted file mode 100644 index 3751793358c4..000000000000 --- a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/schemas/content.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "type": { - "type": "string" - }, - "sectionId": { - "type": "string" - }, - "sectionName": { - "type": "string" - }, - "webPublicationDate": { - "type": "string" - }, - "webTitle": { - "type": "string" - }, - "webUrl": { - "type": "string" - }, - "apiUrl": { - "type": "string" - }, - "isHosted": { - "type": "boolean" - }, - "pillarId": { - "type": "string" - }, - "pillarName": { - "type": "string" - } - }, - "required": [ - "id", - "type", - "sectionId", - "sectionName", - "webPublicationDate", - "webTitle", - "webUrl", 
- "apiUrl", - "isHosted", - "pillarId", - "pillarName" - ] -} diff --git a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/source.py b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/source.py deleted file mode 100644 index 3d1ad28962e3..000000000000 --- a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/source.py +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource - -""" -This file provides the necessary constructs to interpret a provided declarative YAML configuration file into -source connector. - -WARNING: Do not modify this file. -""" - - -# Declarative Source -class SourceTheGuardianApi(YamlDeclarativeSource): - def __init__(self): - super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/spec.yaml b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/spec.yaml deleted file mode 100644 index b9e0e1f3a767..000000000000 --- a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/spec.yaml +++ /dev/null @@ -1,54 +0,0 @@ -documentationUrl: https://docs.airbyte.com/integrations/sources/the-guardian-api -connectionSpecification: - $schema: http://json-schema.org/draft-07/schema# - title: The Guardian Api Spec - type: object - required: - - api_key - - start_date - additionalProperties: true - properties: - api_key: - title: API Key - type: string - description: Your API Key. See here. The key is case sensitive. - airbyte_secret: true - start_date: - title: Start Date - type: string - description: Use this to set the minimum date (YYYY-MM-DD) of the results. Results older than the start_date will not be shown. 
- pattern: ^([1-9][0-9]{3})\-(0?[1-9]|1[012])\-(0?[1-9]|[12][0-9]|3[01])$ - examples: - - YYYY-MM-DD - query: - title: Query - type: string - description: (Optional) The query (q) parameter filters the results to only those that include that search term. The q parameter supports AND, OR and NOT operators. - examples: - - environment AND NOT water - - environment AND political - - amusement park - - political - tag: - title: Tag - type: string - description: (Optional) A tag is a piece of data that is used by The Guardian to categorise content. Use this parameter to filter results by showing only the ones matching the entered tag. See here for a list of all tags, and here for the tags endpoint documentation. - examples: - - environment/recycling - - environment/plasticbags - - environment/energyefficiency - section: - title: Section - type: string - description: (Optional) Use this to filter the results by a particular section. See here for a list of all sections, and here for the sections endpoint documentation. - examples: - - media - - technology - - housing-network - end_date: - title: End Date - type: string - description: (Optional) Use this to set the maximum date (YYYY-MM-DD) of the results. Results newer than the end_date will not be shown. Default is set to the current date (today) for incremental syncs. - pattern: ^([1-9][0-9]{3})\-(0?[1-9]|1[012])\-(0?[1-9]|[12][0-9]|3[01])$ - examples: - - YYYY-MM-DD diff --git a/docs/integrations/sources/the-guardian-api.md b/docs/integrations/sources/the-guardian-api.md index 66ea22eee31a..26b69574f3a7 100644 --- a/docs/integrations/sources/the-guardian-api.md +++ b/docs/integrations/sources/the-guardian-api.md @@ -111,17 +111,18 @@ The key that you are assigned is rate-limited and as such any applications that
Expand to review -| Version | Date | Pull Request | Subject | -| :------ | :--------- | :-------------------------------------------------------- | :--------------------------------------------- | -| 0.1.9 | 2024-08-31 | [44997](https://github.com/airbytehq/airbyte/pull/44997) | Update dependencies | -| 0.1.8 | 2024-08-24 | [44746](https://github.com/airbytehq/airbyte/pull/44746) | Update dependencies | -| 0.1.7 | 2024-08-17 | [44208](https://github.com/airbytehq/airbyte/pull/44208) | Update dependencies | -| 0.1.6 | 2024-08-10 | [43540](https://github.com/airbytehq/airbyte/pull/43540) | Update dependencies | -| 0.1.5 | 2024-08-03 | [42781](https://github.com/airbytehq/airbyte/pull/42781) | Update dependencies | -| 0.1.4 | 2024-07-20 | [42316](https://github.com/airbytehq/airbyte/pull/42316) | Update dependencies | -| 0.1.3 | 2024-07-13 | [41878](https://github.com/airbytehq/airbyte/pull/41878) | Update dependencies | -| 0.1.2 | 2024-07-10 | [41505](https://github.com/airbytehq/airbyte/pull/41505) | Update dependencies | -| 0.1.1 | 2024-07-10 | [41049](https://github.com/airbytehq/airbyte/pull/41049) | Migrate to poetry | -| 0.1.0 | 2022-10-30 | [#18654](https://github.com/airbytehq/airbyte/pull/18654) | 🎉 New Source: The Guardian API [low-code CDK] | +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :------------------------------------------------------- | :--------------------------------------------- | +| 0.2.0 | 2024-09-06 | [45195](https://github.com/airbytehq/airbyte/pull/45195) | Refactor connector to manifest-only format | +| 0.1.9 | 2024-08-31 | [44997](https://github.com/airbytehq/airbyte/pull/44997) | Update dependencies | +| 0.1.8 | 2024-08-24 | [44746](https://github.com/airbytehq/airbyte/pull/44746) | Update dependencies | +| 0.1.7 | 2024-08-17 | [44208](https://github.com/airbytehq/airbyte/pull/44208) | Update dependencies | +| 0.1.6 | 2024-08-10 | 
[43540](https://github.com/airbytehq/airbyte/pull/43540) | Update dependencies | +| 0.1.5 | 2024-08-03 | [42781](https://github.com/airbytehq/airbyte/pull/42781) | Update dependencies | +| 0.1.4 | 2024-07-20 | [42316](https://github.com/airbytehq/airbyte/pull/42316) | Update dependencies | +| 0.1.3 | 2024-07-13 | [41878](https://github.com/airbytehq/airbyte/pull/41878) | Update dependencies | +| 0.1.2 | 2024-07-10 | [41505](https://github.com/airbytehq/airbyte/pull/41505) | Update dependencies | +| 0.1.1 | 2024-07-10 | [41049](https://github.com/airbytehq/airbyte/pull/41049) | Migrate to poetry | +| 0.1.0 | 2022-10-30 | [18654](https://github.com/airbytehq/airbyte/pull/18654) | 🎉 New Source: The Guardian API [low-code CDK] |
From 13168c060bfff3957f20a0df29a043119ff5f13e Mon Sep 17 00:00:00 2001 From: bgroff Date: Fri, 6 Sep 2024 18:23:42 +0000 Subject: [PATCH 50/51] Bump Airbyte version from 0.64.2 to 0.64.3 --- .bumpversion.cfg | 2 +- gradle.properties | 2 +- run-ab-platform.sh | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 47fb05b997bc..9b9e0d3084ea 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.64.2 +current_version = 0.64.3 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/gradle.properties b/gradle.properties index a6eb6a999e1d..5f201c5723ca 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,4 +1,4 @@ -VERSION=0.64.2 +VERSION=0.64.3 # NOTE: some of these values are overwritten in CI! # NOTE: if you want to override this for your local machine, set overrides in ~/.gradle/gradle.properties diff --git a/run-ab-platform.sh b/run-ab-platform.sh index bc2c6cabd508..7011294338dc 100755 --- a/run-ab-platform.sh +++ b/run-ab-platform.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -VERSION=0.64.2 +VERSION=0.64.3 # Run away from anything even a little scary set -o nounset # -u exit if a variable is not set set -o errexit # -f exit for any command failure" From f23ae56693c931e36028a61ea1b11ffdd25762e2 Mon Sep 17 00:00:00 2001 From: Maxime Carbonneau-Leclerc <3360483+maxi297@users.noreply.github.com> Date: Fri, 6 Sep 2024 14:30:36 -0400 Subject: [PATCH 51/51] bug(airbyte-cdk) Fix extras not being printed (#45198) --- airbyte-cdk/python/airbyte_cdk/logger.py | 2 +- airbyte-cdk/python/unit_tests/test_logger.py | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/airbyte-cdk/python/airbyte_cdk/logger.py b/airbyte-cdk/python/airbyte_cdk/logger.py index 72673dcfa8f7..da3f869f2f32 100644 --- a/airbyte-cdk/python/airbyte_cdk/logger.py +++ b/airbyte-cdk/python/airbyte_cdk/logger.py @@ -53,7 +53,7 @@ class 
AirbyteLogFormatter(logging.Formatter): def format(self, record: logging.LogRecord) -> str: """Return a JSON representation of the log message""" airbyte_level = self.level_mapping.get(record.levelno, "INFO") - if airbyte_level == "DEBUG": + if airbyte_level == Level.DEBUG: extras = self.extract_extra_args_from_record(record) debug_dict = {"type": "DEBUG", "message": record.getMessage(), "data": extras} return filter_secrets(json.dumps(debug_dict)) diff --git a/airbyte-cdk/python/unit_tests/test_logger.py b/airbyte-cdk/python/unit_tests/test_logger.py index df44cd22e006..3b6db8b89232 100644 --- a/airbyte-cdk/python/unit_tests/test_logger.py +++ b/airbyte-cdk/python/unit_tests/test_logger.py @@ -51,12 +51,15 @@ def test_level_transform(logger, caplog): def test_debug(logger, caplog): # Test debug logger in isolation since the default logger is initialized to TRACE (15) instead of DEBUG (10). + formatter = AirbyteLogFormatter() debug_logger = logging.getLogger("airbyte.Debuglogger") debug_logger.setLevel(logging.DEBUG) - debug_logger.debug("Test debug 1") + debug_logger.debug("Test debug 1", extra={"extra_field": "extra value"}) record = caplog.records[0] - assert record.levelname == "DEBUG" - assert record.message == "Test debug 1" + formatted_record = json.loads(formatter.format(record)) + assert formatted_record["type"] == "DEBUG" + assert formatted_record["message"] == "Test debug 1" + assert formatted_record["data"]["extra_field"] == "extra value" def test_default_debug_is_ignored(logger, caplog):