Isort as a pre-commit hook #86

Merged · 3 commits · Feb 26, 2021 · changes shown from 2 commits
8 changes: 7 additions & 1 deletion .pre-commit-config.yaml
@@ -11,4 +11,10 @@ repos:
- repo: https://github.com/pre-commit/mirrors-mypy.git
  rev: v0.790
  hooks:
  - id: mypy
  - id: mypy
- repo: https://github.com/pycqa/isort
  rev: 5.7.0
  hooks:
  - id: isort
    args: ["--profile", "black"]
Contributor (author) commented:
Added to resolve the conflict between black and isort; see:

PyCQA/isort#1518 (comment)
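
For context: isort's `black` profile sets options such as `multi_line_output = 3`, `include_trailing_comma = true`, `use_parentheses = true` and `line_length = 88`, so the two tools agree on how long imports get wrapped. A minimal sketch of the effect (the input here is hypothetical; the module name is taken from this diff):

    # Hypothetical input in isort's default "grid" wrapping, which black
    # would immediately rewrap, leaving the two hooks fighting over the file:
    from streaming_data_types.forwarder_config_update_rf5k import (serialise_rf5k,
                                                                   StreamInfo, Protocol)

    # With --profile black: one name per line, trailing comma, closing
    # parenthesis on its own line, i.e. the style black itself emits:
    from streaming_data_types.forwarder_config_update_rf5k import (
        Protocol,
        StreamInfo,
        serialise_rf5k,
    )

With the hook in place, `pre-commit install` registers it in a local clone and `pre-commit run isort --all-files` applies it to the whole repository, which is presumably what produced the import reshuffling in the files below; `isort --profile black .` gives the same result when invoking isort directly.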


3 changes: 2 additions & 1 deletion forwarder/application_logger.py
@@ -1,7 +1,8 @@
import logging
import graypy
from typing import Optional

import graypy

logger_name = "python-forwarder"


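The pattern repeated in every file below is isort's default section ordering, which the `black` profile leaves unchanged: standard-library imports first, then third-party packages, then first-party code, with a blank line between groups, and plain `import x` statements ahead of `from x import y` within each group (as these diffs show). A sketch of the resulting layout, using the imports from forwarder/kafka/kafka_producer.py further down:

    from typing import Optional  # standard library

    import confluent_kafka  # third-party

    from forwarder.application_logger import setup_logger  # first-party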
12 changes: 7 additions & 5 deletions forwarder/configuration_store.py
@@ -1,15 +1,17 @@
import time
from typing import Dict
from unittest import mock

from confluent_kafka import TopicPartition
from streaming_data_types.forwarder_config_update_rf5k import (
    serialise_rf5k,
    StreamInfo,
    Protocol,
)
from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
    UpdateType,
)
from streaming_data_types.forwarder_config_update_rf5k import (
    Protocol,
    StreamInfo,
    serialise_rf5k,
)

from forwarder.parse_config_update import EpicsProtocol


2 changes: 1 addition & 1 deletion forwarder/epics_to_serialisable_types.py
@@ -1,7 +1,7 @@
import numpy as np
from caproto import ChannelType
from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity
from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus

# caproto can give us values of different dtypes even from the same EPICS channel,
# for example it will use the smallest integer type it can for the particular value,
18 changes: 11 additions & 7 deletions forwarder/handle_config_change.py
@@ -1,14 +1,18 @@
from forwarder.update_handlers.create_update_handler import create_update_handler
from forwarder.parse_config_update import CommandType, Channel, ConfigUpdate
from typing import Optional, Dict
import fnmatch
from logging import Logger
from forwarder.status_reporter import StatusReporter
from forwarder.configuration_store import ConfigurationStore, NullConfigurationStore
from typing import Dict, Optional

from caproto.threading.client import Context as CaContext
from p4p.client.thread import Context as PvaContext

from forwarder.configuration_store import ConfigurationStore, NullConfigurationStore
from forwarder.kafka.kafka_producer import KafkaProducer
from forwarder.update_handlers.create_update_handler import UpdateHandler
import fnmatch
from forwarder.parse_config_update import Channel, CommandType, ConfigUpdate
from forwarder.status_reporter import StatusReporter
from forwarder.update_handlers.create_update_handler import (
    UpdateHandler,
    create_update_handler,
)


def _subscribe_to_pv(
18 changes: 10 additions & 8 deletions forwarder/kafka/kafka_helpers.py
@@ -1,16 +1,18 @@
from confluent_kafka import Consumer
from .kafka_producer import KafkaProducer
from streaming_data_types.logdata_f142 import serialise_f142
from streaming_data_types.timestamps_tdct import serialise_tdct
from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity
import uuid
from typing import Dict, Optional, Tuple, Union

import numpy as np
from typing import Optional, Tuple, Dict, Union
from confluent_kafka import Consumer
from streaming_data_types.epics_connection_info_ep00 import serialise_ep00
from streaming_data_types.fbschemas.epics_connection_info_ep00.EventType import (
    EventType as ConnectionStatusEventType,
)
from streaming_data_types.epics_connection_info_ep00 import serialise_ep00
from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity
from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
from streaming_data_types.logdata_f142 import serialise_f142
from streaming_data_types.timestamps_tdct import serialise_tdct

from .kafka_producer import KafkaProducer


def create_producer(broker_address: str) -> KafkaProducer:
6 changes: 4 additions & 2 deletions forwarder/kafka/kafka_producer.py
@@ -1,8 +1,10 @@
import confluent_kafka
from threading import Thread
from forwarder.application_logger import setup_logger
from typing import Optional

import confluent_kafka

from forwarder.application_logger import setup_logger


class KafkaProducer:
    def __init__(self, configs: dict):
7 changes: 4 additions & 3 deletions forwarder/parse_commandline_args.py
@@ -1,9 +1,10 @@
import configparser
import logging
import configargparse
from os import getpid
from socket import gethostname
import configparser
from pathlib import Path
from socket import gethostname

import configargparse


class VersionArgParser(configargparse.ArgumentParser):
20 changes: 11 additions & 9 deletions forwarder/parse_config_update.py
@@ -1,20 +1,22 @@
from forwarder.application_logger import get_logger
import attr
from enum import Enum
from typing import Tuple, Generator, Optional, List
from typing import Generator, List, Optional, Tuple

import attr
from flatbuffers.packer import struct as flatbuffer_struct
from streaming_data_types.exceptions import WrongSchemaException
from streaming_data_types.forwarder_config_update_rf5k import (
    deserialise_rf5k,
    StreamInfo,
from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
    Protocol,
)
from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
    UpdateType,
)
from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
    Protocol,
from streaming_data_types.forwarder_config_update_rf5k import (
    StreamInfo,
    deserialise_rf5k,
)

from forwarder.application_logger import get_logger
from forwarder.update_handlers.schema_publishers import schema_publishers
from flatbuffers.packer import struct as flatbuffer_struct

logger = get_logger()

2 changes: 1 addition & 1 deletion forwarder/repeat_timer.py
@@ -1,5 +1,5 @@
from threading import Timer, Lock
from datetime import datetime, timedelta
from threading import Lock, Timer


def milliseconds_to_seconds(time_ms: int) -> float:
16 changes: 9 additions & 7 deletions forwarder/status_reporter.py
@@ -1,14 +1,16 @@
from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
from forwarder.kafka.kafka_producer import KafkaProducer
from typing import Dict
from streaming_data_types.status_x5f2 import serialise_x5f2
import json
import time
from socket import gethostname
from os import getpid
from logging import Logger
from forwarder.update_handlers.create_update_handler import UpdateHandler
from os import getpid
from socket import gethostname
from typing import Dict

from streaming_data_types.status_x5f2 import serialise_x5f2

from forwarder.kafka.kafka_producer import KafkaProducer
from forwarder.parse_config_update import Channel
from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
from forwarder.update_handlers.create_update_handler import UpdateHandler


class StatusReporter:
26 changes: 14 additions & 12 deletions forwarder/update_handlers/ca_update_handler.py
@@ -1,23 +1,25 @@
from forwarder.application_logger import get_logger
from forwarder.kafka.kafka_producer import KafkaProducer
from caproto import ReadNotifyResponse, ChannelType
import numpy as np
import time
from threading import Lock
from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
from typing import Any, Optional, Tuple

import numpy as np
from caproto import ChannelType, ReadNotifyResponse
from caproto.threading.client import PV
from caproto.threading.client import Context as CAContext

from forwarder.application_logger import get_logger
from forwarder.epics_to_serialisable_types import (
    numpy_type_from_caproto_type,
    epics_alarm_severity_to_f142,
    ca_alarm_status_to_f142,
    epics_alarm_severity_to_f142,
    numpy_type_from_caproto_type,
)
from caproto.threading.client import Context as CAContext
from caproto.threading.client import PV
from typing import Optional, Tuple, Any
from forwarder.update_handlers.schema_publishers import schema_publishers
import time
from forwarder.kafka.kafka_helpers import (
    publish_connection_status_message,
    seconds_to_nanoseconds,
)
from forwarder.kafka.kafka_producer import KafkaProducer
from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
from forwarder.update_handlers.schema_publishers import schema_publishers


class CAUpdateHandler:
11 changes: 6 additions & 5 deletions forwarder/update_handlers/create_update_handler.py
@@ -1,13 +1,14 @@
from forwarder.parse_config_update import EpicsProtocol
from forwarder.parse_config_update import Channel as ConfigChannel
from forwarder.kafka.kafka_producer import KafkaProducer
from typing import Optional, Union

from caproto.threading.client import Context as CAContext
from p4p.client.thread import Context as PVAContext

from forwarder.kafka.kafka_producer import KafkaProducer
from forwarder.parse_config_update import Channel as ConfigChannel
from forwarder.parse_config_update import EpicsProtocol
from forwarder.update_handlers.ca_update_handler import CAUpdateHandler
from forwarder.update_handlers.pva_update_handler import PVAUpdateHandler
from forwarder.update_handlers.fake_update_handler import FakeUpdateHandler

from forwarder.update_handlers.pva_update_handler import PVAUpdateHandler

UpdateHandler = Union[CAUpdateHandler, PVAUpdateHandler, FakeUpdateHandler]

8 changes: 5 additions & 3 deletions forwarder/update_handlers/fake_update_handler.py
@@ -1,9 +1,11 @@
from forwarder.kafka.kafka_producer import KafkaProducer
import time
from random import randint

import numpy as np

from forwarder.kafka.kafka_producer import KafkaProducer
from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
import time
from forwarder.update_handlers.schema_publishers import schema_publishers
from random import randint


class FakeUpdateHandler:
25 changes: 14 additions & 11 deletions forwarder/update_handlers/pva_update_handler.py
@@ -1,24 +1,27 @@
from p4p.client.thread import Context as PVAContext
import time
from threading import Lock
from typing import Any, Optional, Tuple, Union

import numpy as np
from p4p import Value
from forwarder.kafka.kafka_producer import KafkaProducer
from p4p.client.thread import Cancelled
from p4p.client.thread import Context as PVAContext
from p4p.client.thread import Disconnected, RemoteError
from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus

from forwarder.application_logger import get_logger
from typing import Optional, Tuple, Union, Any
from threading import Lock
from forwarder.update_handlers.schema_publishers import schema_publishers
from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
from forwarder.epics_to_serialisable_types import (
    numpy_type_from_p4p_type,
    epics_alarm_severity_to_f142,
    numpy_type_from_p4p_type,
    pva_alarm_message_to_f142_alarm_status,
)
from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
import numpy as np
from forwarder.kafka.kafka_helpers import (
    publish_connection_status_message,
    seconds_to_nanoseconds,
)
from p4p.client.thread import Cancelled, Disconnected, RemoteError
import time
from forwarder.kafka.kafka_producer import KafkaProducer
from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
from forwarder.update_handlers.schema_publishers import schema_publishers


def _get_alarm_status(response):
4 changes: 2 additions & 2 deletions forwarder/update_handlers/schema_publishers.py
@@ -1,6 +1,6 @@
from forwarder.kafka.kafka_helpers import publish_f142_message, publish_tdct_message
from typing import Dict, Callable
from typing import Callable, Dict

from forwarder.kafka.kafka_helpers import publish_f142_message, publish_tdct_message

schema_publishers: Dict[str, Callable] = {
"f142": publish_f142_message,
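Given the two helpers imported above, the truncated mapping presumably continues with the tdct entry; a sketch, not the verbatim file:

    schema_publishers: Dict[str, Callable] = {
        "f142": publish_f142_message,
        "tdct": publish_tdct_message,
    }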
16 changes: 7 additions & 9 deletions forwarder_launch.py
@@ -6,21 +6,19 @@
from caproto.threading.client import Context as CaContext
from p4p.client.thread import Context as PvaContext

from forwarder.application_logger import setup_logger
from forwarder.configuration_store import ConfigurationStore, NullConfigurationStore
from forwarder.handle_config_change import handle_configuration_change
from forwarder.kafka.kafka_helpers import (
    create_producer,
    create_consumer,
    create_producer,
    get_broker_and_topic_from_uri,
)
from forwarder.application_logger import setup_logger
from forwarder.parse_config_update import parse_config_update
from forwarder.status_reporter import StatusReporter
from forwarder.parse_commandline_args import get_version, parse_args
from forwarder.parse_config_update import Channel, parse_config_update
from forwarder.statistics_reporter import StatisticsReporter
from forwarder.parse_commandline_args import parse_args, get_version
from forwarder.handle_config_change import handle_configuration_change
from forwarder.status_reporter import StatusReporter
from forwarder.update_handlers.create_update_handler import UpdateHandler
from forwarder.parse_config_update import Channel
from forwarder.configuration_store import ConfigurationStore, NullConfigurationStore


if __name__ == "__main__":
    args = parse_args()
14 changes: 8 additions & 6 deletions manual_testing/add_ca_config.py
@@ -1,12 +1,14 @@
from forwarder.kafka.kafka_helpers import create_producer
from streaming_data_types.forwarder_config_update_rf5k import serialise_rf5k, StreamInfo
from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
    UpdateType,
)
import time

from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
    Protocol,
)
import time
from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
    UpdateType,
)
from streaming_data_types.forwarder_config_update_rf5k import StreamInfo, serialise_rf5k

from forwarder.kafka.kafka_helpers import create_producer

"""
"docker-compose up" first!
14 changes: 8 additions & 6 deletions manual_testing/add_fake_config.py
@@ -1,12 +1,14 @@
from forwarder.kafka.kafka_helpers import create_producer
from streaming_data_types.forwarder_config_update_rf5k import serialise_rf5k, StreamInfo
from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
    UpdateType,
)
import time

from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
    Protocol,
)
import time
from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
    UpdateType,
)
from streaming_data_types.forwarder_config_update_rf5k import StreamInfo, serialise_rf5k

from forwarder.kafka.kafka_helpers import create_producer

"""
"docker-compose up" first!