From 415c9cec4789a55c7a6f5be372b0ad48be0b6cb6 Mon Sep 17 00:00:00 2001 From: "Bruno P. Kinoshita" Date: Wed, 7 Sep 2022 16:47:32 +1200 Subject: [PATCH 01/50] Fix a few typos --- aviso-server/admin/migration/etcd_migration.py | 2 +- aviso-server/admin/tests/store_size_cycle.py | 2 +- aviso-server/auth/aviso_auth/authorisation.py | 6 +++--- .../aviso_monitoring/reporter/aviso_auth_reporter.py | 2 +- .../aviso_monitoring/reporter/aviso_rest_reporter.py | 2 +- .../monitoring/aviso_monitoring/reporter/etcd_reporter.py | 2 +- .../aviso_monitoring/reporter/opsview_reporter.py | 2 +- docs/source/contributing/aviso_server.rst | 2 +- docs/source/reference/configuration.rst | 2 +- pyaviso/cli_aviso.py | 2 +- pyaviso/engine/etcd_rest_engine.py | 4 ++-- pyaviso/triggers/post_trigger.py | 2 +- tests/system/test_aviso.py | 6 +++--- 13 files changed, 18 insertions(+), 18 deletions(-) diff --git a/aviso-server/admin/migration/etcd_migration.py b/aviso-server/admin/migration/etcd_migration.py index ab8f2d8..9b0b045 100644 --- a/aviso-server/admin/migration/etcd_migration.py +++ b/aviso-server/admin/migration/etcd_migration.py @@ -89,7 +89,7 @@ def pull_kvpairs(etcd_repo, revision): resp = requests.post(old_etcd_url, json=body) resp.raise_for_status() - print("Retrival completed") + print("Retrieval completed") # parse the result to return just key-value pairs new_kvs = [] diff --git a/aviso-server/admin/tests/store_size_cycle.py b/aviso-server/admin/tests/store_size_cycle.py index 7cc55cc..7e7b1eb 100644 --- a/aviso-server/admin/tests/store_size_cycle.py +++ b/aviso-server/admin/tests/store_size_cycle.py @@ -148,7 +148,7 @@ def defrag(): # print(f"Total number of keys deleted {tot}") # defrag # r = defrag() - # print(f"Deframentation: {r}") + # print(f"Defragmentation: {r}") print(store_size()) # print(f"Current store size {store_size()}") diff --git a/aviso-server/auth/aviso_auth/authorisation.py b/aviso-server/auth/aviso_auth/authorisation.py index cb322f0..27386d8 100644 --- 
a/aviso-server/auth/aviso_auth/authorisation.py +++ b/aviso-server/auth/aviso_auth/authorisation.py @@ -62,7 +62,7 @@ def _allowed_destinations_impl(self, username: str): :param username: :return: - the list of allowed destinations associated to this username if valid - - UserNotFoundException if the user is not registred in ECPDS + - UserNotFoundException if the user is not registered in ECPDS - AuthorisationUnavailableException if the ECPDS server is unreachable - InternalSystemError otherwise """ @@ -132,7 +132,7 @@ def is_authorised_impl(self, username: str, request): :return: - True if authorised - False if not authorised - - UserNotFoundException if the user is not registred in ECPDS + - UserNotFoundException if the user is not registered in ECPDS - AuthorisationUnavailableException if the ECPDS server is unreachable - InternalSystemError otherwise """ @@ -160,7 +160,7 @@ def _is_backend_key_allowed(self, username: str, backend_key: str): :return: - True if authorised - False if not authorised - - UserNotFoundException if the user is not registred in ECPDS + - UserNotFoundException if the user is not registered in ECPDS - AuthorisationUnavailableException if the ECPDS server is unreachable - InternalSystemError otherwise """ diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py index ec7c39e..91c8c21 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py @@ -210,7 +210,7 @@ def metric(self): # fetch the cluster metrics if self.metric_server_url: - metrics = OpsviewReporter.retrive_metrics([self.metric_server_url], self.req_timeout)[ + metrics = OpsviewReporter.retrieve_metrics([self.metric_server_url], self.req_timeout)[ self.metric_server_url ] if metrics: diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py 
b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py index 2d99ad8..e71cdb2 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py @@ -191,7 +191,7 @@ def metric(self): # fetch the cluster metrics if self.metric_server_url: - metrics = OpsviewReporter.retrive_metrics([self.metric_server_url], self.req_timeout)[ + metrics = OpsviewReporter.retrieve_metrics([self.metric_server_url], self.req_timeout)[ self.metric_server_url ] if metrics: diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py index c2cc1e4..a0c79a2 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py @@ -33,7 +33,7 @@ def process_messages(self): logger.debug("Etcd processing metrics...") # fetch the raw tlms provided by etcd - raw_tlms = OpsviewReporter.retrive_metrics(self.member_urls, self.req_timeout) # noqa: F841 + raw_tlms = OpsviewReporter.retrieve_metrics(self.member_urls, self.req_timeout) # noqa: F841 # array of metric to return metrics = [] diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py index 08e9ec7..09ba543 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py @@ -199,7 +199,7 @@ def aggregate_unique_counter_tlms(tlms): } return agg_tlm - def retrive_metrics(metric_servers, req_timeout): + def retrieve_metrics(metric_servers, req_timeout): """ This methods retrieves the metrics provided by specific metric servers using a Prometheus interface. 
""" diff --git a/docs/source/contributing/aviso_server.rst b/docs/source/contributing/aviso_server.rst index 18bd03f..2e46fbe 100644 --- a/docs/source/contributing/aviso_server.rst +++ b/docs/source/contributing/aviso_server.rst @@ -17,7 +17,7 @@ The source of the components presented here is available in the ``aviso-server`` Key-Value store --------------- -The core component of Aviso Server is a Key-Value store. This is a critical component because it guarantees persistency and consistency for all the notifications processed. +The core component of Aviso Server is a Key-Value store. This is a critical component because it guarantees persistence and consistency for all the notifications processed. The current Key-Value store technology used is etcd_. This is a strongly consistent, distributed key-value store able to reach consensus thanks to Raft algorithm. This allows it to gracefully tolerate machine failure and network partition. Moreover, it is designed for high-throughput. We are running it in is default configuration of a cluster of 3 components. diff --git a/docs/source/reference/configuration.rst b/docs/source/reference/configuration.rst index 3db5573..05ada17 100644 --- a/docs/source/reference/configuration.rst +++ b/docs/source/reference/configuration.rst @@ -13,7 +13,7 @@ where each step merges on the previous one: 3. Loading Home config file `~/.aviso/config.yaml` 4. Loading config defined by environment variables, AVISO_CONFIG 5. Loading config defined by command line option, ``-c``, ``--config`` -6. Loading all environnement variables +6. Loading all environment variables 7. Loading all command line options System and Home config files are optional. 
diff --git a/pyaviso/cli_aviso.py b/pyaviso/cli_aviso.py index 7f933ad..dd0467f 100644 --- a/pyaviso/cli_aviso.py +++ b/pyaviso/cli_aviso.py @@ -169,7 +169,7 @@ def user_config_setup(f): @click.option("--key", "-k", help="File path to the key required to authenticate to the server.") @functools.wraps(f) def functor(*args, **kwargs): - # CLIK automatically sets the flags, put back None values like for the other parameters + # CLICK automatically sets the flags, put back None values like for the other parameters kwargs["debug"] = None if not kwargs["debug"] else True kwargs["quiet"] = None if not kwargs["quiet"] else True kwargs["no_fail"] = None if not kwargs["no_fail"] else True diff --git a/pyaviso/engine/etcd_rest_engine.py b/pyaviso/engine/etcd_rest_engine.py index efb05e9..3287676 100644 --- a/pyaviso/engine/etcd_rest_engine.py +++ b/pyaviso/engine/etcd_rest_engine.py @@ -120,7 +120,7 @@ def pull( f"Not able to pull key {key}, status {resp.status_code}, {resp.reason}, " f"{resp.content.decode()}" ) - # we got a good responce, exit from the loop + # we got a good response, exit from the loop break logger.debug(f"Query for {key} completed") @@ -304,7 +304,7 @@ def _latest_revision(self, key: str) -> int: f"{resp.content.decode()}" ) - # we got a good responce, exit from the loop + # we got a good response, exit from the loop break logger.debug("Query for latest revision completed") diff --git a/pyaviso/triggers/post_trigger.py b/pyaviso/triggers/post_trigger.py index a995d5c..d2cceed 100644 --- a/pyaviso/triggers/post_trigger.py +++ b/pyaviso/triggers/post_trigger.py @@ -117,7 +117,7 @@ def execute(self): class PostCloudEventsAws: """ - This class implements a trigger in charge of translating the notification in a CloudEvents messag and send it to a + This class implements a trigger in charge of translating the notification in a CloudEvents message and send it to a AWS topic specified by the user. 
""" diff --git a/tests/system/test_aviso.py b/tests/system/test_aviso.py index c416494..2a53105 100644 --- a/tests/system/test_aviso.py +++ b/tests/system/test_aviso.py @@ -120,7 +120,7 @@ def caplog_for_logger(caplog): # this is needed to assert over the logging outp lo.removeHandler(caplog.handler) -def reset_previuous_run(): +def reset_previous_run(): file_path = "tests/system/fixtures/received.txt" full_path = os.path.join(os.getcwd(), file_path) if os.path.exists(full_path): @@ -136,7 +136,7 @@ def test_command_listener(config): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) # delete previous file of notification - file_path = reset_previuous_run() + file_path = reset_previous_run() aviso._listen(config, ["tests/system/fixtures/listeners/command_listener.yaml"]) @@ -157,7 +157,7 @@ def test_command_listener(config): assert received # delete result of notification - file_path = reset_previuous_run() + file_path = reset_previous_run() @pytest.mark.parametrize("config", configs) From 19a8c67ca9fbaf6bbe2c5956f73cf82e58103276 Mon Sep 17 00:00:00 2001 From: sametd Date: Mon, 31 Jul 2023 17:34:57 +0200 Subject: [PATCH 02/50] updated requirements to fix github action issues --- requirements.txt | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/requirements.txt b/requirements.txt index 83fb9e5..ab40880 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,7 +7,4 @@ parse pyinotify cloudevents>=1.2.0 boto3 - - - - +protobuf<=3.20.3 \ No newline at end of file From 81c13231979fb1632725fecfb934adde2dc68bde Mon Sep 17 00:00:00 2001 From: sametd Date: Mon, 31 Jul 2023 17:37:37 +0200 Subject: [PATCH 03/50] updated requirements to fix github action issues --- .github/workflows/check.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/check.yaml b/.github/workflows/check.yaml index e79e6ce..77bb4a3 100644 --- a/.github/workflows/check.yaml +++ b/.github/workflows/check.yaml @@ 
-23,8 +23,8 @@ jobs: strategy: fail-fast: false matrix: - platform: ["ubuntu-latest"] - python-version: ["3.6", "3.7", "3.8", "3.9"] + platform: ["ubuntu-20.04"] + python-version: ["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"] name: Python ${{ matrix.python-version }} on ${{ matrix.platform }} runs-on: ${{ matrix.platform }} From 91453b1215b5b129f57bafe458023a73d45874da Mon Sep 17 00:00:00 2001 From: sametd Date: Mon, 31 Jul 2023 17:41:43 +0200 Subject: [PATCH 04/50] black formatting --- aviso-server/admin/aviso_admin/config.py | 1 - aviso-server/auth/aviso_auth/authorisation.py | 1 - aviso-server/auth/aviso_auth/config.py | 1 - aviso-server/monitoring/aviso_monitoring/collector/config.py | 1 - .../monitoring/aviso_monitoring/collector/transmitter.py | 1 - aviso-server/monitoring/aviso_monitoring/config.py | 2 -- .../aviso_monitoring/reporter/aviso_auth_reporter.py | 1 - .../aviso_monitoring/reporter/aviso_rest_reporter.py | 1 - .../monitoring/aviso_monitoring/reporter/etcd_reporter.py | 1 - .../aviso_monitoring/reporter/prometheus_reporter.py | 1 - aviso-server/monitoring/tests/test_prometheus_reporter.py | 1 - aviso-server/rest/aviso_rest/config.py | 2 -- pyaviso/cli_aviso.py | 2 -- pyaviso/service_config_manager.py | 1 - pyaviso/triggers/post_trigger.py | 2 -- pyaviso/triggers/trigger_factory.py | 1 - pyaviso/user_config.py | 1 - tests/unit/test_etcd_engine.py | 2 -- tests/unit/test_triggers.py | 3 --- 19 files changed, 26 deletions(-) diff --git a/aviso-server/admin/aviso_admin/config.py b/aviso-server/admin/aviso_admin/config.py index 3e79a5f..897ce2a 100644 --- a/aviso-server/admin/aviso_admin/config.py +++ b/aviso-server/admin/aviso_admin/config.py @@ -158,7 +158,6 @@ def _read_env_variables(self): return config def logging_setup(self, logging_conf_path): - if logging_conf_path is not None: try: with open(logging_conf_path, "r") as f: diff --git a/aviso-server/auth/aviso_auth/authorisation.py b/aviso-server/auth/aviso_auth/authorisation.py index 
27386d8..89692dd 100644 --- a/aviso-server/auth/aviso_auth/authorisation.py +++ b/aviso-server/auth/aviso_auth/authorisation.py @@ -170,7 +170,6 @@ def _is_backend_key_allowed(self, username: str, backend_key: str): # now check if we are accessing to a key space that is open only to authorised users elif len(list(filter(lambda x: backend_key.startswith(x), self.protected_keys))) > 0: - allowed_destinations = self._allowed_destinations(username) logger.debug(f"Destination allowed: {allowed_destinations}") diff --git a/aviso-server/auth/aviso_auth/config.py b/aviso-server/auth/aviso_auth/config.py index 4c28fff..3f8ed6f 100644 --- a/aviso-server/auth/aviso_auth/config.py +++ b/aviso-server/auth/aviso_auth/config.py @@ -200,7 +200,6 @@ def _read_env_variables(self) -> Dict[str, any]: return config def logging_setup(self, logging_conf_path: str): - if logging_conf_path is not None: try: with open(logging_conf_path, "r") as f: diff --git a/aviso-server/monitoring/aviso_monitoring/collector/config.py b/aviso-server/monitoring/aviso_monitoring/collector/config.py index 2584caa..84e4ce6 100644 --- a/aviso-server/monitoring/aviso_monitoring/collector/config.py +++ b/aviso-server/monitoring/aviso_monitoring/collector/config.py @@ -55,7 +55,6 @@ def __init__(self, transmitter=None, enabled=None, conf_from_file=None): @staticmethod def _create_default_config() -> Dict: - transmitter = { "monitoring_server_host": "127.0.0.1", "monitoring_server_port": 1111, diff --git a/aviso-server/monitoring/aviso_monitoring/collector/transmitter.py b/aviso-server/monitoring/aviso_monitoring/collector/transmitter.py index a8e1e19..e97aec8 100644 --- a/aviso-server/monitoring/aviso_monitoring/collector/transmitter.py +++ b/aviso-server/monitoring/aviso_monitoring/collector/transmitter.py @@ -58,7 +58,6 @@ def transmitter_cycle(self): # logger.debug("Telemetry transmitter cycle started") if len(self.tlm_buffer): # don't do anything if the buffer is empty - # read the event buffer and clear it 
tlms = self.tlm_buffer.copy() logger.debug(f"{len(tlms)} TLMs found in the buffer") diff --git a/aviso-server/monitoring/aviso_monitoring/config.py b/aviso-server/monitoring/aviso_monitoring/config.py index ea54dcc..cc9ae9b 100644 --- a/aviso-server/monitoring/aviso_monitoring/config.py +++ b/aviso-server/monitoring/aviso_monitoring/config.py @@ -28,7 +28,6 @@ def __init__( etcd_reporter=None, prometheus_reporter=None, ): - try: # we build the configuration in priority order from the lower to the higher # start from the defaults @@ -52,7 +51,6 @@ def __init__( @staticmethod def _create_default_config() -> Dict: - udp_server = {"host": "127.0.0.1", "port": 1111, "buffer_size": 64 * 1024} # this are the setting for sending the telemetry to a monitoring server like Opsview diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py index 91c8c21..7a118f8 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py @@ -78,7 +78,6 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def metric(self): - # incoming tlms assert self.msg_receiver, "Msg receiver is None" new_tlms = self.msg_receiver.extract_incoming_tlms(self.metric_name) diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py index e71cdb2..da520a8 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py @@ -76,7 +76,6 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def metric(self): - logger.debug(f"Processing tlms {self.metric_name}...") assert self.msg_receiver, "Msg receiver is None" diff --git 
a/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py index a0c79a2..6d6900b 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py @@ -89,7 +89,6 @@ class StoreSize(EtcdChecker): """ def metric(self): - # defaults status = 0 message = "Store size is nominal" diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/prometheus_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/prometheus_reporter.py index d32b5ce..4cdde63 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/prometheus_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/prometheus_reporter.py @@ -121,7 +121,6 @@ def __init__(self, tlm_type, *args, **kwargs): self.msg_receiver = kwargs["msg_receiver"] def metric(self): - logger.debug(f"Processing tlms {self.metric_name}...") assert self.msg_receiver, "Msg receiver is None" diff --git a/aviso-server/monitoring/tests/test_prometheus_reporter.py b/aviso-server/monitoring/tests/test_prometheus_reporter.py index d62fe11..bf61918 100644 --- a/aviso-server/monitoring/tests/test_prometheus_reporter.py +++ b/aviso-server/monitoring/tests/test_prometheus_reporter.py @@ -23,7 +23,6 @@ def receiver(): - counter_tlm1 = { "telemetry_type": counter_type, "component_name": "counter_comp", diff --git a/aviso-server/rest/aviso_rest/config.py b/aviso-server/rest/aviso_rest/config.py index 9e4d269..cea1459 100644 --- a/aviso-server/rest/aviso_rest/config.py +++ b/aviso-server/rest/aviso_rest/config.py @@ -87,7 +87,6 @@ def __init__( @staticmethod def _create_default_config() -> Dict: - # main config config = {} config["monitoring"] = None @@ -165,7 +164,6 @@ def _read_env_variables(self) -> Dict[str, any]: return config def logging_setup(self, logging_conf_path: str): - if logging_conf_path is not None: try: with open(logging_conf_path, "r") as f: 
diff --git a/pyaviso/cli_aviso.py b/pyaviso/cli_aviso.py index dd0467f..825d641 100644 --- a/pyaviso/cli_aviso.py +++ b/pyaviso/cli_aviso.py @@ -52,11 +52,9 @@ def catch_all_exceptions(cls, handler): """ class Cls(cls): - _original_args = None def make_context(self, info_name, args, parent=None, **extra): - # grab the original command line arguments self._original_args = " ".join(args) diff --git a/pyaviso/service_config_manager.py b/pyaviso/service_config_manager.py index 9a31964..d2cb93a 100644 --- a/pyaviso/service_config_manager.py +++ b/pyaviso/service_config_manager.py @@ -169,7 +169,6 @@ def pull_and_save(self, service: str, directory: str, delete: bool) -> List[str] pulled_files = [] pulled_files_tmp = [] if len(kvs) > 0: - # First check if we need to delete the existing folder if delete and os.path.exists(directory): try: diff --git a/pyaviso/triggers/post_trigger.py b/pyaviso/triggers/post_trigger.py index d2cceed..fc821cf 100644 --- a/pyaviso/triggers/post_trigger.py +++ b/pyaviso/triggers/post_trigger.py @@ -83,7 +83,6 @@ def __init__(self, notification: Dict, params: Dict): self.source = self.SOURCE_DEFAULT def execute(self): - # prepare the CloudEvents message attributes = { @@ -147,7 +146,6 @@ def __init__(self, notification: Dict, params: Dict): self.source = self.SOURCE_DEFAULT def execute(self): - # prepare the AWS topic message attributes = { diff --git a/pyaviso/triggers/trigger_factory.py b/pyaviso/triggers/trigger_factory.py index 2c688ad..ee3f197 100644 --- a/pyaviso/triggers/trigger_factory.py +++ b/pyaviso/triggers/trigger_factory.py @@ -18,7 +18,6 @@ class TriggerFactory: """ def create_trigger(self, notification: Dict[str, any], params: Dict[str, any]) -> Trigger: - assert "type" in params, "'type' is a mandatory field in trigger" # find specific trigger class trigger_type = TriggerType[params.get("type").lower()] diff --git a/pyaviso/user_config.py b/pyaviso/user_config.py index 38d9f98..3e3409b 100644 --- a/pyaviso/user_config.py +++ 
b/pyaviso/user_config.py @@ -337,7 +337,6 @@ def _read_env_variables(self) -> Dict[str, any]: return config def logging_setup(self, logging_conf_path: str): - if logging_conf_path is not None: try: with open(logging_conf_path, "r") as f: diff --git a/tests/unit/test_etcd_engine.py b/tests/unit/test_etcd_engine.py index 3331577..e2a6bde 100644 --- a/tests/unit/test_etcd_engine.py +++ b/tests/unit/test_etcd_engine.py @@ -429,7 +429,6 @@ def test_find_compacted_revision(engine): @pytest.mark.parametrize("engine", engines) def test_push_with_lease(engine): - # submit a key expiring kvs = [{"key": "test/test0", "value": "0"}] assert engine.push_with_status(kvs, base_key="test/", message="test/test0", ttl=1) @@ -448,7 +447,6 @@ def test_push_with_lease(engine): @pytest.mark.parametrize("engine", engines) def test_status_as_linked_list(engine): - status0 = {"date_time": "2020-08-28T10:58:17.829Z"} kv0 = {"mod_rev": "100", "value": json.dumps(status0).encode()} diff --git a/tests/unit/test_triggers.py b/tests/unit/test_triggers.py index fd981ad..ce67aea 100644 --- a/tests/unit/test_triggers.py +++ b/tests/unit/test_triggers.py @@ -53,7 +53,6 @@ def caplog_for_logger(caplog): # this is needed to assert over the logging outp def test_echo_trigger(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output - # open the listener yaml file with open("tests/unit/fixtures/good_listeners/echo_listener.yaml", "r") as f: listeners_dict = yaml.safe_load(f.read()) @@ -106,7 +105,6 @@ def trigger_function(notification): def test_logger_listener(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output - # open the listener yaml file with open("tests/unit/fixtures/good_listeners/log_listener.yaml", "r") as f: 
listeners_dict = yaml.safe_load(f.read()) @@ -136,7 +134,6 @@ def test_logger_listener(conf, listener_factory, caplog): def test_command_listener(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output - # open the listener yaml file with open("tests/unit/fixtures/good_listeners/command_listener.yaml", "r") as f: listeners_dict = yaml.safe_load(f.read()) From 9f19c6bfff5923e8aa2e0a322bde79e3b2619f9d Mon Sep 17 00:00:00 2001 From: sametd Date: Mon, 31 Jul 2023 18:02:23 +0200 Subject: [PATCH 05/50] type comparison is improved --- pyaviso/engine/etcd_grpc_engine.py | 2 +- pyaviso/engine/etcd_rest_engine.py | 2 +- pyaviso/event_listeners/listener_schema_parser.py | 4 ++-- tests/unit/test_auth.py | 6 +++--- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/pyaviso/engine/etcd_grpc_engine.py b/pyaviso/engine/etcd_grpc_engine.py index afb83c2..2e94ded 100644 --- a/pyaviso/engine/etcd_grpc_engine.py +++ b/pyaviso/engine/etcd_grpc_engine.py @@ -33,7 +33,7 @@ def __init__(self, config: EngineConfig, auth: Auth): self._base_url = f"http://{self._host}:{self._port}/v3/" def _initialise_server(self): - if type(self.auth) == EtcdAuth: + if isinstance(self.auth, EtcdAuth): self._server = Etcd3Client( self.host, self.port, user=self.auth.username, password=self.auth.password, timeout=self.timeout ) diff --git a/pyaviso/engine/etcd_rest_engine.py b/pyaviso/engine/etcd_rest_engine.py index 3287676..32938e5 100644 --- a/pyaviso/engine/etcd_rest_engine.py +++ b/pyaviso/engine/etcd_rest_engine.py @@ -247,7 +247,7 @@ def _authenticate(self) -> bool: This method authenticates the user and set the internal token, this is only done for Etcd authentication :return: True if successfully authenticated """ - if type(self.auth) == EtcdAuth: + if isinstance(self.auth, EtcdAuth): logger.debug(f"Authenticating user {self.auth.username}...") url = 
self._base_url + "auth/authenticate" diff --git a/pyaviso/event_listeners/listener_schema_parser.py b/pyaviso/event_listeners/listener_schema_parser.py index 2fd29a1..7c03ac3 100644 --- a/pyaviso/event_listeners/listener_schema_parser.py +++ b/pyaviso/event_listeners/listener_schema_parser.py @@ -170,7 +170,7 @@ def _update_schema(self, mars_schema, evl_schema) -> Dict[str, any]: # search for all the enum keys in all event listener types and add the mars values to it logger.debug("Parsing mars language schema...") for e in evl_schema.items(): - if type(e[1]) == dict and e[1].get("request"): + if isinstance(e[1], dict) and e[1].get("request"): request = e[1].get("request") for k in request: for t in request[k]: @@ -180,7 +180,7 @@ def _update_schema(self, mars_schema, evl_schema) -> Dict[str, any]: # check the mars schema mars_enums = mars_schema["_field"][k]["values"] for me in mars_enums: - if type(me) == list: + if isinstance(me, list): for en in me: t["values"].append(en) else: diff --git a/tests/unit/test_auth.py b/tests/unit/test_auth.py index 5dd2575..2c34653 100644 --- a/tests/unit/test_auth.py +++ b/tests/unit/test_auth.py @@ -39,9 +39,9 @@ def test_auth_type(conf): auth2 = auth.Auth.get_auth(conf) conf.auth_type = "ecmwf" auth3 = auth.Auth.get_auth(conf) - assert type(auth1) == none_auth.NoneAuth - assert type(auth2) == etcd_auth.EtcdAuth - assert type(auth3) == ecmwf_auth.EcmwfAuth + assert isinstance(auth1, none_auth.NoneAuth) + assert isinstance(auth2, etcd_auth.EtcdAuth) + assert isinstance(auth3, ecmwf_auth.EcmwfAuth) @pytest.mark.skip # we cannot authenticate in this test setup From 5beb09220297fe4df42e0fa507ae8e73dd7617f4 Mon Sep 17 00:00:00 2001 From: sametd Date: Sun, 30 Jul 2023 23:13:37 +0200 Subject: [PATCH 06/50] relative paths for 2 tests --- tests/unit/test_auth.py | 4 +++- tests/unit/test_cli.py | 6 ++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/unit/test_auth.py b/tests/unit/test_auth.py index 2c34653..5baa14d 
100644 --- a/tests/unit/test_auth.py +++ b/tests/unit/test_auth.py @@ -13,10 +13,12 @@ from pyaviso import logger, user_config from pyaviso.authentication import auth, ecmwf_auth, etcd_auth, none_auth from pyaviso.engine import engine_factory +from pathlib import Path def conf() -> user_config.UserConfig: # this automatically configure the logging - c = user_config.UserConfig(conf_path="tests/config.yaml") + tests_path = Path(__file__).parent.parent + c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) return c diff --git a/tests/unit/test_cli.py b/tests/unit/test_cli.py index 46548dc..62a0646 100644 --- a/tests/unit/test_cli.py +++ b/tests/unit/test_cli.py @@ -14,12 +14,14 @@ from pyaviso import logger, user_config from pyaviso.cli_aviso import cli, key, listen, notify, value from pyaviso.engine.engine_factory import EngineType +from pathlib import Path @pytest.fixture() def conf() -> user_config.UserConfig: # this automatically configure the logging - c = user_config.UserConfig(conf_path="tests/config.yaml") - os.environ["AVISO_CONFIG"] = "tests/config.yaml" + tests_path = Path(__file__).parent.parent + c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) + os.environ["AVISO_CONFIG"] = str(Path(tests_path / "config.yaml")) return c From 7e6af9ac884225fd29fa1b4bde0911763439f232 Mon Sep 17 00:00:00 2001 From: sametd Date: Sun, 30 Jul 2023 23:34:09 +0200 Subject: [PATCH 07/50] relative path for etcd engine test --- tests/unit/test_etcd_engine.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_etcd_engine.py b/tests/unit/test_etcd_engine.py index e2a6bde..442bca7 100644 --- a/tests/unit/test_etcd_engine.py +++ b/tests/unit/test_etcd_engine.py @@ -23,17 +23,20 @@ from pyaviso.engine.etcd_engine import LOCAL_STATE_FOLDER from pyaviso.engine.etcd_grpc_engine import EtcdGrpcEngine from pyaviso.engine.etcd_rest_engine import EtcdRestEngine +from pathlib import Path def rest_engine(): # this 
automatically configure the logging - c = user_config.UserConfig(conf_path="tests/config.yaml") + tests_path = Path(__file__).parent.parent + c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) authenticator = auth.Auth.get_auth(c) engine = EtcdRestEngine(c.notification_engine, authenticator) return engine def grpc_engine(): # this automatically configure the logging - c = user_config.UserConfig(conf_path="tests/config.yaml") + tests_path = Path(__file__).parent.parent + c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) authenticator = auth.Auth.get_auth(c) engine = EtcdGrpcEngine(c.notification_engine, authenticator) return engine From e26868d01e7f55038d2f81dfd4a1f2855c460344 Mon Sep 17 00:00:00 2001 From: sametd Date: Sun, 30 Jul 2023 23:59:58 +0200 Subject: [PATCH 08/50] more relative path fixes --- tests/unit/test_event_listener_factory.py | 28 ++++++++++++---------- tests/unit/test_file_based_engine.py | 4 +++- tests/unit/test_listener_manager.py | 4 +++- tests/unit/test_triggers.py | 29 +++++++++++++---------- 4 files changed, 37 insertions(+), 28 deletions(-) diff --git a/tests/unit/test_event_listener_factory.py b/tests/unit/test_event_listener_factory.py index 5db45fd..490512d 100644 --- a/tests/unit/test_event_listener_factory.py +++ b/tests/unit/test_event_listener_factory.py @@ -8,6 +8,7 @@ import json import os +from pathlib import Path import pytest import yaml @@ -17,17 +18,18 @@ from pyaviso.engine import engine_factory as ef from pyaviso.event_listeners import event_listener_factory as elf +tests_path = Path(__file__).parent.parent @pytest.fixture() def conf() -> user_config.UserConfig: # this automatically configure the logging - c = user_config.UserConfig(conf_path="tests/config.yaml") + c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) return c @pytest.fixture() def schema(conf): # Load test schema - with open("tests/unit/fixtures/listener_schema.json") as schema: + with 
Path(tests_path / "unit/fixtures/listener_schema.json").open() as schema: return json.load(schema) @@ -38,7 +40,7 @@ def test_empty_file(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with open("tests/unit/fixtures/bad_listeners/empty.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/bad_listeners/empty.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -54,7 +56,7 @@ def test_no_listeners(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with open("tests/unit/fixtures/bad_listeners/noListeners.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/bad_listeners/noListeners.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -70,7 +72,7 @@ def test_bad_tree_structure(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with open("tests/unit/fixtures/bad_listeners/badTree.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/bad_listeners/badTree.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -86,7 +88,7 @@ def test_bad_attribute(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with open("tests/unit/fixtures/bad_listeners/badAttribute.yaml", "r") as f: + with Path(tests_path / 
"unit/fixtures/bad_listeners/badAttribute.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -102,7 +104,7 @@ def test_bad_format(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with open("tests/unit/fixtures/bad_listeners/badFormat.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/bad_listeners/badFormat.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -118,7 +120,7 @@ def test_no_trigger(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with open("tests/unit/fixtures/bad_listeners/noTrigger.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/bad_listeners/noTrigger.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -134,7 +136,7 @@ def test_bad_trigger_type(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with open("tests/unit/fixtures/bad_listeners/badTriggerType.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/bad_listeners/badTriggerType.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -150,7 +152,7 @@ def test_bad_trigger(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with open("tests/unit/fixtures/bad_listeners/badTrigger.yaml", "r") as f: + with Path(tests_path 
/ "unit/fixtures/bad_listeners/badTrigger.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -166,7 +168,7 @@ def test_single_listener_complete(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/complete_flight_listener.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/good_listeners/complete_flight_listener.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -183,7 +185,7 @@ def test_single_listener(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/basic_flight_listener.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/good_listeners/basic_flight_listener.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -200,7 +202,7 @@ def test_multiple_listener(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/multiple_flight_listeners.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/good_listeners/multiple_flight_listeners.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) diff --git a/tests/unit/test_file_based_engine.py 
b/tests/unit/test_file_based_engine.py index 25d4292..b0a0e74 100644 --- a/tests/unit/test_file_based_engine.py +++ b/tests/unit/test_file_based_engine.py @@ -9,6 +9,7 @@ import os import time from shutil import rmtree +from pathlib import Path import pytest @@ -20,7 +21,8 @@ @pytest.fixture() def test_engine(): # this automatically configure the logging - c = user_config.UserConfig(conf_path="tests/config.yaml") + tests_path = Path(__file__).parent.parent + c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) authenticator = auth.Auth.get_auth(c) engine = FileBasedEngine(c.notification_engine, authenticator) return engine diff --git a/tests/unit/test_listener_manager.py b/tests/unit/test_listener_manager.py index 18af2e3..9e08d7a 100644 --- a/tests/unit/test_listener_manager.py +++ b/tests/unit/test_listener_manager.py @@ -7,6 +7,7 @@ # nor does it submit to any jurisdiction. import os +from pathlib import Path import pytest @@ -20,7 +21,8 @@ @pytest.fixture() def conf() -> user_config.UserConfig: # this automatically configure the logging - c = user_config.UserConfig(conf_path="tests/config.yaml") + tests_path = Path(__file__).parent.parent + c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) return c diff --git a/tests/unit/test_triggers.py b/tests/unit/test_triggers.py index ce67aea..fffc3a5 100644 --- a/tests/unit/test_triggers.py +++ b/tests/unit/test_triggers.py @@ -9,6 +9,7 @@ import contextlib import logging import os +from pathlib import Path import time from threading import Thread @@ -22,10 +23,12 @@ from pyaviso.event_listeners import event_listener_factory as elf from pyaviso.event_listeners.listener_schema_parser import ListenerSchemaParser +tests_path = Path(__file__).parent.parent @pytest.fixture() def conf() -> user_config.UserConfig: # this automatically configure the logging - c = user_config.UserConfig(conf_path="tests/config.yaml") + tests_path = Path(__file__).parent.parent + c = 
user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) return c @@ -54,7 +57,7 @@ def test_echo_trigger(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/echo_listener.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/good_listeners/echo_listener.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -106,7 +109,7 @@ def test_logger_listener(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/log_listener.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/good_listeners/log_listener.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -123,7 +126,7 @@ def test_logger_listener(conf, listener_factory, caplog): # check the trigger has logged the notification on the system log assert "Log Trigger completed" in caplog.text # check the trigger has logged the notification on the log specified - with open(listener.triggers[0].get("path"), "r") as f: + with open(listener.triggers[0].get("path")).open(mode='r') as f: assert "Notification received" in f.read() # clean up @@ -135,7 +138,7 @@ def test_command_listener(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/command_listener.yaml", "r") as f: + 
with Path(tests_path / "unit/fixtures/good_listeners/command_listener.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -156,7 +159,7 @@ def test_command_json_listener(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/command_json_listener.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/good_listeners/command_json_listener.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -177,7 +180,7 @@ def test_command_json_path_listener(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/command_json_path_listener.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/good_listeners/command_json_path_listener.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -210,7 +213,7 @@ def test_post_cloudEventshttp_listener(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/post_cloudEventsHttp_listener.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/good_listeners/post_cloudEventsHttp_listener.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = 
listener_factory.create_listeners(listeners_dict) @@ -238,7 +241,7 @@ def test_post_cloudeventsaws_listener(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/post_cloudEventsAws_fifo_listener.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/good_listeners/post_cloudEventsAws_fifo_listener.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -265,7 +268,7 @@ def test_multiple_nots_echo(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/echo_listener.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/good_listeners/echo_listener.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -288,7 +291,7 @@ def test_multiple_nots_cmd(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/command_listener.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/good_listeners/command_listener.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -311,7 +314,7 @@ def test_multiple_listeners(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with 
caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/multiple_listeners.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/good_listeners/multiple_listeners.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -332,7 +335,7 @@ def test_multiple_triggers(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with open("tests/unit/fixtures/good_listeners/multiple_triggers.yaml", "r") as f: + with Path(tests_path / "unit/fixtures/good_listeners/multiple_triggers.yaml").open(mode='r') as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) From d53a5354908d8dc2743f339b634a2bc34a95370b Mon Sep 17 00:00:00 2001 From: sametd Date: Mon, 31 Jul 2023 00:45:53 +0200 Subject: [PATCH 09/50] more relative path fixes and monkey patches whenever necessary) --- tests/unit/test_triggers.py | 32 +++++++++++++++++++-------- tests/unit/test_user_config.py | 40 +++++++++++++++++++++++----------- tests/unit/test_validation.py | 2 +- 3 files changed, 51 insertions(+), 23 deletions(-) diff --git a/tests/unit/test_triggers.py b/tests/unit/test_triggers.py index fffc3a5..e2a86f0 100644 --- a/tests/unit/test_triggers.py +++ b/tests/unit/test_triggers.py @@ -25,6 +25,12 @@ tests_path = Path(__file__).parent.parent + +def base_path() -> Path: + """Get the current folder of the test""" + return Path(__file__).parent.parent.parent + + @pytest.fixture() def conf() -> user_config.UserConfig: # this automatically configure the logging tests_path = Path(__file__).parent.parent @@ -53,7 +59,8 @@ def caplog_for_logger(caplog): # this is needed to assert over the 
logging outp lo.removeHandler(caplog.handler) -def test_echo_trigger(conf, listener_factory, caplog): +def test_echo_trigger(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file @@ -79,7 +86,8 @@ def test_echo_trigger(conf, listener_factory, caplog): assert "Echo Trigger completed" in caplog.text -def test_function_trigger(conf, listener_factory, caplog): +def test_function_trigger(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) # create a list that increments every time there is a new event trigger_list = [] @@ -105,7 +113,8 @@ def trigger_function(notification): assert trigger_list.__len__() == 1 -def test_logger_listener(conf, listener_factory, caplog): +def test_logger_listener(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file @@ -126,7 +135,7 @@ def test_logger_listener(conf, listener_factory, caplog): # check the trigger has logged the notification on the system log assert "Log Trigger completed" in caplog.text # check the trigger has logged the notification on the log specified - with open(listener.triggers[0].get("path")).open(mode='r') as f: + with open(listener.triggers[0].get("path"), 'r') as f: assert "Notification received" in f.read() # clean up @@ -134,7 +143,8 @@ def test_logger_listener(conf, listener_factory, caplog): os.remove("testLog.log") -def test_command_listener(conf, listener_factory, caplog): +def test_command_listener(conf, 
listener_factory, caplog, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file @@ -155,7 +165,8 @@ def test_command_listener(conf, listener_factory, caplog): assert "Command Trigger completed" in caplog.text -def test_command_json_listener(conf, listener_factory, caplog): +def test_command_json_listener(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file @@ -176,7 +187,8 @@ def test_command_json_listener(conf, listener_factory, caplog): assert "Command Trigger completed" in caplog.text -def test_command_json_path_listener(conf, listener_factory, caplog): +def test_command_json_path_listener(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file @@ -287,7 +299,8 @@ def test_multiple_nots_echo(conf, listener_factory, caplog): assert caplog.text.count("Echo Trigger completed") == n_nots -def test_multiple_nots_cmd(conf, listener_factory, caplog): +def test_multiple_nots_cmd(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file @@ -331,7 +344,8 @@ def test_multiple_listeners(conf, listener_factory, caplog): assert caplog.text.count("Echo Trigger completed") == 3 -def 
test_multiple_triggers(conf, listener_factory, caplog): +def test_multiple_triggers(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file diff --git a/tests/unit/test_user_config.py b/tests/unit/test_user_config.py index d92486b..7533e84 100644 --- a/tests/unit/test_user_config.py +++ b/tests/unit/test_user_config.py @@ -7,6 +7,7 @@ # nor does it submit to any jurisdiction. import os +from pathlib import Path import pytest @@ -16,7 +17,13 @@ from pyaviso.event_listeners.listener_schema_parser import ListenerSchemaParserType from pyaviso.user_config import KEY_FILE, UserConfig -test_config_folder = "tests/unit/fixtures/" + +tests_path = Path(__file__).parent.parent +test_config_folder = str(Path(tests_path / "unit/fixtures/")) + +def base_path() -> Path: + """Get the current folder of the test""" + return Path(__file__).parent.parent.parent @pytest.fixture(autouse=True) @@ -124,7 +131,8 @@ def clear_environment(): pass -def test_default(): +def test_default(monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) c = UserConfig._create_default_config() assert not c["debug"] @@ -155,9 +163,10 @@ def test_default(): assert not c["remote_schema"] -def test_config_file(): +def test_config_file(monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) - c = UserConfig(conf_path=test_config_folder + "config.yaml") + c = UserConfig(conf_path=test_config_folder + "/config.yaml") assert c.debug assert c.notification_engine.polling_interval == 1 assert c.notification_engine.type == EngineType.ETCD_GRPC @@ -185,9 +194,10 @@ def test_config_file(): assert 
c.schema_parser == ListenerSchemaParserType.ECMWF -def test_config_file_with_ev(): +def test_config_file_with_ev(monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) - os.environ["AVISO_CONFIG"] = test_config_folder + "config.yaml" + os.environ["AVISO_CONFIG"] = test_config_folder + "/config.yaml" c = UserConfig() assert c.debug assert c.notification_engine.polling_interval == 1 @@ -216,7 +226,8 @@ def test_config_file_with_ev(): assert c.schema_parser == ListenerSchemaParserType.ECMWF -def test_env_variables(): +def test_env_variables(monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) os.environ["AVISO_NOTIFICATION_HOST"] = "test_env" os.environ["AVISO_NOTIFICATION_PORT"] = "3" @@ -273,7 +284,8 @@ def test_env_variables(): assert c.schema_parser == ListenerSchemaParserType.ECMWF -def test_env_variables_with_config_file(): +def test_env_variables_with_config_file(monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) os.environ["AVISO_NOTIFICATION_HOST"] = "test_env" os.environ["AVISO_NOTIFICATION_PORT"] = "3" @@ -300,7 +312,7 @@ def test_env_variables_with_config_file(): os.environ["AVISO_SCHEMA_PARSER"] = "generic" # create a config with the configuration file but the environment variables take priority - c = UserConfig(conf_path=test_config_folder + "config.yaml") + c = UserConfig(conf_path=test_config_folder + "/config.yaml") assert not c.debug assert c.notification_engine.polling_interval == 3 @@ -329,7 +341,8 @@ def test_env_variables_with_config_file(): assert c.schema_parser == ListenerSchemaParserType.GENERIC -def test_constructor(): +def test_constructor(monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) 
logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) # create a config with passing the configuration file as well as the parameters. The parameters will take priority notification_engine = { @@ -354,7 +367,7 @@ def test_constructor(): } c = UserConfig( - conf_path=test_config_folder + "config.yaml", + conf_path=test_config_folder + "/config.yaml", notification_engine=notification_engine, configuration_engine=configuration_engine, debug=False, @@ -394,7 +407,8 @@ def test_constructor(): assert c.schema_parser == ListenerSchemaParserType.ECMWF -def test_constructor_with_env_var(): +def test_constructor_with_env_var(monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) os.environ["AVISO_NOTIFICATION_HOST"] = "test_env" os.environ["AVISO_NOTIFICATION_PORT"] = "3" @@ -444,7 +458,7 @@ def test_constructor_with_env_var(): } c = UserConfig( - conf_path=test_config_folder + "config.yaml", + conf_path=test_config_folder + "/config.yaml", notification_engine=notification_engine, configuration_engine=configuration_engine, debug=False, diff --git a/tests/unit/test_validation.py b/tests/unit/test_validation.py index 91ecdde..d88d5ff 100644 --- a/tests/unit/test_validation.py +++ b/tests/unit/test_validation.py @@ -28,7 +28,7 @@ def test_int_handler(): try: validator = IntHandler(key="test", **schema) except TypeError as e: - assert e.args[0] == "__init__() got an unexpected keyword argument 'error'" + assert e.args[0].split()[-3:] == "IntHandler.__init__() got an unexpected keyword argument 'error'".split()[-3:] schema = {"range": [0, 20], "canonic": "{0:0>4}"} From 20a9c30621f8894aa3268851e018cf775fd004f7 Mon Sep 17 00:00:00 2001 From: sametd Date: Mon, 31 Jul 2023 16:57:21 +0200 Subject: [PATCH 10/50] relative paths and monkey pathes for system tests --- tests/system/test_aviso.py | 21 +++++++++--- tests/system/test_aviso_config.py | 54 
++++++++++++++++++++++--------- 2 files changed, 54 insertions(+), 21 deletions(-) diff --git a/tests/system/test_aviso.py b/tests/system/test_aviso.py index 2a53105..b39ceb6 100644 --- a/tests/system/test_aviso.py +++ b/tests/system/test_aviso.py @@ -9,6 +9,7 @@ import contextlib import logging import os +from pathlib import Path import time from datetime import datetime from shutil import rmtree @@ -28,8 +29,14 @@ aviso = NotificationManager() +def base_path() -> Path: + """Get the current folder of the test""" + return Path(__file__).parent.parent.parent + + def create_conf() -> user_config.UserConfig: # this automatically configure the logging - c = user_config.UserConfig(conf_path="tests/config.yaml") + tests_path = Path(__file__).parent.parent + c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) return c @@ -62,7 +69,8 @@ def clear_environment(): @pytest.mark.parametrize("config", configs) @pytest.fixture(autouse=True) # this runs before and after every test -def pre_post_test(config): +def pre_post_test(config, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) # delete the revision state full_home_path = os.path.expanduser(HOME_FOLDER) full_state_path = os.path.join(full_home_path, LOCAL_STATE_FOLDER) @@ -120,7 +128,8 @@ def caplog_for_logger(caplog): # this is needed to assert over the logging outp lo.removeHandler(caplog.handler) -def reset_previous_run(): +def reset_previous_run(monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) file_path = "tests/system/fixtures/received.txt" full_path = os.path.join(os.getcwd(), file_path) if os.path.exists(full_path): @@ -132,7 +141,8 @@ def reset_previous_run(): @pytest.mark.parametrize("config", [c1, c2]) -def test_command_listener(config): +def test_command_listener(config, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) # delete previous file of notification @@ 
-250,7 +260,8 @@ def test_notify_ttl(config): @pytest.mark.skip @pytest.mark.parametrize("config", [c1]) -def test_history_on_server(config: user_config.UserConfig, caplog): +def test_history_on_server(config: user_config.UserConfig, caplog, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output config.notification_engine.host = "aviso.ecmwf.int" diff --git a/tests/system/test_aviso_config.py b/tests/system/test_aviso_config.py index eeb17c9..60a44f3 100644 --- a/tests/system/test_aviso_config.py +++ b/tests/system/test_aviso_config.py @@ -7,6 +7,7 @@ # nor does it submit to any jurisdiction. import os +from pathlib import Path import time from filecmp import dircmp from shutil import rmtree @@ -24,8 +25,14 @@ config_folder_to_pull = "tests/system/fixtures/config_test_pull1" +def base_path() -> Path: + """Get the current folder of the test""" + return Path(__file__).parent.parent.parent + + def create_conf() -> user_config.UserConfig: # this automatically configure the logging - c = user_config.UserConfig(conf_path="tests/config.yaml") + tests_path = Path(__file__).parent.parent + c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) return c @@ -39,7 +46,8 @@ def create_conf() -> user_config.UserConfig: # this automatically configure the @pytest.mark.parametrize("conf", confs) @pytest.fixture(autouse=True) # this runs before and after every test -def pre_post_test(conf): +def pre_post_test(conf, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) # set environment os.environ["AVISO_CONFIG"] = "tests/config.yaml" yield @@ -55,7 +63,8 @@ def clear_environment(): @pytest.fixture(autouse=True) -def remove_test_svc(): +def remove_test_svc(monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) yield result = CliRunner().invoke(cli, ["remove", test_svc, 
"-f"]) assert result.exit_code == 0 @@ -118,7 +127,8 @@ def test_help(conf): @pytest.mark.parametrize("conf", confs) -def test_push_and_pull(conf): +def test_push_and_pull(conf, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) runner = CliRunner() result = runner.invoke(cli, ["push", test_svc, "-D", config_folder_to_push1, "-m", "test configuration"]) @@ -149,13 +159,14 @@ def test_push_and_pull(conf): @pytest.mark.parametrize("conf", confs) -def test_push_and_pull_workflow1(conf): +def test_push_and_pull_workflow1(conf, monkeypatch: pytest.MonkeyPatch): """ First push larger set, then a small set with NO delete -> pulled folder is like the union of the larger and smaller set :param conf: :return: """ + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) runner = CliRunner() # First push larger set @@ -193,12 +204,13 @@ def test_push_and_pull_workflow1(conf): @pytest.mark.parametrize("conf", confs) -def test_push_and_pull_workflow2(conf): +def test_push_and_pull_workflow2(conf, monkeypatch: pytest.MonkeyPatch): """ First push larger set, then a small set with delete -> pulled folder is like the smaller set :param conf: :return: """ + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) runner = CliRunner() # First push larger set @@ -233,13 +245,14 @@ def test_push_and_pull_workflow2(conf): @pytest.mark.parametrize("conf", confs) -def test_push_and_pull_workflow3(conf): +def test_push_and_pull_workflow3(conf, monkeypatch: pytest.MonkeyPatch): """ First push larger set and pull, then push small set with delete and pull with NO delete -> pulled folder is like the union of larger and smaller set :param conf: :return: """ + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) runner = CliRunner() 
# First push larger set @@ -288,13 +301,14 @@ def test_push_and_pull_workflow3(conf): @pytest.mark.parametrize("conf", confs) -def test_push_and_pull_workflow4(conf): +def test_push_and_pull_workflow4(conf, monkeypatch: pytest.MonkeyPatch): """ First push larger set and pull, then push small set with delete and pull with delete -> pulled folder is like the smaller set :param conf: :return: """ + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) runner = CliRunner() # First push larger set @@ -338,7 +352,8 @@ def test_push_and_pull_workflow4(conf): @pytest.mark.parametrize("conf", confs) -def test_remove(conf): +def test_remove(conf, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) runner = CliRunner() # First push @@ -371,7 +386,8 @@ def test_remove(conf): @pytest.mark.parametrize("conf", confs) -def test_remove_doit(conf): +def test_remove_doit(conf, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) runner = CliRunner() # First push @@ -400,7 +416,8 @@ def test_remove_doit(conf): @pytest.mark.parametrize("conf", confs) -def test_revert(conf): +def test_revert(conf, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) runner = CliRunner() # First push @@ -455,7 +472,8 @@ def test_revert(conf): @pytest.mark.parametrize("conf", confs) -def test_status(conf): +def test_status(conf, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) runner = CliRunner() # First push @@ -470,7 +488,8 @@ def test_status(conf): @pytest.mark.parametrize("conf", confs) -def test_pull_nothing(conf): +def test_pull_nothing(conf, monkeypatch: 
pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) runner = CliRunner() result = runner.invoke(cli, ["pull", test_svc, "-D", config_folder_to_pull]) @@ -479,7 +498,8 @@ def test_pull_nothing(conf): @pytest.mark.parametrize("conf", confs) -def test_remove_nothing(conf): +def test_remove_nothing(conf, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) runner = CliRunner() result = runner.invoke(cli, ["remove", test_svc, "-f"]) @@ -488,7 +508,8 @@ def test_remove_nothing(conf): @pytest.mark.parametrize("conf", confs) -def test_revert_nothing(conf): +def test_revert_nothing(conf, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) runner = CliRunner() result = runner.invoke(cli, ["revert", test_svc]) @@ -497,7 +518,8 @@ def test_revert_nothing(conf): @pytest.mark.parametrize("conf", confs) -def test_status_nothing(conf): +def test_status_nothing(conf, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) runner = CliRunner() result = runner.invoke(cli, ["status", test_svc]) From fbaa4555d0e7e2c6dd9dafeede01d8467638e2f0 Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Tue, 1 Aug 2023 12:57:14 +0200 Subject: [PATCH 11/50] code quaility fixes --- tests/system/test_aviso.py | 4 +-- tests/system/test_aviso_config.py | 4 +-- tests/unit/test_auth.py | 4 +-- tests/unit/test_cli.py | 4 +-- tests/unit/test_etcd_engine.py | 6 ++--- tests/unit/test_event_listener_factory.py | 25 ++++++++++--------- tests/unit/test_file_based_engine.py | 4 +-- tests/unit/test_listener_manager.py | 2 +- tests/unit/test_triggers.py | 30 ++++++++++++----------- tests/unit/test_user_config.py | 2 +- 10 files changed, 44 insertions(+), 41 
deletions(-) diff --git a/tests/system/test_aviso.py b/tests/system/test_aviso.py index b39ceb6..888d094 100644 --- a/tests/system/test_aviso.py +++ b/tests/system/test_aviso.py @@ -9,9 +9,9 @@ import contextlib import logging import os -from pathlib import Path import time from datetime import datetime +from pathlib import Path from shutil import rmtree import pytest @@ -36,7 +36,7 @@ def base_path() -> Path: def create_conf() -> user_config.UserConfig: # this automatically configure the logging tests_path = Path(__file__).parent.parent - c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) + c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml")) return c diff --git a/tests/system/test_aviso_config.py b/tests/system/test_aviso_config.py index 60a44f3..d50218a 100644 --- a/tests/system/test_aviso_config.py +++ b/tests/system/test_aviso_config.py @@ -7,9 +7,9 @@ # nor does it submit to any jurisdiction. import os -from pathlib import Path import time from filecmp import dircmp +from pathlib import Path from shutil import rmtree import pytest @@ -32,7 +32,7 @@ def base_path() -> Path: def create_conf() -> user_config.UserConfig: # this automatically configure the logging tests_path = Path(__file__).parent.parent - c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) + c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml")) return c diff --git a/tests/unit/test_auth.py b/tests/unit/test_auth.py index 5baa14d..d523225 100644 --- a/tests/unit/test_auth.py +++ b/tests/unit/test_auth.py @@ -7,18 +7,18 @@ # nor does it submit to any jurisdiction. 
import os +from pathlib import Path import pytest from pyaviso import logger, user_config from pyaviso.authentication import auth, ecmwf_auth, etcd_auth, none_auth from pyaviso.engine import engine_factory -from pathlib import Path def conf() -> user_config.UserConfig: # this automatically configure the logging tests_path = Path(__file__).parent.parent - c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) + c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml")) return c diff --git a/tests/unit/test_cli.py b/tests/unit/test_cli.py index 62a0646..0eba240 100644 --- a/tests/unit/test_cli.py +++ b/tests/unit/test_cli.py @@ -7,6 +7,7 @@ # nor does it submit to any jurisdiction. import os +from pathlib import Path import pytest from click.testing import CliRunner @@ -14,13 +15,12 @@ from pyaviso import logger, user_config from pyaviso.cli_aviso import cli, key, listen, notify, value from pyaviso.engine.engine_factory import EngineType -from pathlib import Path @pytest.fixture() def conf() -> user_config.UserConfig: # this automatically configure the logging tests_path = Path(__file__).parent.parent - c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) + c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml")) os.environ["AVISO_CONFIG"] = str(Path(tests_path / "config.yaml")) return c diff --git a/tests/unit/test_etcd_engine.py b/tests/unit/test_etcd_engine.py index 442bca7..4a585e5 100644 --- a/tests/unit/test_etcd_engine.py +++ b/tests/unit/test_etcd_engine.py @@ -13,6 +13,7 @@ import os import subprocess import time +from pathlib import Path from shutil import rmtree from threading import Thread @@ -23,12 +24,11 @@ from pyaviso.engine.etcd_engine import LOCAL_STATE_FOLDER from pyaviso.engine.etcd_grpc_engine import EtcdGrpcEngine from pyaviso.engine.etcd_rest_engine import EtcdRestEngine -from pathlib import Path def rest_engine(): # this automatically configure the logging tests_path = 
Path(__file__).parent.parent - c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) + c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml")) authenticator = auth.Auth.get_auth(c) engine = EtcdRestEngine(c.notification_engine, authenticator) return engine @@ -36,7 +36,7 @@ def rest_engine(): # this automatically configure the logging def grpc_engine(): # this automatically configure the logging tests_path = Path(__file__).parent.parent - c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) + c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml")) authenticator = auth.Auth.get_auth(c) engine = EtcdGrpcEngine(c.notification_engine, authenticator) return engine diff --git a/tests/unit/test_event_listener_factory.py b/tests/unit/test_event_listener_factory.py index 490512d..a4badb2 100644 --- a/tests/unit/test_event_listener_factory.py +++ b/tests/unit/test_event_listener_factory.py @@ -20,9 +20,10 @@ tests_path = Path(__file__).parent.parent + @pytest.fixture() def conf() -> user_config.UserConfig: # this automatically configure the logging - c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) + c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml")) return c @@ -40,7 +41,7 @@ def test_empty_file(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with Path(tests_path / "unit/fixtures/bad_listeners/empty.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/bad_listeners/empty.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -56,7 +57,7 @@ def test_no_listeners(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = 
elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with Path(tests_path / "unit/fixtures/bad_listeners/noListeners.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/bad_listeners/noListeners.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -72,7 +73,7 @@ def test_bad_tree_structure(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with Path(tests_path / "unit/fixtures/bad_listeners/badTree.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/bad_listeners/badTree.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -88,7 +89,7 @@ def test_bad_attribute(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with Path(tests_path / "unit/fixtures/bad_listeners/badAttribute.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/bad_listeners/badAttribute.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -104,7 +105,7 @@ def test_bad_format(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with Path(tests_path / "unit/fixtures/bad_listeners/badFormat.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/bad_listeners/badFormat.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -120,7 +121,7 @@ def test_no_trigger(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = 
ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with Path(tests_path / "unit/fixtures/bad_listeners/noTrigger.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/bad_listeners/noTrigger.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -136,7 +137,7 @@ def test_bad_trigger_type(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with Path(tests_path / "unit/fixtures/bad_listeners/badTriggerType.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/bad_listeners/badTriggerType.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -152,7 +153,7 @@ def test_bad_trigger(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with Path(tests_path / "unit/fixtures/bad_listeners/badTrigger.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/bad_listeners/badTrigger.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it try: @@ -168,7 +169,7 @@ def test_single_listener_complete(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/complete_flight_listener.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/complete_flight_listener.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it 
listeners: list = listener_factory.create_listeners(listeners_dict) @@ -185,7 +186,7 @@ def test_single_listener(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/basic_flight_listener.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/basic_flight_listener.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -202,7 +203,7 @@ def test_multiple_listener(conf: user_config.UserConfig, schema): engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator) listener_factory = elf.EventListenerFactory(engine_factory, schema) # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/multiple_flight_listeners.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/multiple_flight_listeners.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) diff --git a/tests/unit/test_file_based_engine.py b/tests/unit/test_file_based_engine.py index b0a0e74..6f94b50 100644 --- a/tests/unit/test_file_based_engine.py +++ b/tests/unit/test_file_based_engine.py @@ -8,8 +8,8 @@ import os import time -from shutil import rmtree from pathlib import Path +from shutil import rmtree import pytest @@ -22,7 +22,7 @@ @pytest.fixture() def test_engine(): # this automatically configure the logging tests_path = Path(__file__).parent.parent - c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) + c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml")) authenticator = auth.Auth.get_auth(c) engine = FileBasedEngine(c.notification_engine, 
authenticator) return engine diff --git a/tests/unit/test_listener_manager.py b/tests/unit/test_listener_manager.py index 9e08d7a..7052dd3 100644 --- a/tests/unit/test_listener_manager.py +++ b/tests/unit/test_listener_manager.py @@ -22,7 +22,7 @@ @pytest.fixture() def conf() -> user_config.UserConfig: # this automatically configure the logging tests_path = Path(__file__).parent.parent - c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) + c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml")) return c diff --git a/tests/unit/test_triggers.py b/tests/unit/test_triggers.py index e2a86f0..7b24fa4 100644 --- a/tests/unit/test_triggers.py +++ b/tests/unit/test_triggers.py @@ -9,8 +9,8 @@ import contextlib import logging import os -from pathlib import Path import time +from pathlib import Path from threading import Thread import pytest @@ -34,7 +34,7 @@ def base_path() -> Path: @pytest.fixture() def conf() -> user_config.UserConfig: # this automatically configure the logging tests_path = Path(__file__).parent.parent - c = user_config.UserConfig(conf_path= Path(tests_path / "config.yaml")) + c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml")) return c @@ -64,7 +64,7 @@ def test_echo_trigger(conf, listener_factory, caplog, monkeypatch: pytest.Monkey logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/echo_listener.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/echo_listener.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -118,7 +118,7 @@ def test_logger_listener(conf, listener_factory, caplog, monkeypatch: pytest.Mon logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" 
")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/log_listener.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/log_listener.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -135,7 +135,7 @@ def test_logger_listener(conf, listener_factory, caplog, monkeypatch: pytest.Mon # check the trigger has logged the notification on the system log assert "Log Trigger completed" in caplog.text # check the trigger has logged the notification on the log specified - with open(listener.triggers[0].get("path"), 'r') as f: + with open(listener.triggers[0].get("path"), "r") as f: assert "Notification received" in f.read() # clean up @@ -148,7 +148,7 @@ def test_command_listener(conf, listener_factory, caplog, monkeypatch: pytest.Mo logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/command_listener.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/command_listener.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -170,7 +170,7 @@ def test_command_json_listener(conf, listener_factory, caplog, monkeypatch: pyte logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/command_json_listener.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/command_json_listener.yaml").open(mode="r") as f: 
listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -192,7 +192,7 @@ def test_command_json_path_listener(conf, listener_factory, caplog, monkeypatch: logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/command_json_path_listener.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/command_json_path_listener.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -225,7 +225,7 @@ def test_post_cloudEventshttp_listener(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/post_cloudEventsHttp_listener.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/post_cloudEventsHttp_listener.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -253,7 +253,9 @@ def test_post_cloudeventsaws_listener(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/post_cloudEventsAws_fifo_listener.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/post_cloudEventsAws_fifo_listener.yaml").open( + mode="r" + ) as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = 
listener_factory.create_listeners(listeners_dict) @@ -280,7 +282,7 @@ def test_multiple_nots_echo(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/echo_listener.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/echo_listener.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -304,7 +306,7 @@ def test_multiple_nots_cmd(conf, listener_factory, caplog, monkeypatch: pytest.M logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/command_listener.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/command_listener.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -327,7 +329,7 @@ def test_multiple_listeners(conf, listener_factory, caplog): logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/multiple_listeners.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/multiple_listeners.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) @@ -349,7 +351,7 @@ def test_multiple_triggers(conf, listener_factory, caplog, monkeypatch: pytest.M 
logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output # open the listener yaml file - with Path(tests_path / "unit/fixtures/good_listeners/multiple_triggers.yaml").open(mode='r') as f: + with Path(tests_path / "unit/fixtures/good_listeners/multiple_triggers.yaml").open(mode="r") as f: listeners_dict = yaml.safe_load(f.read()) # parse it listeners: list = listener_factory.create_listeners(listeners_dict) diff --git a/tests/unit/test_user_config.py b/tests/unit/test_user_config.py index 7533e84..aa433e5 100644 --- a/tests/unit/test_user_config.py +++ b/tests/unit/test_user_config.py @@ -17,10 +17,10 @@ from pyaviso.event_listeners.listener_schema_parser import ListenerSchemaParserType from pyaviso.user_config import KEY_FILE, UserConfig - tests_path = Path(__file__).parent.parent test_config_folder = str(Path(tests_path / "unit/fixtures/")) + def base_path() -> Path: """Get the current folder of the test""" return Path(__file__).parent.parent.parent From 3f095e068b419c5e6ddca4b8b8c06e86cda5fa66 Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Tue, 1 Aug 2023 16:25:39 +0200 Subject: [PATCH 12/50] relative path fixes for aviso-server --- aviso-server/admin/tests/store_size_cycle.py | 8 ++- aviso-server/admin/tests/test_cleaner.py | 49 +++++++++++++++---- aviso-server/admin/tests/test_compactor.py | 4 +- aviso-server/rest/tests/test_rest_frontend.py | 4 +- 4 files changed, 51 insertions(+), 14 deletions(-) diff --git a/aviso-server/admin/tests/store_size_cycle.py b/aviso-server/admin/tests/store_size_cycle.py index 7e7b1eb..4e67b12 100644 --- a/aviso-server/admin/tests/store_size_cycle.py +++ b/aviso-server/admin/tests/store_size_cycle.py @@ -8,6 +8,7 @@ import datetime import queue +from pathlib import Path import requests from aviso_admin import config @@ -35,7 +36,8 @@ def conf() -> config.Config: # this automatically configure the logging - c = 
config.Config(conf_path="tests/config.yaml") + tests_path = Path(__file__).parent + c = config.Config(conf_path=Path(tests_path / "config.yaml")) return c @@ -68,7 +70,9 @@ def delete_destination_keys(date): def store_size(): reporter = EtcdReporter(conf().monitoring) checker = StoreSize( - EtcdMetricType.etcd_store_size, member_urls=reporter.member_urls, raw_tlms=reporter.retrive_raw_tlms() + EtcdMetricType.etcd_store_size, + member_urls=reporter.member_urls, + raw_tlms=reporter.retrive_raw_tlms(), ) size = checker.max_store_size("etcd_mvcc_db_total_size_in_use_in_bytes") return size diff --git a/aviso-server/admin/tests/test_cleaner.py b/aviso-server/admin/tests/test_cleaner.py index 41e029f..453405e 100644 --- a/aviso-server/admin/tests/test_cleaner.py +++ b/aviso-server/admin/tests/test_cleaner.py @@ -8,6 +8,7 @@ import datetime import os +from pathlib import Path import requests from aviso_admin import config, logger @@ -16,7 +17,8 @@ def conf() -> config.Config: # this automatically configure the logging - c = config.Config(conf_path="aviso-server/admin/tests/config.yaml") + tests_path = Path(__file__).parent + c = config.Config(conf_path=Path(tests_path / "config.yaml")) return c @@ -69,9 +71,18 @@ def test_delete_dissemination_keys(): date = datetime.datetime.now() - datetime.timedelta(days=1) # first put a few dissemination keys - put_key(config["url"], config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/aaaa") - put_key(config["url"], config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/bbbb") - put_key(config["url"], config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/cccc") + put_key( + config["url"], + config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/aaaa", + ) + put_key( + config["url"], + config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/bbbb", + ) + put_key( + config["url"], + config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/cccc", + ) # delete 
dissemination keys n_deleted = cleaner.delete_keys(date, "EC1") @@ -86,9 +97,18 @@ def test_delete_mars_keys(): date = datetime.datetime.now() - datetime.timedelta(days=1) # first put a few MARS keys - put_key(config["url"], config["mars_path"] + "date=" + date.strftime(DATE_FORMAT) + "/aaaa") - put_key(config["url"], config["mars_path"] + "date=" + date.strftime(DATE_FORMAT) + "/bbbb") - put_key(config["url"], config["mars_path"] + "date=" + date.strftime(DATE_FORMAT) + "/cccc") + put_key( + config["url"], + config["mars_path"] + "date=" + date.strftime(DATE_FORMAT) + "/aaaa", + ) + put_key( + config["url"], + config["mars_path"] + "date=" + date.strftime(DATE_FORMAT) + "/bbbb", + ) + put_key( + config["url"], + config["mars_path"] + "date=" + date.strftime(DATE_FORMAT) + "/cccc", + ) # delete MARS keys n_deleted = cleaner.delete_keys(date) @@ -109,9 +129,18 @@ def test_run_cleaner(): # first put a few destinations and keys prefix = config["dest_path"] + date.strftime(DATE_FORMAT) + "/" put_key(config["url"], prefix + "EC1") - put_key(config["url"], config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/aaaa") - put_key(config["url"], config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/bbbb") - put_key(config["url"], config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/cccc") + put_key( + config["url"], + config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/aaaa", + ) + put_key( + config["url"], + config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/bbbb", + ) + put_key( + config["url"], + config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/cccc", + ) # run the whole workflow cleaner.run() diff --git a/aviso-server/admin/tests/test_compactor.py b/aviso-server/admin/tests/test_compactor.py index 25ed8e4..12f3869 100644 --- a/aviso-server/admin/tests/test_compactor.py +++ b/aviso-server/admin/tests/test_compactor.py @@ -10,6 +10,7 @@ import random import time from datetime import 
datetime +from pathlib import Path import pytest import requests @@ -19,7 +20,8 @@ def conf() -> config.Config: # this automatically configure the logging - c = config.Config(conf_path="aviso-server/admin/tests/config.yaml") + tests_path = Path(__file__).parent + c = config.Config(conf_path=Path(tests_path / "config.yaml")) return c diff --git a/aviso-server/rest/tests/test_rest_frontend.py b/aviso-server/rest/tests/test_rest_frontend.py index b574488..a89ec24 100644 --- a/aviso-server/rest/tests/test_rest_frontend.py +++ b/aviso-server/rest/tests/test_rest_frontend.py @@ -10,6 +10,7 @@ import os import threading import time +from pathlib import Path import pytest import requests @@ -20,7 +21,8 @@ from pyaviso.cli_aviso import _parse_inline_params from pyaviso.notification_manager import NotificationManager -config = Config(conf_path="aviso-server/rest/tests/config.yaml") +tests_path = Path(__file__).parent +config = Config(conf_path=Path(tests_path / "config.yaml")) frontend_url_home = f"http://{config.host}:{config.port}" frontend_url_api = f"{frontend_url_home}/api/v1" From 8e5ffde87ae3e41e62a6c656ff452de031790d22 Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Wed, 2 Aug 2023 13:04:52 +0200 Subject: [PATCH 13/50] migration address cleanup --- aviso-server/admin/migration/etcd_migration.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/aviso-server/admin/migration/etcd_migration.py b/aviso-server/admin/migration/etcd_migration.py index 9b0b045..326f03f 100644 --- a/aviso-server/admin/migration/etcd_migration.py +++ b/aviso-server/admin/migration/etcd_migration.py @@ -11,10 +11,10 @@ import requests -old_etcd = "http://k8s-dataservices-master.ecmwf.int:30000" -new_etcd = "http://localhost:2379" -from_revision = 145679891 -MAX_KV_RETURNED = 10000 +old_etcd: str = "__OLD_ETCD_ADDRESS__:PORT" +new_etcd: str = "__NEW_ETCD_ADDRESS__:PORT" +from_revision: int = 145679891 +MAX_KV_RETURNED: int = 10000 def push_kvpairs(etcd_repo, kvs): From 
272a539fe83c73de7148e60a298499f1f6ac5545 Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Wed, 2 Aug 2023 13:05:30 +0200 Subject: [PATCH 14/50] macos is added to github actions --- .github/workflows/check.yaml | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/.github/workflows/check.yaml b/.github/workflows/check.yaml index 77bb4a3..5969e1c 100644 --- a/.github/workflows/check.yaml +++ b/.github/workflows/check.yaml @@ -4,6 +4,7 @@ on: workflow_dispatch: push: + pull_request: branches: [develop] jobs: @@ -23,7 +24,7 @@ jobs: strategy: fail-fast: false matrix: - platform: ["ubuntu-20.04"] + platform: ["ubuntu-20.04","macos-13"] python-version: ["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"] name: Python ${{ matrix.python-version }} on ${{ matrix.platform }} @@ -36,7 +37,7 @@ with: python-version: ${{ matrix.python-version }} - - name: Install etcd + - name: Install etcd for linux run: | ETCD_VER=v3.4.14 DOWNLOAD_URL=https://github.com/etcd-io/etcd/releases/download @@ -44,6 +45,18 @@ mkdir /tmp/etcd-download-test tar xzvf /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz -C /tmp/etcd-download-test --strip-components=1 /tmp/etcd-download-test/etcd & + if: runner.os == 'Linux' + + - name: Install etcd for macos + run: | + ETCD_VER=v3.4.14 + DOWNLOAD_URL=https://github.com/etcd-io/etcd/releases/download + curl -L ${DOWNLOAD_URL}/${ETCD_VER}/etcd-${ETCD_VER}-darwin-amd64.zip -o /tmp/etcd-${ETCD_VER}-darwin-amd64.zip + mkdir /tmp/etcd-download-test + unzip /tmp/etcd-${ETCD_VER}-darwin-amd64.zip -d /tmp + mv /tmp/etcd-${ETCD_VER}-darwin-amd64/* /tmp/etcd-download-test + /tmp/etcd-download-test/etcd & + if: runner.os == 'macOS' - name: Install packages run: | From 0bf3651e6a7f942aff124a5ea2e4326202fc8633 Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Wed, 2 Aug 2023 13:15:48 +0200 Subject: [PATCH 15/50] watchdog is implemented instead of pyinotify --- pyaviso/engine/file_based_engine.py | 48 ++++++++++++++---------------
requirements.txt | 2 +- 2 files changed, 24 insertions(+), 26 deletions(-) diff --git a/pyaviso/engine/file_based_engine.py b/pyaviso/engine/file_based_engine.py index 8c475c5..88e603f 100644 --- a/pyaviso/engine/file_based_engine.py +++ b/pyaviso/engine/file_based_engine.py @@ -8,14 +8,14 @@ import _thread import os -import threading import time from datetime import datetime from queue import Queue from shutil import rmtree from typing import Dict, List -import pyinotify +from watchdog.observers import Observer +from watchdog.events import FileSystemEventHandler from .. import logger from ..authentication.auth import Auth @@ -214,19 +214,17 @@ def _polling( logger.debug("", exc_info=True) return False - # create a watch manager - wm = pyinotify.WatchManager() # Watch Manager - mask = pyinotify.IN_CLOSE_WRITE # watched events - # define a class to handle the new events - class EventHandler(pyinotify.ProcessEvent): - def __init__(self, engine): - super(EventHandler, self).__init__() + class WatchdogHandler(FileSystemEventHandler): + def __init__(self, engine, key, callback): + super().__init__() self._engine = engine + self._key = key + self._callback = callback - def process_IN_CLOSE_WRITE(self, event): - if not os.path.isdir(event.pathname): - kvs = self._engine.pull(key=event.pathname) + def on_modified(self, event): + if not event.is_directory and event.src_path.endswith(self._key): + kvs = self._engine.pull(key=event.src_path) for kv in kvs: k = kv["key"] v = kv["value"].decode() @@ -236,30 +234,30 @@ def process_IN_CLOSE_WRITE(self, event): logger.debug(f"Notification received for key {k}") try: # execute the trigger - callback(k, v) + self._callback(k, v) except Exception as ee: logger.error(f"Error with notification trigger, exception: {type(ee)} {ee}") logger.debug("", exc_info=True) - # add the handler to the watch manager and define the watching task - handler = EventHandler(engine=self) - notifier = pyinotify.Notifier(wm, handler, 
read_freq=self._polling_interval) - wm.add_watch(key, mask, rec=True, auto_add=True) + # define the event handler + event_handler = WatchdogHandler(engine=self, key=key, callback=callback) - # encapsulate the watcher in a daemon thread so we can stop it - def t_run(): - notifier.loop() + # create an observer and schedule the event handler + observer = Observer() + observer.schedule(event_handler, path=key, recursive=True) - t = threading.Thread(target=t_run) - t.setDaemon(True) - t.start() + # start the observer in a daemon thread so we can stop it + observer.start() # this is the stop condition while key in self._listeners: time.sleep(0.1) - notifier.stop() + + # stop the observer + observer.stop() + observer.join() except Exception as e: - logger.error(f"Error while listening to key {key}, {e}") + logger.error(f"Error occurred during polling: {e}") logger.debug("", exc_info=True) _thread.interrupt_main() diff --git a/requirements.txt b/requirements.txt index ab40880..e51ccd5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ PyYAML python-json-logger requests parse -pyinotify +watchdog cloudevents>=1.2.0 boto3 protobuf<=3.20.3 \ No newline at end of file From a74f68506e2b0aa5da58fc91ff4a7bdabcc0df99 Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Wed, 2 Aug 2023 13:22:57 +0200 Subject: [PATCH 16/50] watchdog is added instead of pyinotify --- aviso-server/rest/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aviso-server/rest/requirements.txt b/aviso-server/rest/requirements.txt index 69ab6cb..1cef525 100644 --- a/aviso-server/rest/requirements.txt +++ b/aviso-server/rest/requirements.txt @@ -4,7 +4,7 @@ PyYAML>=5.1.2 python-json-logger>=0.1.11 parse>=1.12.1 requests>=2.23.0 -pyinotify>=0.9.6 +watchdog gunicorn>=20.0.4 flask>=1.1.2 cloudevents>=1.2.0 From bae2602261b951c352e7b517306ed51501329fb1 Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Wed, 2 Aug 2023 13:23:23 +0200 Subject: [PATCH 17/50] isort for 
imports --- pyaviso/engine/file_based_engine.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/pyaviso/engine/file_based_engine.py b/pyaviso/engine/file_based_engine.py index 88e603f..354a1fa 100644 --- a/pyaviso/engine/file_based_engine.py +++ b/pyaviso/engine/file_based_engine.py @@ -14,8 +14,8 @@ from shutil import rmtree from typing import Dict, List -from watchdog.observers import Observer from watchdog.events import FileSystemEventHandler +from watchdog.observers import Observer from .. import logger from ..authentication.auth import Auth @@ -86,7 +86,9 @@ def read_key(k): for fp in x[2]: # any file kk: str = os.path.join(x[0], fp) read_key(kk) - if not prefix: # we only look at the current directory, nothing deeper + if ( + not prefix + ): # we only look at the current directory, nothing deeper break else: read_key(key) @@ -131,7 +133,9 @@ def delete(self, key: str, prefix: bool = True) -> List[Dict[str, bytes]]: return del_kvs - def push(self, kvs: List[Dict[str, any]], ks_delete: List[str] = None, ttl: int = None) -> bool: + def push( + self, kvs: List[Dict[str, any]], ks_delete: List[str] = None, ttl: int = None + ) -> bool: """ Method to submit a list of key-value pairs and delete a list of keys from the server as a single transaction :param kvs: List of KV pair @@ -236,7 +240,9 @@ def on_modified(self, event): # execute the trigger self._callback(k, v) except Exception as ee: - logger.error(f"Error with notification trigger, exception: {type(ee)} {ee}") + logger.error( + f"Error with notification trigger, exception: {type(ee)} {ee}" + ) logger.debug("", exc_info=True) # define the event handler From 1405aa767f0bf67edd69420445ae46ad635e29fc Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Wed, 2 Aug 2023 13:23:50 +0200 Subject: [PATCH 18/50] removed unnecessary blank lines --- tests/requirements-dev.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/requirements-dev.txt 
b/tests/requirements-dev.txt index 57e38af..4d9461e 100644 --- a/tests/requirements-dev.txt +++ b/tests/requirements-dev.txt @@ -6,5 +6,4 @@ debugpy black isort flake8 -tox - +tox \ No newline at end of file From 69bfc0700243ccda73df47abc231e075709c2ac0 Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Wed, 2 Aug 2023 13:30:40 +0200 Subject: [PATCH 19/50] black formatter for file-based engine --- pyaviso/engine/file_based_engine.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/pyaviso/engine/file_based_engine.py b/pyaviso/engine/file_based_engine.py index 354a1fa..85664fa 100644 --- a/pyaviso/engine/file_based_engine.py +++ b/pyaviso/engine/file_based_engine.py @@ -86,9 +86,7 @@ def read_key(k): for fp in x[2]: # any file kk: str = os.path.join(x[0], fp) read_key(kk) - if ( - not prefix - ): # we only look at the current directory, nothing deeper + if not prefix: # we only look at the current directory, nothing deeper break else: read_key(key) @@ -133,9 +131,7 @@ def delete(self, key: str, prefix: bool = True) -> List[Dict[str, bytes]]: return del_kvs - def push( - self, kvs: List[Dict[str, any]], ks_delete: List[str] = None, ttl: int = None - ) -> bool: + def push(self, kvs: List[Dict[str, any]], ks_delete: List[str] = None, ttl: int = None) -> bool: """ Method to submit a list of key-value pairs and delete a list of keys from the server as a single transaction :param kvs: List of KV pair @@ -240,9 +236,7 @@ def on_modified(self, event): # execute the trigger self._callback(k, v) except Exception as ee: - logger.error( - f"Error with notification trigger, exception: {type(ee)} {ee}" - ) + logger.error(f"Error with notification trigger, exception: {type(ee)} {ee}") logger.debug("", exc_info=True) # define the event handler From 5cb36c7f39d5798dd789565cfb2322796c2015e2 Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Wed, 2 Aug 2023 18:11:35 +0200 Subject: [PATCH 20/50] version for watchdog is set to 2.3.1 to keep python 3.6 support 
--- aviso-server/rest/requirements.txt | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aviso-server/rest/requirements.txt b/aviso-server/rest/requirements.txt index 1cef525..5c54d14 100644 --- a/aviso-server/rest/requirements.txt +++ b/aviso-server/rest/requirements.txt @@ -4,7 +4,7 @@ PyYAML>=5.1.2 python-json-logger>=0.1.11 parse>=1.12.1 requests>=2.23.0 -watchdog +watchdog==2.3.1 gunicorn>=20.0.4 flask>=1.1.2 cloudevents>=1.2.0 diff --git a/requirements.txt b/requirements.txt index e51ccd5..8aa3d72 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ PyYAML python-json-logger requests parse -watchdog +watchdog==2.3.1 cloudevents>=1.2.0 boto3 protobuf<=3.20.3 \ No newline at end of file From f36a1ef89c23b863528cacbb19789ae05582ee2e Mon Sep 17 00:00:00 2001 From: sametd Date: Thu, 14 Dec 2023 21:24:21 +0100 Subject: [PATCH 21/50] monkey patching is not necessary here --- tests/system/test_aviso.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/system/test_aviso.py b/tests/system/test_aviso.py index 888d094..c71ff1f 100644 --- a/tests/system/test_aviso.py +++ b/tests/system/test_aviso.py @@ -128,8 +128,7 @@ def caplog_for_logger(caplog): # this is needed to assert over the logging outp lo.removeHandler(caplog.handler) -def reset_previous_run(monkeypatch: pytest.MonkeyPatch): - monkeypatch.chdir(base_path()) +def reset_previous_run(): file_path = "tests/system/fixtures/received.txt" full_path = os.path.join(os.getcwd(), file_path) if os.path.exists(full_path): From afb8b8e2ed4883ee50b59b7d9d2812c0bbf47520 Mon Sep 17 00:00:00 2001 From: sametd Date: Thu, 14 Dec 2023 21:24:48 +0100 Subject: [PATCH 22/50] venv folder is added to gitignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 1dfea94..30b75ef 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,5 @@ aviso-server/rest/aviso_rest.egg-info/* 
aviso-server/auth/aviso_auth.egg-info/* aviso-server/monitoring/aviso_monitoring.egg-info/* docs/build/* +venv +.venv From cff34c72c8b407f9eb894c348987409013c95e68 Mon Sep 17 00:00:00 2001 From: sametd Date: Thu, 14 Dec 2023 21:31:19 +0100 Subject: [PATCH 23/50] tox.ini is refined and venv added to exclude --- tox.ini | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tox.ini b/tox.ini index 93f7fbb..2c84211 100644 --- a/tox.ini +++ b/tox.ini @@ -1,17 +1,17 @@ [flake8] max-line-length = 120 show-source = true -exclude = .* +exclude = .*,venv extend-ignore = E203 [isort] profile=black -skip_glob=.* +skip = venv +skip_glob = .* [tox] envlist = py36, quality [testenv] -deps = - pytest -passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY +deps = pytest +passenv = http_proxy,HTTP_PROXY,https_proxy,HTTPS_PROXY,no_proxy,NO_PROXY commands = pip install -e aviso-server/monitoring pip install -e aviso-server/rest @@ -26,5 +26,5 @@ deps = flake8 commands = isort --check . - black --check . + black --check --exclude /venv/ --exclude .* . flake8 . 
\ No newline at end of file From ce51a6a25205740104d64960d9822768fa360505 Mon Sep 17 00:00:00 2001 From: sametd Date: Thu, 14 Dec 2023 23:38:48 +0100 Subject: [PATCH 24/50] polling logic is added to watchdog --- pyaviso/engine/file_based_engine.py | 50 ++++++++++++++++------------- 1 file changed, 27 insertions(+), 23 deletions(-) diff --git a/pyaviso/engine/file_based_engine.py b/pyaviso/engine/file_based_engine.py index 85664fa..5a7cba5 100644 --- a/pyaviso/engine/file_based_engine.py +++ b/pyaviso/engine/file_based_engine.py @@ -8,6 +8,7 @@ import _thread import os +import threading import time from datetime import datetime from queue import Queue @@ -192,7 +193,7 @@ def _polling( to_date: datetime = None, ): """ - This method implements the active polling + This method implements the active polling using watchdog :param key: key to watch as a prefix :param callback: function to call if any change happen :param channel: global communication channel among threads @@ -204,8 +205,9 @@ def _polling( logger.warning("from_date option is disabled in TestMode") if to_date: logger.warning("to_date option is disabled in TestMode") + try: - # first create the directory to watch + # Create the directory to watch if it doesn't exist if not os.path.exists(key): try: os.makedirs(key, exist_ok=True) @@ -214,50 +216,52 @@ def _polling( logger.debug("", exc_info=True) return False - # define a class to handle the new events - class WatchdogHandler(FileSystemEventHandler): - def __init__(self, engine, key, callback): + # Define a class to handle the new events using watchdog + class EventHandler(FileSystemEventHandler): + def __init__(self, engine, debounce_time=0.5, polling_interval=1): super().__init__() self._engine = engine - self._key = key - self._callback = callback + self._last_processed_time = 0 + self._debounce_time = debounce_time + self._polling_interval = polling_interval def on_modified(self, event): - if not event.is_directory and event.src_path.endswith(self._key): 
+ if not event.is_directory: kvs = self._engine.pull(key=event.src_path) for kv in kvs: k = kv["key"] v = kv["value"].decode() - # skip the status if kv["key"].endswith("status"): continue logger.debug(f"Notification received for key {k}") try: - # execute the trigger - self._callback(k, v) + callback(k, v) except Exception as ee: logger.error(f"Error with notification trigger, exception: {type(ee)} {ee}") logger.debug("", exc_info=True) + time.sleep(self._polling_interval) - # define the event handler - event_handler = WatchdogHandler(engine=self, key=key, callback=callback) - - # create an observer and schedule the event handler + # Set up the observer and event handler + event_handler = EventHandler(engine=self) observer = Observer() - observer.schedule(event_handler, path=key, recursive=True) - - # start the observer in a daemon thread so we can stop it + observer.schedule(event_handler, key, recursive=True) observer.start() - # this is the stop condition + # Daemon thread to allow stopping + def t_run(): + observer.join() + + t = threading.Thread(target=t_run) + t.setDaemon(True) + t.start() + + # Stop condition while key in self._listeners: time.sleep(0.1) - - # stop the observer observer.stop() observer.join() except Exception as e: - logger.error(f"Error occurred during polling: {e}") + logger.error(f"Error while listening to key {key}, {e}") logger.debug("", exc_info=True) - _thread.interrupt_main() + _thread.interrupt_main() \ No newline at end of file From 418455ea7418833e2db9071d960f747c4c0002de Mon Sep 17 00:00:00 2001 From: sametd Date: Thu, 14 Dec 2023 23:39:30 +0100 Subject: [PATCH 25/50] logger added to test callback for better debugging --- tests/unit/test_file_based_engine.py | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/tests/unit/test_file_based_engine.py b/tests/unit/test_file_based_engine.py index 6f94b50..d09a1d0 100644 --- a/tests/unit/test_file_based_engine.py +++ 
b/tests/unit/test_file_based_engine.py @@ -40,7 +40,7 @@ def pre_post_test(test_engine): pass yield # delete all the keys at the end of the test - test_engine.delete("/tmp/aviso/test") + # test_engine.delete("/tmp/aviso/test") test_engine.stop() @@ -74,35 +74,33 @@ def test_listen(test_engine): callback_list = [] def callback(key, value): + logger.debug(f"Callback triggered for key: {key}") callback_list.append(1) - # listen to a test key + # Listen to a test key assert test_engine.listen(["/tmp/aviso/test"], callback) - # wait a fraction and check the function has been triggered - time.sleep(0.5) + time.sleep(0.5) # Increased wait time - # create independent change to the test key to trigger the notification + # Create independent change to the test key to trigger the notification kvs = [{"key": "/tmp/aviso/test/test1", "value": "1"}] assert test_engine.push(kvs) - # wait a fraction and check the function has been triggered - time.sleep(1) + time.sleep(1) # Increased wait time assert len(callback_list) == 1 - # repeat the push operation + # Repeat the push operation kvs = [{"key": "/tmp/aviso/test/test1", "value": "2"}] assert test_engine.push(kvs) - # wait a fraction and check the function has been triggered - time.sleep(1) + time.sleep(1) # Increased wait time assert len(callback_list) == 2 - # stop listening + # Stop listening resp = test_engine.stop() assert resp - # repeat the push operation - kvs = [{"key": "/tmp/aviso/test/test1", "value": "2"}] + # Repeat the push operation + kvs = [{"key": "/tmp/aviso/test/test1", "value": "3"}] assert test_engine.push(kvs) - # wait a fraction and check the function has NOT been triggered + # Wait and check that the function has NOT been triggered time.sleep(1) - assert len(callback_list) == 2 + assert len(callback_list) == 2 \ No newline at end of file From 711553c8c48ad596e6dccff08339bd501e27dae7 Mon Sep 17 00:00:00 2001 From: sametd Date: Thu, 14 Dec 2023 23:40:44 +0100 Subject: [PATCH 26/50] gitignore updated for the 
cases etcd run inside aviso folder --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 30b75ef..cab6443 100644 --- a/.gitignore +++ b/.gitignore @@ -11,3 +11,5 @@ aviso-server/monitoring/aviso_monitoring.egg-info/* docs/build/* venv .venv +build +default.etcd From c59893671802fd14462eb023485b5c10a18c7d19 Mon Sep 17 00:00:00 2001 From: sametd Date: Thu, 14 Dec 2023 23:58:48 +0100 Subject: [PATCH 27/50] new line is added at the end as suggested by flake --- pyaviso/engine/file_based_engine.py | 2 +- tests/unit/test_file_based_engine.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyaviso/engine/file_based_engine.py b/pyaviso/engine/file_based_engine.py index 5a7cba5..d57ddd6 100644 --- a/pyaviso/engine/file_based_engine.py +++ b/pyaviso/engine/file_based_engine.py @@ -264,4 +264,4 @@ def t_run(): except Exception as e: logger.error(f"Error while listening to key {key}, {e}") logger.debug("", exc_info=True) - _thread.interrupt_main() \ No newline at end of file + _thread.interrupt_main() diff --git a/tests/unit/test_file_based_engine.py b/tests/unit/test_file_based_engine.py index d09a1d0..cca6ee8 100644 --- a/tests/unit/test_file_based_engine.py +++ b/tests/unit/test_file_based_engine.py @@ -103,4 +103,4 @@ def callback(key, value): # Wait and check that the function has NOT been triggered time.sleep(1) - assert len(callback_list) == 2 \ No newline at end of file + assert len(callback_list) == 2 From 4b50f33545b887473be1275c12bc66085e9f42a5 Mon Sep 17 00:00:00 2001 From: sametd Date: Thu, 14 Dec 2023 23:59:15 +0100 Subject: [PATCH 28/50] black will ignore venv and directories starting with dot --- pyproject.toml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index e34796e..1bc5fe2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,2 +1,8 @@ [tool.black] -line-length = 120 \ No newline at end of file +line-length = 120 +exclude = ''' +/( + 
\. # Ignore directories starting with . + | venv # Ignore venv directory +)/ +''' \ No newline at end of file From 061e89515d706dbc6a8361cf8c433e8a7458f731 Mon Sep 17 00:00:00 2001 From: sametd Date: Thu, 14 Dec 2023 23:59:48 +0100 Subject: [PATCH 29/50] tox.ini improved for black for better ignore operation --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 2c84211..83f780d 100644 --- a/tox.ini +++ b/tox.ini @@ -26,5 +26,5 @@ deps = flake8 commands = isort --check . - black --check --exclude /venv/ --exclude .* . + black --check --exclude='/(\..*|venv)/' . flake8 . \ No newline at end of file From 42a4f5044bd995b20525a62d9d4143d23bc4af74 Mon Sep 17 00:00:00 2001 From: sametd Date: Fri, 15 Dec 2023 01:09:47 +0100 Subject: [PATCH 30/50] ecmwf auth was failing due to being inherited from etcd auth, instead of is instance, == is used for type checking and proper comment is added for tox to ignore this --- pyaviso/engine/etcd_grpc_engine.py | 2 +- pyaviso/engine/etcd_rest_engine.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyaviso/engine/etcd_grpc_engine.py b/pyaviso/engine/etcd_grpc_engine.py index 2e94ded..e071c00 100644 --- a/pyaviso/engine/etcd_grpc_engine.py +++ b/pyaviso/engine/etcd_grpc_engine.py @@ -33,7 +33,7 @@ def __init__(self, config: EngineConfig, auth: Auth): self._base_url = f"http://{self._host}:{self._port}/v3/" def _initialise_server(self): - if isinstance(self.auth, EtcdAuth): + if type(self.auth) == EtcdAuth: # noqa: E721 self._server = Etcd3Client( self.host, self.port, user=self.auth.username, password=self.auth.password, timeout=self.timeout ) diff --git a/pyaviso/engine/etcd_rest_engine.py b/pyaviso/engine/etcd_rest_engine.py index 32938e5..8a0c28b 100644 --- a/pyaviso/engine/etcd_rest_engine.py +++ b/pyaviso/engine/etcd_rest_engine.py @@ -247,7 +247,7 @@ def _authenticate(self) -> bool: This method authenticates the user and set the internal token, this is only done 
for Etcd authentication :return: True if successfully authenticated """ - if isinstance(self.auth, EtcdAuth): + if type(self.auth) == EtcdAuth: # noqa: E721 logger.debug(f"Authenticating user {self.auth.username}...") url = self._base_url + "auth/authenticate" From 37fd36f9881e8c8e91f41661a7bc5f4aaf02e2ce Mon Sep 17 00:00:00 2001 From: sametd Date: Wed, 20 Dec 2023 17:07:45 +0100 Subject: [PATCH 31/50] custom kube_metric support is added to monitoring --- .../monitoring/aviso_monitoring/config.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/aviso-server/monitoring/aviso_monitoring/config.py b/aviso-server/monitoring/aviso_monitoring/config.py index cc9ae9b..4c248c5 100644 --- a/aviso-server/monitoring/aviso_monitoring/config.py +++ b/aviso-server/monitoring/aviso_monitoring/config.py @@ -27,6 +27,7 @@ def __init__( aviso_auth_reporter=None, etcd_reporter=None, prometheus_reporter=None, + kube_state_metrics=None, ): try: # we build the configuration in priority order from the lower to the higher @@ -41,6 +42,7 @@ def __init__( self.aviso_auth_reporter = aviso_auth_reporter self.etcd_reporter = etcd_reporter self.prometheus_reporter = prometheus_reporter + self.kube_state_metrics = kube_state_metrics logger.debug("Loading configuration completed") @@ -113,6 +115,8 @@ def _create_default_config() -> Dict: }, } + kube_state_metrics = {"ssl_enabled": False, "token": None} + # main config config = {} config["udp_server"] = udp_server @@ -121,6 +125,7 @@ def _create_default_config() -> Dict: config["aviso_auth_reporter"] = aviso_auth_reporter config["etcd_reporter"] = etcd_reporter config["prometheus_reporter"] = prometheus_reporter + config["kube_state_metrics"] = kube_state_metrics return config def _read_env_variables(self) -> Dict: @@ -246,6 +251,23 @@ def prometheus_reporter(self, prometheus_reporter): assert pr.get("port") is not None, "prometheus_reporter port has not been configured" self._prometheus_reporter = pr + @property + def 
kube_state_metrics(self): + return self._kube_state_metrics + + @kube_state_metrics.setter + def kube_state_metrics(self, kube_state_metrics): + ksm = self._config.get("kube_state_metrics") + if kube_state_metrics is not None and ksm is not None: + Config.deep_update(ksm, kube_state_metrics) + elif kube_state_metrics is not None: + ksm = kube_state_metrics + # verify is valid + assert ksm is not None, "kube_state_metrics has not been configured" + assert ksm.get("ssl_enabled") is not None, "kube_state_metrics ssl_enabled has not been configured" + assert ksm.get("token") is not None, "kube_state_metrics token has not been configured" + self._kube_state_metrics = ksm + def __str__(self): config_string = ( f"udp_server: {self.udp_server}" @@ -254,6 +276,7 @@ def __str__(self): + f", aviso_auth_reporter: {self.aviso_auth_reporter}" + f", etcd_reporter: {self.etcd_reporter}" + f", prometheus_reporter: {self.prometheus_reporter}" + + f", kube_state_metrics: {self.kube_state_metrics}" ) return config_string From 03aebe7d27910ba46d1b3da103de22565399e6eb Mon Sep 17 00:00:00 2001 From: sametd Date: Wed, 20 Dec 2023 17:19:45 +0100 Subject: [PATCH 32/50] token should only be checked if ssl is enabled --- aviso-server/monitoring/aviso_monitoring/config.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/aviso-server/monitoring/aviso_monitoring/config.py b/aviso-server/monitoring/aviso_monitoring/config.py index 4c248c5..a6ded4d 100644 --- a/aviso-server/monitoring/aviso_monitoring/config.py +++ b/aviso-server/monitoring/aviso_monitoring/config.py @@ -184,7 +184,7 @@ def aviso_rest_reporter(self, aviso_rest_reporter): assert ar is not None, "aviso_rest_reporter has not been configured" assert ar.get("tlms") is not None, "aviso_rest_reporter tlms has not been configured" assert ar.get("enabled") is not None, "aviso_rest_reporter enabled has not been configured" - if type(ar["enabled"]) is str: + if isinstance(ar["enabled"], str): ar["enabled"] = 
ar["enabled"].casefold() == "true".casefold() assert ar.get("frequency") is not None, "aviso_rest_reporter frequency has not been configured" self._aviso_rest_reporter = ar @@ -204,7 +204,7 @@ def aviso_auth_reporter(self, aviso_auth_reporter): assert aa is not None, "aviso_auth_reporter has not been configured" assert aa.get("tlms") is not None, "aviso_auth_reporter tlms has not been configured" assert aa.get("enabled") is not None, "aviso_auth_reporter enabled has not been configured" - if type(aa["enabled"]) is str: + if isinstance(aa["enabled"], str): aa["enabled"] = aa["enabled"].casefold() == "true".casefold() assert aa.get("frequency") is not None, "aviso_auth_reporter frequency has not been configured" self._aviso_auth_reporter = aa @@ -224,7 +224,7 @@ def etcd_reporter(self, etcd_reporter): assert e is not None, "etcd_reporter has not been configured" assert e.get("tlms") is not None, "etcd_reporter tlms has not been configured" assert e.get("enabled") is not None, "etcd_reporter enabled has not been configured" - if type(e["enabled"]) is str: + if isinstance(e["enabled"], str): e["enabled"] = e["enabled"].casefold() == "true".casefold() assert e.get("frequency") is not None, "etcd_reporter frequency has not been configured" assert e.get("member_urls") is not None, "etcd_reporter member_urls has not been configured" @@ -246,7 +246,7 @@ def prometheus_reporter(self, prometheus_reporter): assert pr is not None, "prometheus_reporter has not been configured" assert pr.get("host") is not None, "prometheus_reporter host has not been configured" assert pr.get("enabled") is not None, "prometheus_reporter enabled has not been configured" - if type(pr["enabled"]) is str: + if isinstance(pr["enabled"], str): pr["enabled"] = pr["enabled"].casefold() == "true".casefold() assert pr.get("port") is not None, "prometheus_reporter port has not been configured" self._prometheus_reporter = pr @@ -265,7 +265,8 @@ def kube_state_metrics(self, kube_state_metrics): # verify is 
valid assert ksm is not None, "kube_state_metrics has not been configured" assert ksm.get("ssl_enabled") is not None, "kube_state_metrics ssl_enabled has not been configured" - assert ksm.get("token") is not None, "kube_state_metrics token has not been configured" + if ksm["ssl_enabled"]: + assert ksm.get("token") is not None, "kube_state_metrics token has not been configured" self._kube_state_metrics = ksm def __str__(self): From 7032e597549423f0d0d8ef43d441100516222885 Mon Sep 17 00:00:00 2001 From: sametd Date: Wed, 20 Dec 2023 18:06:12 +0100 Subject: [PATCH 33/50] metric token support added for retrieve metrics function --- .../aviso_monitoring/reporter/opsview_reporter.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py index 09ba543..4dd81c9 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py @@ -24,6 +24,9 @@ def __init__(self, config: Config, msg_receiver=None): self.monitor_servers = config.monitor_servers self.msg_receiver = msg_receiver self.token = {} + if config.kube_state_metrics["ssl_enabled"]: + self.metric_ssl_enabled = True + self.metric_token = config.kube_state_metrics["token"] def ms_authenticate(self, m_server): """ @@ -199,7 +202,7 @@ def aggregate_unique_counter_tlms(tlms): } return agg_tlm - def retrieve_metrics(metric_servers, req_timeout): + def retrieve_metrics(self, metric_servers, req_timeout): """ This methods retrieves the metrics provided by specific metric servers using a Prometheus interface. 
""" @@ -207,8 +210,11 @@ def retrieve_metrics(metric_servers, req_timeout): for u in metric_servers: url = u + "/metrics" logger.debug(f"Retrieving metrics from {url}...") + headers = {} try: - resp = requests.get(url, verify=False, timeout=req_timeout) + if self.metric_ssl_enabled: + headers["Authorization"] = f"Bearer {self.metric_token}" + resp = requests.get(url, verify=False, timeout=req_timeout, headers=headers) except Exception as e: logger.exception(f"Not able to get metrics from {url}, error {e}") raw_tlms[u] = None From 4089bc243a1938d3dcb24b1dd44d04faba30a592 Mon Sep 17 00:00:00 2001 From: sametd Date: Wed, 20 Dec 2023 18:31:44 +0100 Subject: [PATCH 34/50] opsviewreporter instantiated when necessary to retrieve logs --- .../aviso_monitoring/reporter/aviso_auth_reporter.py | 5 +++-- .../aviso_monitoring/reporter/aviso_rest_reporter.py | 5 +++-- .../monitoring/aviso_monitoring/reporter/etcd_reporter.py | 3 ++- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py index 7a118f8..8d50cd9 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py @@ -198,6 +198,7 @@ def __init__(self, *args, **kwargs): self.critical_t = kwargs["critical_t"] self.req_timeout = kwargs["req_timeout"] self.metric_server_url = kwargs["metric_server_url"] + self.opsview_reporter = OpsviewReporter() super().__init__(*args, **kwargs) def metric(self): @@ -209,13 +210,13 @@ def metric(self): # fetch the cluster metrics if self.metric_server_url: - metrics = OpsviewReporter.retrieve_metrics([self.metric_server_url], self.req_timeout)[ + metrics = self.opsview_reporter.retrieve_metrics([self.metric_server_url], self.req_timeout)[ self.metric_server_url ] if metrics: logger.debug(f"Processing tlm {self.metric_name}...") - av_pod = 
OpsviewReporter.read_from_metrics(metrics, pattern) + av_pod = self.opsview_reporter.read_from_metrics(metrics, pattern) if av_pod: av_pod = int(av_pod) if av_pod <= self.critical_t: diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py index da520a8..cfa1e66 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py @@ -179,6 +179,7 @@ def __init__(self, *args, **kwargs): self.critical_t = kwargs["critical_t"] self.req_timeout = kwargs["req_timeout"] self.metric_server_url = kwargs["metric_server_url"] + self.opsview_reporter = OpsviewReporter() super().__init__(*args, **kwargs) def metric(self): @@ -190,13 +191,13 @@ def metric(self): # fetch the cluster metrics if self.metric_server_url: - metrics = OpsviewReporter.retrieve_metrics([self.metric_server_url], self.req_timeout)[ + metrics = self.opsview_reporter.retrieve_metrics([self.metric_server_url], self.req_timeout)[ self.metric_server_url ] if metrics: logger.debug(f"Processing tlm {self.metric_name}...") - av_pod = OpsviewReporter.read_from_metrics(metrics, pattern) + av_pod = self.opsview_reporter.read_from_metrics(metrics, pattern) if av_pod: av_pod = int(av_pod) if av_pod <= self.critical_t: diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py index 6d6900b..bb705c7 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py @@ -23,6 +23,7 @@ def __init__(self, config, *args, **kwargs): self.req_timeout = self.etcd_config["req_timeout"] self.member_urls = self.etcd_config["member_urls"] self.tlms = self.etcd_config["tlms"] + self.opsview_reporter = OpsviewReporter() super().__init__(config, *args, **kwargs) 
def process_messages(self): @@ -33,7 +34,7 @@ def process_messages(self): logger.debug("Etcd processing metrics...") # fetch the raw tlms provided by etcd - raw_tlms = OpsviewReporter.retrieve_metrics(self.member_urls, self.req_timeout) # noqa: F841 + raw_tlms = self.opsview_reporter.retrieve_metrics(self.member_urls, self.req_timeout) # noqa: F841 # array of metric to return metrics = [] From 637b5d49b3004c5d773515f2c1b03c5436e29156 Mon Sep 17 00:00:00 2001 From: sametd Date: Wed, 20 Dec 2023 18:40:15 +0100 Subject: [PATCH 35/50] Revert "opsviewreporter instantiated when necessary to retrieve logs" This reverts commit 4089bc243a1938d3dcb24b1dd44d04faba30a592. --- .../aviso_monitoring/reporter/aviso_auth_reporter.py | 5 ++--- .../aviso_monitoring/reporter/aviso_rest_reporter.py | 5 ++--- .../monitoring/aviso_monitoring/reporter/etcd_reporter.py | 3 +-- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py index 8d50cd9..7a118f8 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py @@ -198,7 +198,6 @@ def __init__(self, *args, **kwargs): self.critical_t = kwargs["critical_t"] self.req_timeout = kwargs["req_timeout"] self.metric_server_url = kwargs["metric_server_url"] - self.opsview_reporter = OpsviewReporter() super().__init__(*args, **kwargs) def metric(self): @@ -210,13 +209,13 @@ def metric(self): # fetch the cluster metrics if self.metric_server_url: - metrics = self.opsview_reporter.retrieve_metrics([self.metric_server_url], self.req_timeout)[ + metrics = OpsviewReporter.retrieve_metrics([self.metric_server_url], self.req_timeout)[ self.metric_server_url ] if metrics: logger.debug(f"Processing tlm {self.metric_name}...") - av_pod = self.opsview_reporter.read_from_metrics(metrics, pattern) + av_pod 
= OpsviewReporter.read_from_metrics(metrics, pattern) if av_pod: av_pod = int(av_pod) if av_pod <= self.critical_t: diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py index cfa1e66..da520a8 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py @@ -179,7 +179,6 @@ def __init__(self, *args, **kwargs): self.critical_t = kwargs["critical_t"] self.req_timeout = kwargs["req_timeout"] self.metric_server_url = kwargs["metric_server_url"] - self.opsview_reporter = OpsviewReporter() super().__init__(*args, **kwargs) def metric(self): @@ -191,13 +190,13 @@ def metric(self): # fetch the cluster metrics if self.metric_server_url: - metrics = self.opsview_reporter.retrieve_metrics([self.metric_server_url], self.req_timeout)[ + metrics = OpsviewReporter.retrieve_metrics([self.metric_server_url], self.req_timeout)[ self.metric_server_url ] if metrics: logger.debug(f"Processing tlm {self.metric_name}...") - av_pod = self.opsview_reporter.read_from_metrics(metrics, pattern) + av_pod = OpsviewReporter.read_from_metrics(metrics, pattern) if av_pod: av_pod = int(av_pod) if av_pod <= self.critical_t: diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py index bb705c7..6d6900b 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py @@ -23,7 +23,6 @@ def __init__(self, config, *args, **kwargs): self.req_timeout = self.etcd_config["req_timeout"] self.member_urls = self.etcd_config["member_urls"] self.tlms = self.etcd_config["tlms"] - self.opsview_reporter = OpsviewReporter() super().__init__(config, *args, **kwargs) def process_messages(self): @@ -34,7 +33,7 @@ def 
process_messages(self): logger.debug("Etcd processing metrics...") # fetch the raw tlms provided by etcd - raw_tlms = self.opsview_reporter.retrieve_metrics(self.member_urls, self.req_timeout) # noqa: F841 + raw_tlms = OpsviewReporter.retrieve_metrics(self.member_urls, self.req_timeout) # noqa: F841 # array of metric to return metrics = [] From 3a202f7b52fcde0847c0e628a9f73799e9fb02c6 Mon Sep 17 00:00:00 2001 From: sametd Date: Wed, 20 Dec 2023 19:43:26 +0100 Subject: [PATCH 36/50] metric token support added for retrieve metrics function --- .../aviso_monitoring/reporter/opsview_reporter.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py index 4dd81c9..14d7be6 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py @@ -202,7 +202,8 @@ def aggregate_unique_counter_tlms(tlms): } return agg_tlm - def retrieve_metrics(self, metric_servers, req_timeout): + @classmethod + def retrieve_metrics(cls, metric_servers, req_timeout): """ This methods retrieves the metrics provided by specific metric servers using a Prometheus interface. 
""" @@ -212,8 +213,8 @@ def retrieve_metrics(self, metric_servers, req_timeout): logger.debug(f"Retrieving metrics from {url}...") headers = {} try: - if self.metric_ssl_enabled: - headers["Authorization"] = f"Bearer {self.metric_token}" + if cls.metric_ssl_enabled: + headers["Authorization"] = f"Bearer {cls.metric_token}" resp = requests.get(url, verify=False, timeout=req_timeout, headers=headers) except Exception as e: logger.exception(f"Not able to get metrics from {url}, error {e}") From 1026913a6e19a58ad22564af545b3c6fdaf4ca2a Mon Sep 17 00:00:00 2001 From: sametd Date: Wed, 20 Dec 2023 20:02:49 +0100 Subject: [PATCH 37/50] mitigate metric variable installation --- .../reporter/aviso_rest_reporter.py | 2 ++ .../aviso_monitoring/reporter/opsview_reporter.py | 14 ++++++++++++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py index da520a8..b0ec55b 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py @@ -18,6 +18,8 @@ def __init__(self, config, *args, **kwargs): self.frequency = aviso_rest_config["frequency"] self.enabled = aviso_rest_config["enabled"] self.tlms = aviso_rest_config["tlms"] + #configure the metric vars once only here + OpsviewReporter.configure_metric_vars(config) super().__init__(config, *args, **kwargs) def process_messages(self): diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py index 14d7be6..1dc3b6a 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py @@ -19,14 +19,24 @@ class OpsviewReporter(ABC): + metric_ssl_enabled = False + metric_token = "" + def 
__init__(self, config: Config, msg_receiver=None): urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) self.monitor_servers = config.monitor_servers self.msg_receiver = msg_receiver self.token = {} + + @classmethod + def configure_metric_vars(cls, config): + """ + Configures the class attributes based on the provided config. + """ + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) if config.kube_state_metrics["ssl_enabled"]: - self.metric_ssl_enabled = True - self.metric_token = config.kube_state_metrics["token"] + cls.metric_ssl_enabled = True + cls.metric_token = config.kube_state_metrics["token"] def ms_authenticate(self, m_server): """ From 964d4c05ddb2a991da0e02db9587064dcc83d529 Mon Sep 17 00:00:00 2001 From: sametd Date: Wed, 20 Dec 2023 22:55:31 +0100 Subject: [PATCH 38/50] namespace is now taken from the pod instead of a fixed name --- .../reporter/aviso_rest_reporter.py | 38 ++++++++++++++++++- 1 file changed, 36 insertions(+), 2 deletions(-) diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py index b0ec55b..4b6752b 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py @@ -18,7 +18,7 @@ def __init__(self, config, *args, **kwargs): self.frequency = aviso_rest_config["frequency"] self.enabled = aviso_rest_config["enabled"] self.tlms = aviso_rest_config["tlms"] - #configure the metric vars once only here + # configure the metric vars once only here OpsviewReporter.configure_metric_vars(config) super().__init__(config, *args, **kwargs) @@ -184,7 +184,14 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def metric(self): - pattern = r'kube_deployment_status_replicas{namespace="aviso",deployment="aviso-rest-\w+"}' + namespace = self.get_k8s_pod_namespace() + if namespace: + 
logger.info(f"The pod is running in the '{namespace}' namespace.") + else: + logger.warning("Could not determine the pod's namespace.") + namespace = "aviso" + + pattern = rf'kube_deployment_status_replicas{{namespace="{namespace}",deployment="aviso-rest-\w+"}}' # defaults status = 0 message = "All pods available" @@ -226,3 +233,30 @@ def metric(self): m_status = {"name": self.metric_name, "status": 1, "message": "Metric could not be retrieved"} logger.debug(f"{self.metric_name} metric: {m_status}") return m_status + + @staticmethod + def get_k8s_pod_namespace(): + """ + Retrieves the Kubernetes (k8s) namespace in which the current pod is running. + + This function reads the namespace name from a file that Kubernetes automatically + mounts inside the pod. This file is typically located at: + '/var/run/secrets/kubernetes.io/serviceaccount/namespace' + + Returns: + str: The namespace in which the pod is running. If the namespace cannot be determined + (e.g., the file doesn't exist or the pod is not running in a k8s environment), + the function returns None. 
+ """ + namespace_file = "/var/run/secrets/kubernetes.io/serviceaccount/namespace" + try: + with open(namespace_file, "r") as file: + return file.read().strip() + except FileNotFoundError: + logger.error(f"Namespace file not found: {namespace_file}") + except IOError as e: + logger.error(f"I/O error occurred when reading namespace file: {e}") + except Exception as e: + logger.exception(f"Unexpected error occurred when reading namespace file: {e}") + + return None From 884fd4f9aa9c069e4c318ebc343f72af3b249f40 Mon Sep 17 00:00:00 2001 From: sametd Date: Wed, 20 Dec 2023 23:03:23 +0100 Subject: [PATCH 39/50] namespace is now taken from the pod instead of a fixed name --- .../reporter/aviso_auth_reporter.py | 36 ++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py index 7a118f8..2a2fc97 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py @@ -201,7 +201,14 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def metric(self): - pattern = r'kube_deployment_status_replicas{namespace="aviso",deployment="aviso-auth-\w+"}' + namespace = self.get_k8s_pod_namespace() + if namespace: + logger.info(f"The pod is running in the '{namespace}' namespace.") + else: + logger.warning("Could not determine the pod's namespace.") + namespace = "aviso" + + pattern = rf'kube_deployment_status_replicas{{namespace="{namespace}",deployment="aviso-auth-\w+"}}' # defaults status = 0 message = "All pods available" @@ -243,3 +250,30 @@ def metric(self): m_status = {"name": self.metric_name, "status": 1, "message": "Metric could not be retrieved"} logger.debug(f"{self.metric_name} metric: {m_status}") return m_status + + @staticmethod + def get_k8s_pod_namespace(): + """ + Retrieves the 
Kubernetes (k8s) namespace in which the current pod is running. + + This function reads the namespace name from a file that Kubernetes automatically + mounts inside the pod. This file is typically located at: + '/var/run/secrets/kubernetes.io/serviceaccount/namespace' + + Returns: + str: The namespace in which the pod is running. If the namespace cannot be determined + (e.g., the file doesn't exist or the pod is not running in a k8s environment), + the function returns None. + """ + namespace_file = "/var/run/secrets/kubernetes.io/serviceaccount/namespace" + try: + with open(namespace_file, "r") as file: + return file.read().strip() + except FileNotFoundError: + logger.error(f"Namespace file not found: {namespace_file}") + except IOError as e: + logger.error(f"I/O error occurred when reading namespace file: {e}") + except Exception as e: + logger.exception(f"Unexpected error occurred when reading namespace file: {e}") + + return None From 7675c3c9eafb32f008d40c8812ba99517094662b Mon Sep 17 00:00:00 2001 From: sametd Date: Wed, 20 Dec 2023 23:28:27 +0100 Subject: [PATCH 40/50] name correction for the metric --- .../monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py | 2 +- .../monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py index 2a2fc97..c34022b 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py @@ -208,7 +208,7 @@ def metric(self): logger.warning("Could not determine the pod's namespace.") namespace = "aviso" - pattern = rf'kube_deployment_status_replicas{{namespace="{namespace}",deployment="aviso-auth-\w+"}}' + pattern = rf'kube_deployment_status_replicas{{namespace="{namespace}",deployment="aviso-auth"}}' # defaults 
status = 0 message = "All pods available" diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py index 4b6752b..91f2f45 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py @@ -191,7 +191,7 @@ def metric(self): logger.warning("Could not determine the pod's namespace.") namespace = "aviso" - pattern = rf'kube_deployment_status_replicas{{namespace="{namespace}",deployment="aviso-rest-\w+"}}' + pattern = rf'kube_deployment_status_replicas{{namespace="{namespace}",deployment="aviso-rest"}}' # defaults status = 0 message = "All pods available" From 9478e9ff63f4814952ef1925ed97d9a41d4e0227 Mon Sep 17 00:00:00 2001 From: sametd Date: Wed, 20 Dec 2023 23:34:29 +0100 Subject: [PATCH 41/50] unnecessary info log is removed, otherwise same log would be written over and over --- .../aviso_monitoring/reporter/aviso_auth_reporter.py | 4 +--- .../aviso_monitoring/reporter/aviso_rest_reporter.py | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py index c34022b..6526930 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py @@ -202,9 +202,7 @@ def __init__(self, *args, **kwargs): def metric(self): namespace = self.get_k8s_pod_namespace() - if namespace: - logger.info(f"The pod is running in the '{namespace}' namespace.") - else: + if not namespace: logger.warning("Could not determine the pod's namespace.") namespace = "aviso" diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py index 
91f2f45..384f59d 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py @@ -185,9 +185,7 @@ def __init__(self, *args, **kwargs): def metric(self): namespace = self.get_k8s_pod_namespace() - if namespace: - logger.info(f"The pod is running in the '{namespace}' namespace.") - else: + if not namespace: logger.warning("Could not determine the pod's namespace.") namespace = "aviso" From 611c2ce741bfcdf0efe70bc720baa1a9eccd4b5c Mon Sep 17 00:00:00 2001 From: sametd Date: Thu, 21 Dec 2023 11:36:37 +0100 Subject: [PATCH 42/50] rename the token variable --- .../aviso_monitoring/reporter/opsview_reporter.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py index 1dc3b6a..5d9415d 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py @@ -19,7 +19,7 @@ class OpsviewReporter(ABC): - metric_ssl_enabled = False + metric_token_enabled = False metric_token = "" def __init__(self, config: Config, msg_receiver=None): @@ -34,8 +34,8 @@ def configure_metric_vars(cls, config): Configures the class attributes based on the provided config. 
""" urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) - if config.kube_state_metrics["ssl_enabled"]: - cls.metric_ssl_enabled = True + if config.kube_state_metrics["token_enabled"]: + cls.metric_token_enabled = True cls.metric_token = config.kube_state_metrics["token"] def ms_authenticate(self, m_server): @@ -223,7 +223,7 @@ def retrieve_metrics(cls, metric_servers, req_timeout): logger.debug(f"Retrieving metrics from {url}...") headers = {} try: - if cls.metric_ssl_enabled: + if cls.metric_token_enabled: headers["Authorization"] = f"Bearer {cls.metric_token}" resp = requests.get(url, verify=False, timeout=req_timeout, headers=headers) except Exception as e: From 5b3070196957f8337ef51dd787edf115b88f43e9 Mon Sep 17 00:00:00 2001 From: sametd Date: Thu, 21 Dec 2023 11:38:03 +0100 Subject: [PATCH 43/50] migration is removed from admin as a better migration schema introduced in deployment --- .../admin/migration/etcd_migration.py | 178 ------------------ 1 file changed, 178 deletions(-) delete mode 100644 aviso-server/admin/migration/etcd_migration.py diff --git a/aviso-server/admin/migration/etcd_migration.py b/aviso-server/admin/migration/etcd_migration.py deleted file mode 100644 index 326f03f..0000000 --- a/aviso-server/admin/migration/etcd_migration.py +++ /dev/null @@ -1,178 +0,0 @@ -# (C) Copyright 1996- ECMWF. -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. -# In applying this licence, ECMWF does not waive the privileges and immunities -# granted to it by virtue of its status as an intergovernmental organisation -# nor does it submit to any jurisdiction. 
- -import base64 -from typing import Dict - -import requests - -old_etcd: str = "__OLD_ETCD_ADDRESS__:PORT" -new_etcd: str = "__NEW_ETCD_ADDRESS__:PORT" -from_revision: int = 145679891 -MAX_KV_RETURNED: int = 10000 - - -def push_kvpairs(etcd_repo, kvs): - """ - Submit key-value pairs to the etcd_repo - :param etcd_repo - :param kvs - :return: True if completed - """ - - print(f"Pushing key-value pairs to {etcd_repo} ...") - - url = etcd_repo + "/v3/kv/txn" - - ops = [] - # Prepare the transaction with a put operation for each KV pair - for kv in kvs: - k = encode_to_str_base64(kv["key"]) - v = encode_to_str_base64(kv["value"]) - put = {"requestPut": {"key": k, "value": v}} - ops.append(put) - - body = {"success": ops} - - # commit transaction - resp = requests.post(url, json=body) - resp.raise_for_status() - - print("Operation completed") - - resp_body = resp.json() - # read the header - if "header" in resp_body: - h = resp_body["header"] - rev = int(h["revision"]) - print(f"New server revision {rev}") - - return True - - -def pull_kvpairs(etcd_repo, revision): - """ - Retrieve key-value pairs newer than the revision number from the etcd_repo - :param etcd_repo - :param revision - :return: kv pairs as dictionary - """ - main_key = "/ec/" - - old_etcd_url = etcd_repo + "/v3/kv/range" - - print(f"Getting key-value pairs to {etcd_repo} newer than {revision} ...") - - range_end = encode_to_str_base64(str(incr_last_byte(main_key), "utf-8")) - - # encode key - encoded_key = encode_to_str_base64(main_key) - - # create the body for the get range on the etcd sever, order them newest first - body = { - "key": encoded_key, - "range_end": range_end, - "limit": MAX_KV_RETURNED, - "sort_order": "DESCEND", - "sort_target": "KEY", - "min_mod_revision": revision, - } - # make the call - # print(f"Pull request: {body}") - - # start an infinite loop of request if the server side is unreachable - resp = requests.post(old_etcd_url, json=body) - resp.raise_for_status() - - 
print("Retrieval completed") - - # parse the result to return just key-value pairs - new_kvs = [] - resp_body = resp.json() - if "kvs" in resp_body: - print("Building key-value list") - for kv in resp_body["kvs"]: - new_kv = parse_raw_kv(kv, False) - new_kvs.append(new_kv) - print(f"Key: {new_kv['key']} pulled successfully") - - print(f"{len(new_kvs)} keys found") - return new_kvs - - -def parse_raw_kv(kv: Dict[str, any], key_only: bool = False) -> Dict[str, any]: - """ - Internal method to translate the kv pair coming from the etcd server into a dictionary that fits better this - application - :param kv: raw kv pair from the etcd server - :param key_only: - :return: translated kv pair as dictionary - """ - new_kv = {} - if not key_only: - new_kv["value"] = decode_to_bytes(kv["value"]) # leave it as binary - new_kv["key"] = decode_to_bytes(kv["key"]).decode() - new_kv["version"] = int(kv["version"]) - new_kv["create_rev"] = int(kv["create_revision"]) - new_kv["mod_rev"] = int(kv["mod_revision"]) - return new_kv - - -def encode_to_str_base64(obj: any) -> str: - """ - Internal method to translate the object passed in a field that could be accepted by etcd and the request library - for the key or value. The request library accepts only strings encoded in base64 while etcd wants binaries for - the key and value fields. - :param obj: - :return: a base64 string representation of the binary translation - """ - if type(obj) is bytes: - binary = obj - elif type(obj) is str: - binary = obj.encode() - else: - binary = str(obj).encode() - - return str(base64.b64encode(binary), "utf-8") - - -def decode_to_bytes(string: str) -> any: - """ - Internal method to translate what is coming back from the notification server. 
- The request library returns only string base64 encoded - :param string: - :return: the payload decoded from the base64 string representation - """ - return base64.decodebytes(string.encode()) - - -def incr_last_byte(path: str) -> bytes: - """ - This function determines the end of the range required for a range call with the etcd3 API - By incrementing the last byte of the input path, it allows to make a range call describing the input - path as a branch rather than a leaf path. - - :param path: the path representing the start of the range - :return: the path representing the end of the range - """ - bytes_types = (bytes, bytearray) - if not isinstance(path, bytes_types): - if not isinstance(path, str): - path = str(path) - path = path.encode("utf-8") - s = bytearray(path) - # increment the last byte - s[-1] = s[-1] + 1 - return bytes(s) - - -# first get the key-value pairs from the old repo -kvs = pull_kvpairs(old_etcd, from_revision) - -# send them to the new repo -completed = push_kvpairs(new_etcd, kvs) From d1d115ddfd021df5fae9c069e333f9710b7b9704 Mon Sep 17 00:00:00 2001 From: sametd Date: Thu, 21 Dec 2023 11:46:26 +0100 Subject: [PATCH 44/50] better readibility and seperation of duties --- aviso-server/admin/aviso_admin/admin.py | 94 ++++++++++++++++--------- 1 file changed, 59 insertions(+), 35 deletions(-) diff --git a/aviso-server/admin/aviso_admin/admin.py b/aviso-server/admin/aviso_admin/admin.py index d53902b..71a9c1b 100644 --- a/aviso-server/admin/aviso_admin/admin.py +++ b/aviso-server/admin/aviso_admin/admin.py @@ -22,53 +22,77 @@ from aviso_monitoring.udp_server import UdpServer -def main(): - # load the configuration - config = Config() - logger.info(f"Running Aviso-admin v.{__version__}") - logger.info(f"aviso_monitoring module v.{monitoring_version}") - logger.info(f"Configuration loaded: {config}") - - # instantiate the compactor and cleaner +def setup_compactor_and_cleaner(config): + """Sets up the compactor and cleaner with scheduling.""" 
compactor = Compactor(config.compactor) - cleaner = Cleaner(config.cleaner) - - # Every day at scheduled time run the compactor if compactor.enabled: schedule.every().day.at(config.compactor["scheduled_time"]).do(compactor.run) - # Every day at scheduled time run the cleaner + cleaner = Cleaner(config.cleaner) if cleaner.enabled: schedule.every().day.at(config.cleaner["scheduled_time"]).do(cleaner.run) - # create the UDP server - receiver = Receiver() - udp_server = UdpServer(config.monitoring.udp_server, receiver) - udp_server.start() - - # schedule reporters - rest_reporter = AvisoRestReporter(config.monitoring, receiver) - if rest_reporter.enabled: - schedule.every(rest_reporter.frequency).minutes.do(rest_reporter.run) - auth_reporter = AvisoAuthReporter(config.monitoring, receiver) - if auth_reporter.enabled: - schedule.every(auth_reporter.frequency).minutes.do(auth_reporter.run) - etcd_reporter = EtcdReporter(config.monitoring, receiver) - if etcd_reporter.enabled: - schedule.every(etcd_reporter.frequency).minutes.do(etcd_reporter.run) - - # launch the prometheus reporter, this expose some tlms to /metrics + +def setup_udp_server(config, receiver): + """Initializes and starts the UDP server.""" + try: + udp_server = UdpServer(config.monitoring.udp_server, receiver) + udp_server.start() + return udp_server + except Exception as e: + logger.exception("Failed to start UDP Server: %s", e) + + +def schedule_reporters(config, receiver): + """Schedules various reporters based on the configuration.""" + for reporter_class in [AvisoRestReporter, AvisoAuthReporter, EtcdReporter]: + reporter = reporter_class(config.monitoring, receiver) + if reporter.enabled: + schedule.every(reporter.frequency).minutes.do(reporter.run) + + +def start_prometheus_reporter(config, receiver): + """Starts the Prometheus reporter if enabled.""" prometheus_reporter = PrometheusReporter(config.monitoring, receiver) if prometheus_reporter.enabled: prometheus_reporter.start() - # Loop so that the 
scheduling task keeps on running all time. - while True: - # Checks whether a scheduled task is pending to run or not - schedule.run_pending() - time.sleep(30) + +def main(): + """Main function to run the application.""" + # Load the configuration + config = Config() + logger.info(f"Running Aviso-admin v.{__version__}") + logger.info(f"aviso_monitoring module v.{monitoring_version}") + logger.debug(f"Configuration loaded: {config}") + + # Set up compactor and cleaner + setup_compactor_and_cleaner(config) + + # Create the UDP server + receiver = Receiver() + udp_server = setup_udp_server(config, receiver) + + # Schedule reporters + schedule_reporters(config, receiver) + + # Start Prometheus reporter + start_prometheus_reporter(config, receiver) + + # Main loop for running scheduled tasks + try: + while True: + schedule.run_pending() + time.sleep(30) + except KeyboardInterrupt: + logger.info("Application stopped by user.") + except Exception as e: + logger.exception("Unexpected error occurred: %s", e) + finally: + if udp_server: + udp_server.stop() # Assuming a method to gracefully stop the UDP server + logger.info("Application shutdown.") -# when running directly from this file if __name__ == "__main__": main() From 20c4eebe75fe2df58cec17e985a97ec0e3b59e43 Mon Sep 17 00:00:00 2001 From: sametd Date: Thu, 21 Dec 2023 11:56:02 +0100 Subject: [PATCH 45/50] info level is better suited for config --- aviso-server/admin/aviso_admin/admin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aviso-server/admin/aviso_admin/admin.py b/aviso-server/admin/aviso_admin/admin.py index 71a9c1b..ccacce4 100644 --- a/aviso-server/admin/aviso_admin/admin.py +++ b/aviso-server/admin/aviso_admin/admin.py @@ -64,7 +64,7 @@ def main(): config = Config() logger.info(f"Running Aviso-admin v.{__version__}") logger.info(f"aviso_monitoring module v.{monitoring_version}") - logger.debug(f"Configuration loaded: {config}") + logger.info(f"Configuration loaded: {config}") # Set up 
compactor and cleaner setup_compactor_and_cleaner(config) From e0eacd295c5551ec9d49c6f516baad6e7ff8fffe Mon Sep 17 00:00:00 2001 From: sametd Date: Mon, 8 Jan 2024 02:27:06 +0100 Subject: [PATCH 46/50] better representations for user config --- pyaviso/user_config.py | 72 +++++++++++++++++++++++------------------- 1 file changed, 40 insertions(+), 32 deletions(-) diff --git a/pyaviso/user_config.py b/pyaviso/user_config.py index 3e3409b..1b0d696 100644 --- a/pyaviso/user_config.py +++ b/pyaviso/user_config.py @@ -67,19 +67,23 @@ def __init__( self.automatic_retry_delay = automatic_retry_delay def __str__(self): - config_string = ( - f"host: {self.host}" - + f", port: {self.port}" - + f", https: {self.https}" - + f", type: {self.type.name}" - + f", polling_interval: {self.polling_interval}" - + f", timeout: {self.timeout}" - + f", max_file_size: {self.max_file_size}" - + f", service: {self.service}" - + f", catchup: {self.catchup}" - + f", automatic_retry_delay: {self.automatic_retry_delay}" - ) - return config_string + config_items = [ + f"host: {self.host}", + f"port: {self.port}", + f"https: {self.https}", + f"type: {self.type.name}", + f"polling_interval: {self.polling_interval}", + f"timeout: {self.timeout}", + f"max_file_size: {self.max_file_size}", + f"service: {self.service}", + f"catchup: {self.catchup}", + f"automatic_retry_delay: {self.automatic_retry_delay}", + ] + config_string = "\n".join(config_items) + return f"Engine Configuration:\n{config_string}" + + def __repr__(self): + return f"{self.__class__.__name__}({self.__str__()})" class UserConfig: @@ -429,7 +433,7 @@ def configuration_engine(self, configuration_engine: Dict[str, any]): assert "max_file_size" in ce, "configuration_engine max_file_size has not been configured" assert "timeout" in ce, "configuration_engine timeout has not been configured" assert "automatic_retry_delay" in ce, "configuration_engine automatic_retry_delay has not been configured" - if type(ce["https"]) is str: + if 
isinstance(ce["https"], str): ce["https"] = ce["https"].casefold() == "true".casefold() # exclude file_based from the options for the configuration engine @@ -460,7 +464,7 @@ def remote_schema(self) -> bool: @remote_schema.setter def remote_schema(self, remote_schema: str): self._remote_schema = self._configure_property(remote_schema, "remote_schema") - if type(self._remote_schema) is str: + if isinstance(self._remote_schema, str): self._remote_schema = self._remote_schema.casefold() == "true".casefold() @property @@ -518,7 +522,7 @@ def no_fail(self) -> bool: @no_fail.setter def no_fail(self, no_fail: str): self._no_fail = self._configure_property(no_fail, "no_fail") - if type(self._no_fail) is str: + if isinstance(self._no_fail, str): self._no_fail = self._no_fail.casefold() == "true".casefold() if self.no_fail: # define a function to run at exit @@ -535,7 +539,7 @@ def debug(self) -> bool: @debug.setter def debug(self, debug: any): self._debug = self._configure_property(debug, "debug") - if type(self._debug) is str: + if isinstance(self._debug, str): self._debug = self._debug.casefold() == "true".casefold() if self._debug: logging_level = logging.DEBUG @@ -556,7 +560,7 @@ def quiet(self) -> bool: @quiet.setter def quiet(self, quiet: any): self._quiet = self._configure_property(quiet, "quiet") - if type(self._quiet) is str: + if isinstance(self._quiet, str): self._quiet = self._quiet.casefold() == "true".casefold() if self._quiet: logging_level = logging.ERROR @@ -569,20 +573,24 @@ def quiet(self, quiet: any): pass def __str__(self): - config_string = ( - f"notification_engine: {self.notification_engine}" - + f", configuration_engine: {self.configuration_engine}" - + f", auth_type: {self.auth_type}" - + f", debug: {self.debug}" - + f", quiet: {self.quiet}" - + f", username: {self.username}" - + f", username_file: {self.username_file}" - + f", no_fail: {self.no_fail}" - + f", key_ttl: {self.key_ttl}" - + f", schema_parser: {self.schema_parser}" - + f", 
remote_schema: {self.remote_schema}" - ) - return config_string + config_items = [ + f"notification_engine: {self.notification_engine}", + f"configuration_engine: {self.configuration_engine}", + f"auth_type: {self.auth_type}", + f"debug: {self.debug}", + f"quiet: {self.quiet}", + f"username: {self.username}", + f"username_file: {self.username_file}", + f"no_fail: {self.no_fail}", + f"key_ttl: {self.key_ttl}", + f"schema_parser: {self.schema_parser}", + f"remote_schema: {self.remote_schema}", + ] + config_string = "\n".join(config_items) + return f"User Configuration:\n{config_string}" + + def __repr__(self): + return f"{self.__class__.__name__}({self.__str__()})" def _configure_default_log(self): try: From eec9c740d8390884aa84eb95f2a3a1a905aa66f0 Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Tue, 9 Jan 2024 16:00:23 +0000 Subject: [PATCH 47/50] version bumps --- aviso-server/admin/aviso_admin/__init__.py | 2 +- aviso-server/auth/aviso_auth/__init__.py | 2 +- aviso-server/monitoring/aviso_monitoring/__init__.py | 2 +- aviso-server/rest/aviso_rest/__init__.py | 2 +- pyaviso/version.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/aviso-server/admin/aviso_admin/__init__.py b/aviso-server/admin/aviso_admin/__init__.py index f4943f8..503609f 100644 --- a/aviso-server/admin/aviso_admin/__init__.py +++ b/aviso-server/admin/aviso_admin/__init__.py @@ -9,7 +9,7 @@ import logging # version number for the application. -__version__ = "0.4.1" +__version__ = "0.5.0" # setting application logger logger = logging.getLogger("aviso-admin") diff --git a/aviso-server/auth/aviso_auth/__init__.py b/aviso-server/auth/aviso_auth/__init__.py index eb1081e..72fff2b 100644 --- a/aviso-server/auth/aviso_auth/__init__.py +++ b/aviso-server/auth/aviso_auth/__init__.py @@ -9,7 +9,7 @@ import logging # version number for the application. 
-__version__ = "0.3.1" +__version__ = "0.4.0" # setting application logger logger = logging.getLogger("aviso-auth") diff --git a/aviso-server/monitoring/aviso_monitoring/__init__.py b/aviso-server/monitoring/aviso_monitoring/__init__.py index a1c7008..62cd9ef 100644 --- a/aviso-server/monitoring/aviso_monitoring/__init__.py +++ b/aviso-server/monitoring/aviso_monitoring/__init__.py @@ -9,7 +9,7 @@ import logging # version number for the application. -__version__ = "0.5.0" +__version__ = "0.6.0" logger = logging.getLogger("aviso-monitoring") logger.setLevel(logging.DEBUG) diff --git a/aviso-server/rest/aviso_rest/__init__.py b/aviso-server/rest/aviso_rest/__init__.py index 29fe3cf..ecb54a3 100644 --- a/aviso-server/rest/aviso_rest/__init__.py +++ b/aviso-server/rest/aviso_rest/__init__.py @@ -9,7 +9,7 @@ import logging # version number for the application. -__version__ = "0.3.3" +__version__ = "0.4.0" # setting application logger logger = logging.getLogger("aviso-rest") diff --git a/pyaviso/version.py b/pyaviso/version.py index 5c9a452..923280c 100644 --- a/pyaviso/version.py +++ b/pyaviso/version.py @@ -1,2 +1,2 @@ # version number for the application -__version__ = "0.11.1" +__version__ = "1.0.0" From b2ea1aeb501fe0a68c577b23892ed179e3e67a17 Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Tue, 9 Jan 2024 16:00:58 +0000 Subject: [PATCH 48/50] documentation updates for whats new and ecmwf usage --- docs/source/contributing/whats_new.rst | 19 +++++++++++++++++++ docs/source/guide/aviso_ecmwf.rst | 6 ++++++ 2 files changed, 25 insertions(+) diff --git a/docs/source/contributing/whats_new.rst b/docs/source/contributing/whats_new.rst index eecfd94..95175ff 100644 --- a/docs/source/contributing/whats_new.rst +++ b/docs/source/contributing/whats_new.rst @@ -5,6 +5,25 @@ What's New ============== +Version 1.0.0 (09 January 2024) +-------------------------------- + +- **File-based Engine Update** + The backend for the file-based engine has been upgraded from Pyinotify to 
Watchdog, enhancing performance and reliability. + +- **MacOS Support** + Version 1.0.0 introduces initial support for MacOS, broadening the platform compatibility. + +- **Test Enhancements** + Tests have been improved to utilize relative paths, increasing the robustness and portability of the testing process. + +- **Prometheus Metrics** + Added token support for Prometheus metrics. + +- **Kubernetes Compatibility** + Enhancements in aviso-monitoring to offer better support for Kubernetes environments. + + v0.11.1 (02 February 2022) -------------------------- diff --git a/docs/source/guide/aviso_ecmwf.rst b/docs/source/guide/aviso_ecmwf.rst index c794f9c..06f9cda 100644 --- a/docs/source/guide/aviso_ecmwf.rst +++ b/docs/source/guide/aviso_ecmwf.rst @@ -11,6 +11,12 @@ This section presents how Aviso has been configured and deployed at ECMWF. This __ https://www.ecmwf.int/en/about/contact-us +.. note:: + + The `aviso-examples`_ repository includes helpful scripts and configuration files as examples for using aviso with ECMWF notifications. + +.. 
_aviso-examples: https://github.com/ecmwf/aviso-examples + ECMWF Aviso service ------------------- From e7f6357e06190b91dfdd68a6298687f407ca5705 Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Tue, 9 Jan 2024 16:01:19 +0000 Subject: [PATCH 49/50] a minor change for post trigger message --- pyaviso/triggers/post_trigger.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyaviso/triggers/post_trigger.py b/pyaviso/triggers/post_trigger.py index fc821cf..3158c81 100644 --- a/pyaviso/triggers/post_trigger.py +++ b/pyaviso/triggers/post_trigger.py @@ -48,7 +48,7 @@ def __init__(self, notification: Dict, params: Dict): self.protocol = ProtocolType[protocol_params.get("type").lower()].get_class()(notification, protocol_params) def execute(self): - logger.info("Starting Post Trigger for (params.get('protocol'))...'") + logger.info("Starting Post Trigger...'") # execute the specific protocol self.protocol.execute() From 444f78b197f4e31de142b3c35cf4992fb5b5fc62 Mon Sep 17 00:00:00 2001 From: Samet Demir Date: Tue, 9 Jan 2024 16:04:12 +0000 Subject: [PATCH 50/50] ip fix for auth log --- aviso-server/auth/aviso_auth/frontend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aviso-server/auth/aviso_auth/frontend.py b/aviso-server/auth/aviso_auth/frontend.py index 9a08473..3657bec 100644 --- a/aviso-server/auth/aviso_auth/frontend.py +++ b/aviso-server/auth/aviso_auth/frontend.py @@ -110,7 +110,7 @@ def index(): @handler.route(self.config.backend["route"], methods=["POST"]) def root(): - logger.info(f"New request received from {request.remote_addr}, content: {request.data}") + logger.info(f"New request received from {request.headers.get('X-Forwarded-For')}, content: {request.data}") resp_content = timed_process_request()