diff --git a/.github/workflows/check.yaml b/.github/workflows/check.yaml index e79e6ce..5969e1c 100644 --- a/.github/workflows/check.yaml +++ b/.github/workflows/check.yaml @@ -4,6 +4,7 @@ on: workflow_dispatch: push: + pull_request: branches: [develop] jobs: @@ -23,8 +24,8 @@ jobs: strategy: fail-fast: false matrix: - platform: ["ubuntu-latest"] - python-version: ["3.6", "3.7", "3.8", "3.9"] + platform: ["ubuntu-20.04","macos-13"] + python-version: ["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"] name: Python ${{ matrix.python-version }} on ${{ matrix.platform }} runs-on: ${{ matrix.platform }} @@ -36,7 +37,7 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Install etcd + - name: Install etcd for linux run: | ETCD_VER=v3.4.14 DOWNLOAD_URL=https://github.com/etcd-io/etcd/releases/download @@ -44,6 +45,18 @@ jobs: mkdir /tmp/etcd-download-test tar xzvf /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz -C /tmp/etcd-download-test --strip-components=1 /tmp/etcd-download-test/etcd & + if: runner.os == 'Linux' + + - name: Install etcd for macos + run: | + ETCD_VER=v3.4.14 + DOWNLOAD_URL=https://github.com/etcd-io/etcd/releases/download + curl -L ${DOWNLOAD_URL}/${ETCD_VER}/etcd-${ETCD_VER}-darwin-amd64.zip -o /tmp/etcd-${ETCD_VER}-darwin-amd64.zip + mkdir /tmp/etcd-download-test + unzip /tmp/etcd-${ETCD_VER}-darwin-amd64.zip -d /tmp + mv /tmp/etcd-${ETCD_VER}-darwin-amd64/* /tmp/etcd-download-test + /tmp/etcd-download-test/etcd & + if: runner.os == 'macOS' - name: Install packages run: | diff --git a/.gitignore b/.gitignore index 1dfea94..cab6443 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,7 @@ aviso-server/rest/aviso_rest.egg-info/* aviso-server/auth/aviso_auth.egg-info/* aviso-server/monitoring/aviso_monitoring.egg-info/* docs/build/* +venv +.venv +build +default.etcd diff --git a/aviso-server/admin/aviso_admin/__init__.py b/aviso-server/admin/aviso_admin/__init__.py index f4943f8..503609f 100644 --- a/aviso-server/admin/aviso_admin/__init__.py +++ b/aviso-server/admin/aviso_admin/__init__.py @@ -9,7 +9,7 @@ import logging # version number for the application. 
-__version__ = "0.4.1" +__version__ = "0.5.0" # setting application logger logger = logging.getLogger("aviso-admin") diff --git a/aviso-server/admin/aviso_admin/admin.py b/aviso-server/admin/aviso_admin/admin.py index d53902b..ccacce4 100644 --- a/aviso-server/admin/aviso_admin/admin.py +++ b/aviso-server/admin/aviso_admin/admin.py @@ -22,53 +22,77 @@ from aviso_monitoring.udp_server import UdpServer -def main(): - # load the configuration - config = Config() - logger.info(f"Running Aviso-admin v.{__version__}") - logger.info(f"aviso_monitoring module v.{monitoring_version}") - logger.info(f"Configuration loaded: {config}") - - # instantiate the compactor and cleaner +def setup_compactor_and_cleaner(config): + """Sets up the compactor and cleaner with scheduling.""" compactor = Compactor(config.compactor) - cleaner = Cleaner(config.cleaner) - - # Every day at scheduled time run the compactor if compactor.enabled: schedule.every().day.at(config.compactor["scheduled_time"]).do(compactor.run) - # Every day at scheduled time run the cleaner + cleaner = Cleaner(config.cleaner) if cleaner.enabled: schedule.every().day.at(config.cleaner["scheduled_time"]).do(cleaner.run) - # create the UDP server - receiver = Receiver() - udp_server = UdpServer(config.monitoring.udp_server, receiver) - udp_server.start() - - # schedule reporters - rest_reporter = AvisoRestReporter(config.monitoring, receiver) - if rest_reporter.enabled: - schedule.every(rest_reporter.frequency).minutes.do(rest_reporter.run) - auth_reporter = AvisoAuthReporter(config.monitoring, receiver) - if auth_reporter.enabled: - schedule.every(auth_reporter.frequency).minutes.do(auth_reporter.run) - etcd_reporter = EtcdReporter(config.monitoring, receiver) - if etcd_reporter.enabled: - schedule.every(etcd_reporter.frequency).minutes.do(etcd_reporter.run) - - # launch the prometheus reporter, this expose some tlms to /metrics + +def setup_udp_server(config, receiver): + """Initializes and starts the UDP server.""" + try: + udp_server = UdpServer(config.monitoring.udp_server, receiver) + udp_server.start() + return udp_server + except Exception as e: + logger.exception("Failed to start UDP Server: %s", e) + + +def schedule_reporters(config, receiver): + """Schedules various reporters based on the configuration.""" + for reporter_class in [AvisoRestReporter, AvisoAuthReporter, EtcdReporter]: + reporter = reporter_class(config.monitoring, receiver) + if reporter.enabled: + schedule.every(reporter.frequency).minutes.do(reporter.run) + + +def start_prometheus_reporter(config, receiver): + """Starts the Prometheus reporter if enabled.""" prometheus_reporter = PrometheusReporter(config.monitoring, receiver) if prometheus_reporter.enabled: prometheus_reporter.start() - # Loop so that the scheduling task keeps on running all time. 
- while True: - # Checks whether a scheduled task is pending to run or not - schedule.run_pending() - time.sleep(30) + +def main(): + """Main function to run the application.""" + # Load the configuration + config = Config() + logger.info(f"Running Aviso-admin v.{__version__}") + logger.info(f"aviso_monitoring module v.{monitoring_version}") + logger.info(f"Configuration loaded: {config}") + + # Set up compactor and cleaner + setup_compactor_and_cleaner(config) + + # Create the UDP server + receiver = Receiver() + udp_server = setup_udp_server(config, receiver) + + # Schedule reporters + schedule_reporters(config, receiver) + + # Start Prometheus reporter + start_prometheus_reporter(config, receiver) + + # Main loop for running scheduled tasks + try: + while True: + schedule.run_pending() + time.sleep(30) + except KeyboardInterrupt: + logger.info("Application stopped by user.") + except Exception as e: + logger.exception("Unexpected error occurred: %s", e) + finally: + if udp_server: + udp_server.stop() # Assuming a method to gracefully stop the UDP server + logger.info("Application shutdown.") -# when running directly from this file if __name__ == "__main__": main() diff --git a/aviso-server/admin/aviso_admin/config.py b/aviso-server/admin/aviso_admin/config.py index 3e79a5f..897ce2a 100644 --- a/aviso-server/admin/aviso_admin/config.py +++ b/aviso-server/admin/aviso_admin/config.py @@ -158,7 +158,6 @@ def _read_env_variables(self): return config def logging_setup(self, logging_conf_path): - if logging_conf_path is not None: try: with open(logging_conf_path, "r") as f: diff --git a/aviso-server/admin/migration/etcd_migration.py b/aviso-server/admin/migration/etcd_migration.py deleted file mode 100755 index 39dad66..0000000 --- a/aviso-server/admin/migration/etcd_migration.py +++ /dev/null @@ -1,186 +0,0 @@ -# (C) Copyright 1996- ECMWF. -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. -# In applying this licence, ECMWF does not waive the privileges and immunities -# granted to it by virtue of its status as an intergovernmental organisation -# nor does it submit to any jurisdiction. 
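# ---------------------------------------------------------------------------
# Aside (illustrative sketch, not part of the patch): the scheduling pattern
# used by the refactored admin entrypoint above reduces to the following. The
# job bodies are hypothetical stand-ins; only the `schedule` calls mirror the
# real code.
import time

import schedule

def compact():
    print("compaction run")  # stand-in for Compactor.run

schedule.every().day.at("02:00").do(compact)          # daily job, like the compactor
schedule.every(5).minutes.do(lambda: print("tick"))   # periodic job, like a reporter

while True:
    schedule.run_pending()  # run any job whose schedule has elapsed
    time.sleep(30)          # 30 s granularity is enough for minute-level jobs
# ---------------------------------------------------------------------------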
- -import base64 -from typing import Dict - -import requests - -old_etcd = "http://k8s-dataservices-master.ecmwf.int:30000" -new_etcd = "http://k8s-dataservices-master.ecmwf.int:31302" -from_revision = 192631533 -to_revision = 192631538 -MAX_KV_RETURNED = 100 - - -def push_kvpairs(etcd_repo, kvs): - """ - Submit key-value pairs to the etcd_repo - :param etcd_repo - :param kvs - :return: True if completed - """ - - print(f"Pushing key-value pairs to {etcd_repo} ...") - - url = etcd_repo + "/v3/kv/txn" - - ops = [] - # Prepare the transaction with a put operation for each KV pair - for kv in kvs: - k = encode_to_str_base64(kv["key"]) - v = encode_to_str_base64(kv["value"]) - put = {"requestPut": {"key": k, "value": v}} - ops.append(put) - - body = {"success": ops} - - # commit transaction - resp = requests.post(url, json=body) - resp.raise_for_status() - - print("Operation completed") - - resp_body = resp.json() - # read the header - if "header" in resp_body: - h = resp_body["header"] - rev = int(h["revision"]) - print(f"New server revision {rev}") - - return True - - -def pull_kvpairs(etcd_repo, revision): - """ - Retrieve key-value pairs newer than the revision number from the etcd_repo - :param etcd_repo - :param revision - :return: kv pairs as dictionary - """ - main_key = "/ec/" - - old_etcd_url = etcd_repo + "/v3/kv/range" - - print(f"Getting key-value pairs to {etcd_repo} for rev {revision} ...") - - range_end = encode_to_str_base64(str(incr_last_byte(main_key), "utf-8")) - - # encode key - encoded_key = encode_to_str_base64(main_key) - - # create the body for the get range on the etcd sever, order them newest first - body = { - "key": encoded_key, - "range_end": range_end, - "limit": MAX_KV_RETURNED, - "sort_order": "DESCEND", - "sort_target": "KEY", - "min_mod_revision": revision, - "max_mod_revision": revision, - "revision": revision, - - } - # make the call - # print(f"Pull request: {body}") - - resp = requests.post(old_etcd_url, json=body) - resp.raise_for_status() - - print("Retrival completed") - - # parse the result to return just key-value pairs - new_kvs = [] - resp_body = resp.json() - if "kvs" in resp_body: - for kv in resp_body["kvs"]: - new_kv = parse_raw_kv(kv, False) - new_kvs.append(new_kv) - print(f"Key: {new_kv['key']} pulled successfully") - - print(f"{len(new_kvs)} keys found") - return new_kvs - - -def parse_raw_kv(kv: Dict[str, any], key_only: bool = False) -> Dict[str, any]: - """ - Internal method to translate the kv pair coming from the etcd server into a dictionary that fits better this - application - :param kv: raw kv pair from the etcd server - :param key_only: - :return: translated kv pair as dictionary - """ - new_kv = {} - if not key_only: - new_kv["value"] = decode_to_bytes(kv["value"]) # leave it as binary - new_kv["key"] = decode_to_bytes(kv["key"]).decode() - new_kv["version"] = int(kv["version"]) - new_kv["create_rev"] = int(kv["create_revision"]) - new_kv["mod_rev"] = int(kv["mod_revision"]) - return new_kv - - -def encode_to_str_base64(obj: any) -> str: - """ - Internal method to translate the object passed in a field that could be accepted by etcd and the request library - for the key or value. The request library accepts only strings encoded in base64 while etcd wants binaries for - the key and value fields. 
- :param obj: - :return: a base64 string representation of the binary translation - """ - if type(obj) is bytes: - binary = obj - elif type(obj) is str: - binary = obj.encode() - else: - binary = str(obj).encode() - - return str(base64.b64encode(binary), "utf-8") - - -def decode_to_bytes(string: str) -> any: - """ - Internal method to translate what is coming back from the notification server. - The request library returns only string base64 encoded - :param string: - :return: the payload decoded from the base64 string representation - """ - return base64.decodebytes(string.encode()) - - -def incr_last_byte(path: str) -> bytes: - """ - This function determines the end of the range required for a range call with the etcd3 API - By incrementing the last byte of the input path, it allows to make a range call describing the input - path as a branch rather than a leaf path. - - :param path: the path representing the start of the range - :return: the path representing the end of the range - """ - bytes_types = (bytes, bytearray) - if not isinstance(path, bytes_types): - if not isinstance(path, str): - path = str(path) - path = path.encode("utf-8") - s = bytearray(path) - # increment the last byte - s[-1] = s[-1] + 1 - return bytes(s) - - -################ -# main worflow # -################ - -for rev in range(from_revision, to_revision + 1): - - # first get the key-value pairs from the old repo - kvs = pull_kvpairs(old_etcd, rev) - - # send them to the new repo - completed = push_kvpairs(new_etcd, kvs) diff --git a/aviso-server/admin/tests/store_size_cycle.py b/aviso-server/admin/tests/store_size_cycle.py index 7cc55cc..4e67b12 100644 --- a/aviso-server/admin/tests/store_size_cycle.py +++ b/aviso-server/admin/tests/store_size_cycle.py @@ -8,6 +8,7 @@ import datetime import queue +from pathlib import Path import requests from aviso_admin import config @@ -35,7 +36,8 @@ def conf() -> config.Config: # this automatically configure the logging - c = config.Config(conf_path="tests/config.yaml") + tests_path = Path(__file__).parent + c = config.Config(conf_path=Path(tests_path / "config.yaml")) return c @@ -68,7 +70,9 @@ def delete_destination_keys(date): def store_size(): reporter = EtcdReporter(conf().monitoring) checker = StoreSize( - EtcdMetricType.etcd_store_size, member_urls=reporter.member_urls, raw_tlms=reporter.retrive_raw_tlms() + EtcdMetricType.etcd_store_size, + member_urls=reporter.member_urls, + raw_tlms=reporter.retrive_raw_tlms(), ) size = checker.max_store_size("etcd_mvcc_db_total_size_in_use_in_bytes") return size @@ -148,7 +152,7 @@ def defrag(): # print(f"Total number of keys deleted {tot}") # defrag # r = defrag() - # print(f"Deframentation: {r}") + # print(f"Defragmentation: {r}") print(store_size()) # print(f"Current store size {store_size()}") diff --git a/aviso-server/admin/tests/test_cleaner.py b/aviso-server/admin/tests/test_cleaner.py index 41e029f..453405e 100644 --- a/aviso-server/admin/tests/test_cleaner.py +++ b/aviso-server/admin/tests/test_cleaner.py @@ -8,6 +8,7 @@ import datetime import os +from pathlib import Path import requests from aviso_admin import config, logger @@ -16,7 +17,8 @@ def conf() -> config.Config: # this automatically configure the logging - c = config.Config(conf_path="aviso-server/admin/tests/config.yaml") + tests_path = Path(__file__).parent + c = config.Config(conf_path=Path(tests_path / "config.yaml")) return c @@ -69,9 +71,18 @@ def test_delete_dissemination_keys(): date = datetime.datetime.now() - datetime.timedelta(days=1) # first put a few 
dissemination keys - put_key(config["url"], config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/aaaa") - put_key(config["url"], config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/bbbb") - put_key(config["url"], config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/cccc") + put_key( + config["url"], + config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/aaaa", + ) + put_key( + config["url"], + config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/bbbb", + ) + put_key( + config["url"], + config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/cccc", + ) # delete dissemination keys n_deleted = cleaner.delete_keys(date, "EC1") @@ -86,9 +97,18 @@ def test_delete_mars_keys(): date = datetime.datetime.now() - datetime.timedelta(days=1) # first put a few MARS keys - put_key(config["url"], config["mars_path"] + "date=" + date.strftime(DATE_FORMAT) + "/aaaa") - put_key(config["url"], config["mars_path"] + "date=" + date.strftime(DATE_FORMAT) + "/bbbb") - put_key(config["url"], config["mars_path"] + "date=" + date.strftime(DATE_FORMAT) + "/cccc") + put_key( + config["url"], + config["mars_path"] + "date=" + date.strftime(DATE_FORMAT) + "/aaaa", + ) + put_key( + config["url"], + config["mars_path"] + "date=" + date.strftime(DATE_FORMAT) + "/bbbb", + ) + put_key( + config["url"], + config["mars_path"] + "date=" + date.strftime(DATE_FORMAT) + "/cccc", + ) # delete MARS keys n_deleted = cleaner.delete_keys(date) @@ -109,9 +129,18 @@ def test_run_cleaner(): # first put a few destinations and keys prefix = config["dest_path"] + date.strftime(DATE_FORMAT) + "/" put_key(config["url"], prefix + "EC1") - put_key(config["url"], config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/aaaa") - put_key(config["url"], config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/bbbb") - put_key(config["url"], config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/cccc") + put_key( + config["url"], + config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/aaaa", + ) + put_key( + config["url"], + config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/bbbb", + ) + put_key( + config["url"], + config["diss_path"] + "EC1/date=" + date.strftime(DATE_FORMAT) + "/cccc", + ) # run the whole workflow cleaner.run() diff --git a/aviso-server/admin/tests/test_compactor.py b/aviso-server/admin/tests/test_compactor.py index 25ed8e4..12f3869 100644 --- a/aviso-server/admin/tests/test_compactor.py +++ b/aviso-server/admin/tests/test_compactor.py @@ -10,6 +10,7 @@ import random import time from datetime import datetime +from pathlib import Path import pytest import requests @@ -19,7 +20,8 @@ def conf() -> config.Config: # this automatically configure the logging - c = config.Config(conf_path="aviso-server/admin/tests/config.yaml") + tests_path = Path(__file__).parent + c = config.Config(conf_path=Path(tests_path / "config.yaml")) return c diff --git a/aviso-server/auth/aviso_auth/__init__.py b/aviso-server/auth/aviso_auth/__init__.py index 8acf638..72fff2b 100644 --- a/aviso-server/auth/aviso_auth/__init__.py +++ b/aviso-server/auth/aviso_auth/__init__.py @@ -9,7 +9,7 @@ import logging # version number for the application. 
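# ---------------------------------------------------------------------------
# Aside (illustrative sketch, not part of the patch): the conf() helpers above
# now resolve the fixture relative to the test module itself rather than the
# process working directory, so the suite runs from any location:
from pathlib import Path

tests_path = Path(__file__).parent        # directory containing the test module
config_path = tests_path / "config.yaml"  # fixture shipped alongside the tests
# ---------------------------------------------------------------------------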
-__version__ = "0.3.2" +__version__ = "0.4.0" # setting application logger logger = logging.getLogger("aviso-auth") diff --git a/aviso-server/auth/aviso_auth/authorisation.py b/aviso-server/auth/aviso_auth/authorisation.py index cb322f0..89692dd 100644 --- a/aviso-server/auth/aviso_auth/authorisation.py +++ b/aviso-server/auth/aviso_auth/authorisation.py @@ -62,7 +62,7 @@ def _allowed_destinations_impl(self, username: str): :param username: :return: - the list of allowed destinations associated to this username if valid - - UserNotFoundException if the user is not registred in ECPDS + - UserNotFoundException if the user is not registered in ECPDS - AuthorisationUnavailableException if the ECPDS server is unreachable - InternalSystemError otherwise """ @@ -132,7 +132,7 @@ def is_authorised_impl(self, username: str, request): :return: - True if authorised - False if not authorised - - UserNotFoundException if the user is not registred in ECPDS + - UserNotFoundException if the user is not registered in ECPDS - AuthorisationUnavailableException if the ECPDS server is unreachable - InternalSystemError otherwise """ @@ -160,7 +160,7 @@ def _is_backend_key_allowed(self, username: str, backend_key: str): :return: - True if authorised - False if not authorised - - UserNotFoundException if the user is not registred in ECPDS + - UserNotFoundException if the user is not registered in ECPDS - AuthorisationUnavailableException if the ECPDS server is unreachable - InternalSystemError otherwise """ @@ -170,7 +170,6 @@ def _is_backend_key_allowed(self, username: str, backend_key: str): # now check if we are accessing to a key space that is open only to authorised users elif len(list(filter(lambda x: backend_key.startswith(x), self.protected_keys))) > 0: - allowed_destinations = self._allowed_destinations(username) logger.debug(f"Destination allowed: {allowed_destinations}") diff --git a/aviso-server/auth/aviso_auth/config.py b/aviso-server/auth/aviso_auth/config.py index 4c28fff..3f8ed6f 100644 --- a/aviso-server/auth/aviso_auth/config.py +++ b/aviso-server/auth/aviso_auth/config.py @@ -200,7 +200,6 @@ def _read_env_variables(self) -> Dict[str, any]: return config def logging_setup(self, logging_conf_path: str): - if logging_conf_path is not None: try: with open(logging_conf_path, "r") as f: diff --git a/aviso-server/monitoring/aviso_monitoring/__init__.py b/aviso-server/monitoring/aviso_monitoring/__init__.py index a1c7008..62cd9ef 100644 --- a/aviso-server/monitoring/aviso_monitoring/__init__.py +++ b/aviso-server/monitoring/aviso_monitoring/__init__.py @@ -9,7 +9,7 @@ import logging # version number for the application. 
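# ---------------------------------------------------------------------------
# Aside (illustrative sketch, not part of the patch): the protected-key check
# in _is_backend_key_allowed() above boils down to a prefix test. The values
# below are hypothetical; only the startswith() logic mirrors the real code.
protected_keys = ["/ec/diss/"]                    # hypothetical configuration
backend_key = "/ec/diss/EC1/date=20240109/aaaa"   # hypothetical request key

requires_authorisation = any(backend_key.startswith(p) for p in protected_keys)
assert requires_authorisation
# ---------------------------------------------------------------------------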
-__version__ = "0.5.0" +__version__ = "0.6.0" logger = logging.getLogger("aviso-monitoring") logger.setLevel(logging.DEBUG) diff --git a/aviso-server/monitoring/aviso_monitoring/collector/config.py b/aviso-server/monitoring/aviso_monitoring/collector/config.py index 2584caa..84e4ce6 100644 --- a/aviso-server/monitoring/aviso_monitoring/collector/config.py +++ b/aviso-server/monitoring/aviso_monitoring/collector/config.py @@ -55,7 +55,6 @@ def __init__(self, transmitter=None, enabled=None, conf_from_file=None): @staticmethod def _create_default_config() -> Dict: - transmitter = { "monitoring_server_host": "127.0.0.1", "monitoring_server_port": 1111, diff --git a/aviso-server/monitoring/aviso_monitoring/collector/transmitter.py b/aviso-server/monitoring/aviso_monitoring/collector/transmitter.py index a8e1e19..e97aec8 100644 --- a/aviso-server/monitoring/aviso_monitoring/collector/transmitter.py +++ b/aviso-server/monitoring/aviso_monitoring/collector/transmitter.py @@ -58,7 +58,6 @@ def transmitter_cycle(self): # logger.debug("Telemetry transmitter cycle started") if len(self.tlm_buffer): # don't do anything if the buffer is empty - # read the event buffer and clear it tlms = self.tlm_buffer.copy() logger.debug(f"{len(tlms)} TLMs found in the buffer") diff --git a/aviso-server/monitoring/aviso_monitoring/config.py b/aviso-server/monitoring/aviso_monitoring/config.py index ea54dcc..a6ded4d 100644 --- a/aviso-server/monitoring/aviso_monitoring/config.py +++ b/aviso-server/monitoring/aviso_monitoring/config.py @@ -27,8 +27,8 @@ def __init__( aviso_auth_reporter=None, etcd_reporter=None, prometheus_reporter=None, + kube_state_metrics=None, ): - try: # we build the configuration in priority order from the lower to the higher # start from the defaults @@ -42,6 +42,7 @@ def __init__( self.aviso_auth_reporter = aviso_auth_reporter self.etcd_reporter = etcd_reporter self.prometheus_reporter = prometheus_reporter + self.kube_state_metrics = kube_state_metrics logger.debug("Loading configuration completed") @@ -52,7 +53,6 @@ def __init__( @staticmethod def _create_default_config() -> Dict: - udp_server = {"host": "127.0.0.1", "port": 1111, "buffer_size": 64 * 1024} # this are the setting for sending the telemetry to a monitoring server like Opsview @@ -115,6 +115,8 @@ def _create_default_config() -> Dict: }, } + kube_state_metrics = {"ssl_enabled": False, "token": None} + # main config config = {} config["udp_server"] = udp_server @@ -123,6 +125,7 @@ def _create_default_config() -> Dict: config["aviso_auth_reporter"] = aviso_auth_reporter config["etcd_reporter"] = etcd_reporter config["prometheus_reporter"] = prometheus_reporter + config["kube_state_metrics"] = kube_state_metrics return config def _read_env_variables(self) -> Dict: @@ -181,7 +184,7 @@ def aviso_rest_reporter(self, aviso_rest_reporter): assert ar is not None, "aviso_rest_reporter has not been configured" assert ar.get("tlms") is not None, "aviso_rest_reporter tlms has not been configured" assert ar.get("enabled") is not None, "aviso_rest_reporter enabled has not been configured" - if type(ar["enabled"]) is str: + if isinstance(ar["enabled"], str): ar["enabled"] = ar["enabled"].casefold() == "true".casefold() assert ar.get("frequency") is not None, "aviso_rest_reporter frequency has not been configured" self._aviso_rest_reporter = ar @@ -201,7 +204,7 @@ def aviso_auth_reporter(self, aviso_auth_reporter): assert aa is not None, "aviso_auth_reporter has not been configured" assert aa.get("tlms") is not None, "aviso_auth_reporter tlms has 
not been configured" assert aa.get("enabled") is not None, "aviso_auth_reporter enabled has not been configured" - if type(aa["enabled"]) is str: + if isinstance(aa["enabled"], str): aa["enabled"] = aa["enabled"].casefold() == "true".casefold() assert aa.get("frequency") is not None, "aviso_auth_reporter frequency has not been configured" self._aviso_auth_reporter = aa @@ -221,7 +224,7 @@ def etcd_reporter(self, etcd_reporter): assert e is not None, "etcd_reporter has not been configured" assert e.get("tlms") is not None, "etcd_reporter tlms has not been configured" assert e.get("enabled") is not None, "etcd_reporter enabled has not been configured" - if type(e["enabled"]) is str: + if isinstance(e["enabled"], str): e["enabled"] = e["enabled"].casefold() == "true".casefold() assert e.get("frequency") is not None, "etcd_reporter frequency has not been configured" assert e.get("member_urls") is not None, "etcd_reporter member_urls has not been configured" @@ -243,11 +246,29 @@ def prometheus_reporter(self, prometheus_reporter): assert pr is not None, "prometheus_reporter has not been configured" assert pr.get("host") is not None, "prometheus_reporter host has not been configured" assert pr.get("enabled") is not None, "prometheus_reporter enabled has not been configured" - if type(pr["enabled"]) is str: + if isinstance(pr["enabled"], str): pr["enabled"] = pr["enabled"].casefold() == "true".casefold() assert pr.get("port") is not None, "prometheus_reporter port has not been configured" self._prometheus_reporter = pr + @property + def kube_state_metrics(self): + return self._kube_state_metrics + + @kube_state_metrics.setter + def kube_state_metrics(self, kube_state_metrics): + ksm = self._config.get("kube_state_metrics") + if kube_state_metrics is not None and ksm is not None: + Config.deep_update(ksm, kube_state_metrics) + elif kube_state_metrics is not None: + ksm = kube_state_metrics + # verify is valid + assert ksm is not None, "kube_state_metrics has not been configured" + assert ksm.get("ssl_enabled") is not None, "kube_state_metrics ssl_enabled has not been configured" + if ksm["ssl_enabled"]: + assert ksm.get("token") is not None, "kube_state_metrics token has not been configured" + self._kube_state_metrics = ksm + def __str__(self): config_string = ( f"udp_server: {self.udp_server}" @@ -256,6 +277,7 @@ def __str__(self): + f", aviso_auth_reporter: {self.aviso_auth_reporter}" + f", etcd_reporter: {self.etcd_reporter}" + f", prometheus_reporter: {self.prometheus_reporter}" + + f", kube_state_metrics: {self.kube_state_metrics}" ) return config_string diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py index ec7c39e..6526930 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_auth_reporter.py @@ -78,7 +78,6 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def metric(self): - # incoming tlms assert self.msg_receiver, "Msg receiver is None" new_tlms = self.msg_receiver.extract_incoming_tlms(self.metric_name) @@ -202,7 +201,12 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def metric(self): - pattern = r'kube_deployment_status_replicas{namespace="aviso",deployment="aviso-auth-\w+"}' + namespace = self.get_k8s_pod_namespace() + if not namespace: + logger.warning("Could not determine the pod's namespace.") + namespace = "aviso" + + pattern = 
rf'kube_deployment_status_replicas{{namespace="{namespace}",deployment="aviso-auth"}}' # defaults status = 0 message = "All pods available" @@ -210,7 +214,7 @@ def metric(self): # fetch the cluster metrics if self.metric_server_url: - metrics = OpsviewReporter.retrive_metrics([self.metric_server_url], self.req_timeout)[ + metrics = OpsviewReporter.retrieve_metrics([self.metric_server_url], self.req_timeout)[ self.metric_server_url ] if metrics: @@ -244,3 +248,30 @@ def metric(self): m_status = {"name": self.metric_name, "status": 1, "message": "Metric could not be retrieved"} logger.debug(f"{self.metric_name} metric: {m_status}") return m_status + + @staticmethod + def get_k8s_pod_namespace(): + """ + Retrieves the Kubernetes (k8s) namespace in which the current pod is running. + + This function reads the namespace name from a file that Kubernetes automatically + mounts inside the pod. This file is typically located at: + '/var/run/secrets/kubernetes.io/serviceaccount/namespace' + + Returns: + str: The namespace in which the pod is running. If the namespace cannot be determined + (e.g., the file doesn't exist or the pod is not running in a k8s environment), + the function returns None. + """ + namespace_file = "/var/run/secrets/kubernetes.io/serviceaccount/namespace" + try: + with open(namespace_file, "r") as file: + return file.read().strip() + except FileNotFoundError: + logger.error(f"Namespace file not found: {namespace_file}") + except IOError as e: + logger.error(f"I/O error occurred when reading namespace file: {e}") + except Exception as e: + logger.exception(f"Unexpected error occurred when reading namespace file: {e}") + + return None diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py index 2d99ad8..384f59d 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/aviso_rest_reporter.py @@ -18,6 +18,8 @@ def __init__(self, config, *args, **kwargs): self.frequency = aviso_rest_config["frequency"] self.enabled = aviso_rest_config["enabled"] self.tlms = aviso_rest_config["tlms"] + # configure the metric vars once only here + OpsviewReporter.configure_metric_vars(config) super().__init__(config, *args, **kwargs) def process_messages(self): @@ -76,7 +78,6 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def metric(self): - logger.debug(f"Processing tlms {self.metric_name}...") assert self.msg_receiver, "Msg receiver is None" @@ -183,7 +184,12 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def metric(self): - pattern = r'kube_deployment_status_replicas{namespace="aviso",deployment="aviso-rest-\w+"}' + namespace = self.get_k8s_pod_namespace() + if not namespace: + logger.warning("Could not determine the pod's namespace.") + namespace = "aviso" + + pattern = rf'kube_deployment_status_replicas{{namespace="{namespace}",deployment="aviso-rest"}}' # defaults status = 0 message = "All pods available" @@ -191,7 +197,7 @@ def metric(self): # fetch the cluster metrics if self.metric_server_url: - metrics = OpsviewReporter.retrive_metrics([self.metric_server_url], self.req_timeout)[ + metrics = OpsviewReporter.retrieve_metrics([self.metric_server_url], self.req_timeout)[ self.metric_server_url ] if metrics: @@ -225,3 +231,30 @@ def metric(self): m_status = {"name": self.metric_name, "status": 1, "message": "Metric could not be retrieved"} 
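# ---------------------------------------------------------------------------
# Aside (illustrative sketch, not part of the patch): how the replica pattern
# above matches the kube-state-metrics exposition format. The sample line is
# hypothetical; note that re treats the braces literally because they do not
# form a valid quantifier.
import re

namespace = "aviso"
pattern = rf'kube_deployment_status_replicas{{namespace="{namespace}",deployment="aviso-rest"}}'
sample = 'kube_deployment_status_replicas{namespace="aviso",deployment="aviso-rest"} 3'

if re.search(pattern, sample):
    replicas = int(sample.split()[-1])  # trailing value of the exposition line
    assert replicas == 3
# ---------------------------------------------------------------------------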
logger.debug(f"{self.metric_name} metric: {m_status}") return m_status + + @staticmethod + def get_k8s_pod_namespace(): + """ + Retrieves the Kubernetes (k8s) namespace in which the current pod is running. + + This function reads the namespace name from a file that Kubernetes automatically + mounts inside the pod. This file is typically located at: + '/var/run/secrets/kubernetes.io/serviceaccount/namespace' + + Returns: + str: The namespace in which the pod is running. If the namespace cannot be determined + (e.g., the file doesn't exist or the pod is not running in a k8s environment), + the function returns None. + """ + namespace_file = "/var/run/secrets/kubernetes.io/serviceaccount/namespace" + try: + with open(namespace_file, "r") as file: + return file.read().strip() + except FileNotFoundError: + logger.error(f"Namespace file not found: {namespace_file}") + except IOError as e: + logger.error(f"I/O error occurred when reading namespace file: {e}") + except Exception as e: + logger.exception(f"Unexpected error occurred when reading namespace file: {e}") + + return None diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py index c2cc1e4..6d6900b 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py @@ -33,7 +33,7 @@ def process_messages(self): logger.debug("Etcd processing metrics...") # fetch the raw tlms provided by etcd - raw_tlms = OpsviewReporter.retrive_metrics(self.member_urls, self.req_timeout) # noqa: F841 + raw_tlms = OpsviewReporter.retrieve_metrics(self.member_urls, self.req_timeout) # noqa: F841 # array of metric to return metrics = [] @@ -89,7 +89,6 @@ class StoreSize(EtcdChecker): """ def metric(self): - # defaults status = 0 message = "Store size is nominal" diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py index 08e9ec7..5d9415d 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/opsview_reporter.py @@ -19,12 +19,25 @@ class OpsviewReporter(ABC): + metric_token_enabled = False + metric_token = "" + def __init__(self, config: Config, msg_receiver=None): urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) self.monitor_servers = config.monitor_servers self.msg_receiver = msg_receiver self.token = {} + @classmethod + def configure_metric_vars(cls, config): + """ + Configures the class attributes based on the provided config. + """ + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + # ssl_enabled implies a token is configured (see Config.kube_state_metrics) + if config.kube_state_metrics["ssl_enabled"]: + cls.metric_token_enabled = True + cls.metric_token = config.kube_state_metrics["token"] + def ms_authenticate(self, m_server): """ This method authenticates to the monitoring server @@ -199,7 +212,8 @@ def aggregate_unique_counter_tlms(tlms): } return agg_tlm - def retrive_metrics(metric_servers, req_timeout): + @classmethod + def retrieve_metrics(cls, metric_servers, req_timeout): """ This method retrieves the metrics provided by specific metric servers using a Prometheus interface.
""" @@ -207,8 +221,11 @@ def retrive_metrics(metric_servers, req_timeout): for u in metric_servers: url = u + "/metrics" logger.debug(f"Retrieving metrics from {url}...") + headers = {} try: - resp = requests.get(url, verify=False, timeout=req_timeout) + if cls.metric_token_enabled: + headers["Authorization"] = f"Bearer {cls.metric_token}" + resp = requests.get(url, verify=False, timeout=req_timeout, headers=headers) except Exception as e: logger.exception(f"Not able to get metrics from {url}, error {e}") raw_tlms[u] = None diff --git a/aviso-server/monitoring/aviso_monitoring/reporter/prometheus_reporter.py b/aviso-server/monitoring/aviso_monitoring/reporter/prometheus_reporter.py index d32b5ce..4cdde63 100644 --- a/aviso-server/monitoring/aviso_monitoring/reporter/prometheus_reporter.py +++ b/aviso-server/monitoring/aviso_monitoring/reporter/prometheus_reporter.py @@ -121,7 +121,6 @@ def __init__(self, tlm_type, *args, **kwargs): self.msg_receiver = kwargs["msg_receiver"] def metric(self): - logger.debug(f"Processing tlms {self.metric_name}...") assert self.msg_receiver, "Msg receiver is None" diff --git a/aviso-server/monitoring/tests/test_prometheus_reporter.py b/aviso-server/monitoring/tests/test_prometheus_reporter.py index d62fe11..bf61918 100644 --- a/aviso-server/monitoring/tests/test_prometheus_reporter.py +++ b/aviso-server/monitoring/tests/test_prometheus_reporter.py @@ -23,7 +23,6 @@ def receiver(): - counter_tlm1 = { "telemetry_type": counter_type, "component_name": "counter_comp", diff --git a/aviso-server/rest/aviso_rest/__init__.py b/aviso-server/rest/aviso_rest/__init__.py index 29fe3cf..ecb54a3 100644 --- a/aviso-server/rest/aviso_rest/__init__.py +++ b/aviso-server/rest/aviso_rest/__init__.py @@ -9,7 +9,7 @@ import logging # version number for the application. 
-__version__ = "0.3.3" +__version__ = "0.4.0" # setting application logger logger = logging.getLogger("aviso-rest") diff --git a/aviso-server/rest/aviso_rest/config.py b/aviso-server/rest/aviso_rest/config.py index 9e4d269..cea1459 100644 --- a/aviso-server/rest/aviso_rest/config.py +++ b/aviso-server/rest/aviso_rest/config.py @@ -87,7 +87,6 @@ def __init__( @staticmethod def _create_default_config() -> Dict: - # main config config = {} config["monitoring"] = None @@ -165,7 +164,6 @@ def _read_env_variables(self) -> Dict[str, any]: return config def logging_setup(self, logging_conf_path: str): - if logging_conf_path is not None: try: with open(logging_conf_path, "r") as f: diff --git a/aviso-server/rest/requirements.txt b/aviso-server/rest/requirements.txt index 69ab6cb..5c54d14 100644 --- a/aviso-server/rest/requirements.txt +++ b/aviso-server/rest/requirements.txt @@ -4,7 +4,7 @@ PyYAML>=5.1.2 python-json-logger>=0.1.11 parse>=1.12.1 requests>=2.23.0 -pyinotify>=0.9.6 +watchdog==2.3.1 gunicorn>=20.0.4 flask>=1.1.2 cloudevents>=1.2.0 diff --git a/aviso-server/rest/tests/test_rest_frontend.py b/aviso-server/rest/tests/test_rest_frontend.py index b574488..a89ec24 100644 --- a/aviso-server/rest/tests/test_rest_frontend.py +++ b/aviso-server/rest/tests/test_rest_frontend.py @@ -10,6 +10,7 @@ import os import threading import time +from pathlib import Path import pytest import requests @@ -20,7 +21,8 @@ from pyaviso.cli_aviso import _parse_inline_params from pyaviso.notification_manager import NotificationManager -config = Config(conf_path="aviso-server/rest/tests/config.yaml") +tests_path = Path(__file__).parent +config = Config(conf_path=Path(tests_path / "config.yaml")) frontend_url_home = f"http://{config.host}:{config.port}" frontend_url_api = f"{frontend_url_home}/api/v1" diff --git a/docs/source/contributing/aviso_server.rst b/docs/source/contributing/aviso_server.rst index 18bd03f..2e46fbe 100644 --- a/docs/source/contributing/aviso_server.rst +++ b/docs/source/contributing/aviso_server.rst @@ -17,7 +17,7 @@ The source of the components presented here is available in the ``aviso-server`` Key-Value store --------------- -The core component of Aviso Server is a Key-Value store. This is a critical component because it guarantees persistency and consistency for all the notifications processed. +The core component of Aviso Server is a Key-Value store. This is a critical component because it guarantees persistence and consistency for all the notifications processed. The current Key-Value store technology used is etcd_. This is a strongly consistent, distributed key-value store able to reach consensus thanks to the Raft algorithm. This allows it to gracefully tolerate machine failure and network partition. Moreover, it is designed for high-throughput. We are running it in its default configuration of a cluster of 3 components. diff --git a/docs/source/contributing/whats_new.rst b/docs/source/contributing/whats_new.rst index eecfd94..95175ff 100644 --- a/docs/source/contributing/whats_new.rst +++ b/docs/source/contributing/whats_new.rst @@ -5,6 +5,25 @@ What's New ============== +Version 1.0.0 (09 January 2024) +-------------------------------- + +- **File-based Engine Update** + The backend for the file-based engine has been upgraded from Pyinotify to Watchdog, enhancing performance and reliability. + +- **macOS Support** + Version 1.0.0 introduces initial support for macOS, broadening platform compatibility.
+ +- **Test Enhancements** + Tests have been improved to use paths relative to the test files, increasing the robustness and portability of the test suite. + +- **Prometheus Metrics** + Added bearer-token authentication support for retrieving Prometheus metrics. + +- **Kubernetes Compatibility** + Enhancements in aviso-monitoring, including runtime detection of the pod namespace, to better support Kubernetes environments. + + v0.11.1 (02 February 2022) -------------------------- diff --git a/docs/source/guide/aviso_ecmwf.rst b/docs/source/guide/aviso_ecmwf.rst index c794f9c..06f9cda 100644 --- a/docs/source/guide/aviso_ecmwf.rst +++ b/docs/source/guide/aviso_ecmwf.rst @@ -11,6 +11,12 @@ This section presents how Aviso has been configured and deployed at ECMWF. This __ https://www.ecmwf.int/en/about/contact-us +.. note:: + + The `aviso-examples`_ repository includes example scripts and configuration files for using Aviso with ECMWF notifications. + +.. _aviso-examples: https://github.com/ecmwf/aviso-examples + ECMWF Aviso service ------------------- diff --git a/docs/source/reference/configuration.rst b/docs/source/reference/configuration.rst index 3db5573..05ada17 100644 --- a/docs/source/reference/configuration.rst +++ b/docs/source/reference/configuration.rst @@ -13,7 +13,7 @@ where each step merges on the previous one: 3. Loading Home config file `~/.aviso/config.yaml` 4. Loading config defined by environment variables, AVISO_CONFIG 5. Loading config defined by command line option, ``-c``, ``--config`` -6. Loading all environnement variables +6. Loading all environment variables 7. Loading all command line options System and Home config files are optional. diff --git a/pyaviso/cli_aviso.py b/pyaviso/cli_aviso.py index 7f933ad..825d641 100644 --- a/pyaviso/cli_aviso.py +++ b/pyaviso/cli_aviso.py @@ -52,11 +52,9 @@ def catch_all_exceptions(cls, handler): """ class Cls(cls): - _original_args = None def make_context(self, info_name, args, parent=None, **extra): - # grab the original command line arguments self._original_args = " ".join(args) @@ -169,7 +167,7 @@ def user_config_setup(f): @click.option("--key", "-k", help="File path to the key required to authenticate to the server.") @functools.wraps(f) def functor(*args, **kwargs): - # CLIK automatically sets the flags, put back None values like for the other parameters + # Click automatically sets the flags, put back None values like for the other parameters kwargs["debug"] = None if not kwargs["debug"] else True kwargs["quiet"] = None if not kwargs["quiet"] else True kwargs["no_fail"] = None if not kwargs["no_fail"] else True diff --git a/pyaviso/engine/etcd_grpc_engine.py b/pyaviso/engine/etcd_grpc_engine.py index afb83c2..e071c00 100644 --- a/pyaviso/engine/etcd_grpc_engine.py +++ b/pyaviso/engine/etcd_grpc_engine.py @@ -33,7 +33,7 @@ def __init__(self, config: EngineConfig, auth: Auth): self._base_url = f"http://{self._host}:{self._port}/v3/" def _initialise_server(self): - if type(self.auth) == EtcdAuth: + if type(self.auth) == EtcdAuth: # noqa: E721 self._server = Etcd3Client( self.host, self.port, user=self.auth.username, password=self.auth.password, timeout=self.timeout ) diff --git a/pyaviso/engine/etcd_rest_engine.py b/pyaviso/engine/etcd_rest_engine.py index efb05e9..8a0c28b 100644 --- a/pyaviso/engine/etcd_rest_engine.py +++ b/pyaviso/engine/etcd_rest_engine.py @@ -120,7 +120,7 @@ def pull( f"Not able to pull key {key}, status {resp.status_code}, {resp.reason}, " f"{resp.content.decode()}" ) - # we got a good responce, exit from the loop + # we got a good response, exit from the loop break
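# ---------------------------------------------------------------------------
# Aside (illustrative sketch, not part of the patch): the 7-step precedence in
# docs/source/reference/configuration.rst above amounts to a left-to-right
# merge where later sources win. The real loader deep-merges nested sections;
# this flat sketch with hypothetical values shows only the ordering.
system_cfg = {"host": "localhost", "port": 2379, "https": False}
home_cfg = {"port": 8080}
cli_opts = {"host": "aviso.ecmwf.int"}

merged = {**system_cfg, **home_cfg, **cli_opts}  # later dicts override earlier keys
assert merged == {"host": "aviso.ecmwf.int", "port": 8080, "https": False}
# ---------------------------------------------------------------------------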
logger.debug(f"Query for {key} completed") @@ -247,7 +247,7 @@ def _authenticate(self) -> bool: This method authenticates the user and set the internal token, this is only done for Etcd authentication :return: True if successfully authenticated """ - if type(self.auth) == EtcdAuth: + if type(self.auth) == EtcdAuth: # noqa: E721 logger.debug(f"Authenticating user {self.auth.username}...") url = self._base_url + "auth/authenticate" @@ -304,7 +304,7 @@ def _latest_revision(self, key: str) -> int: f"{resp.content.decode()}" ) - # we got a good responce, exit from the loop + # we got a good response, exit from the loop break logger.debug("Query for latest revision completed") diff --git a/pyaviso/engine/file_based_engine.py b/pyaviso/engine/file_based_engine.py index 8c475c5..d57ddd6 100644 --- a/pyaviso/engine/file_based_engine.py +++ b/pyaviso/engine/file_based_engine.py @@ -15,7 +15,8 @@ from shutil import rmtree from typing import Dict, List -import pyinotify +from watchdog.events import FileSystemEventHandler +from watchdog.observers import Observer from .. import logger from ..authentication.auth import Auth @@ -192,7 +193,7 @@ def _polling( to_date: datetime = None, ): """ - This method implements the active polling + This method implements the active polling using watchdog :param key: key to watch as a prefix :param callback: function to call if any change happen :param channel: global communication channel among threads @@ -204,8 +205,9 @@ def _polling( logger.warning("from_date option is disabled in TestMode") if to_date: logger.warning("to_date option is disabled in TestMode") + try: - # first create the directory to watch + # Create the directory to watch if it doesn't exist if not os.path.exists(key): try: os.makedirs(key, exist_ok=True) @@ -214,50 +216,50 @@ def _polling( logger.debug("", exc_info=True) return False - # create a watch manager - wm = pyinotify.WatchManager() # Watch Manager - mask = pyinotify.IN_CLOSE_WRITE # watched events - - # define a class to handle the new events - class EventHandler(pyinotify.ProcessEvent): - def __init__(self, engine): - super(EventHandler, self).__init__() + # Define a class to handle the new events using watchdog + class EventHandler(FileSystemEventHandler): + def __init__(self, engine, polling_interval=1): + super().__init__() self._engine = engine + self._polling_interval = polling_interval - def process_IN_CLOSE_WRITE(self, event): - if not os.path.isdir(event.pathname): - kvs = self._engine.pull(key=event.pathname) + def on_modified(self, event): + if not event.is_directory: + kvs = self._engine.pull(key=event.src_path) for kv in kvs: k = kv["key"] v = kv["value"].decode() - # skip the status if kv["key"].endswith("status"): continue logger.debug(f"Notification received for key {k}") try: - # execute the trigger callback(k, v) except Exception as ee: logger.error(f"Error with notification trigger, exception: {type(ee)} {ee}") logger.debug("", exc_info=True) + # throttle bursts of events, mirroring the read_freq batching of pyinotify + time.sleep(self._polling_interval) - # add the handler to the watch manager and define the watching task - handler = EventHandler(engine=self) - notifier = pyinotify.Notifier(wm, handler, read_freq=self._polling_interval) - wm.add_watch(key, mask, rec=True, auto_add=True) + # Set up the observer and event handler + event_handler = EventHandler(engine=self, polling_interval=self._polling_interval) + observer = Observer() + observer.schedule(event_handler, key, recursive=True) + observer.start() - # encapsulate the watcher in a daemon thread so we can stop it + # Daemon thread to allow stopping def t_run(): - notifier.loop() + observer.join() t = threading.Thread(target=t_run) t.setDaemon(True) t.start() - # this is the stop condition + # Stop condition while key in self._listeners: time.sleep(0.1) - notifier.stop() + observer.stop() + observer.join() except Exception as e: logger.error(f"Error while listening to key {key}, {e}") diff --git a/pyaviso/event_listeners/listener_schema_parser.py b/pyaviso/event_listeners/listener_schema_parser.py index 2fd29a1..7c03ac3 100644 --- a/pyaviso/event_listeners/listener_schema_parser.py +++ b/pyaviso/event_listeners/listener_schema_parser.py @@ -170,7 +170,7 @@ def _update_schema(self, mars_schema, evl_schema) -> Dict[str, any]: # search for all the enum keys in all event listener types and add the mars values to it logger.debug("Parsing mars language schema...") for e in evl_schema.items(): - if type(e[1]) == dict and e[1].get("request"): + if isinstance(e[1], dict) and e[1].get("request"): request = e[1].get("request") for k in request: for t in request[k]: @@ -180,7 +180,7 @@ def _update_schema(self, mars_schema, evl_schema) -> Dict[str, any]: # check the mars schema mars_enums = mars_schema["_field"][k]["values"] for me in mars_enums: - if type(me) == list: + if isinstance(me, list): for en in me: t["values"].append(en) else: diff --git a/pyaviso/service_config_manager.py b/pyaviso/service_config_manager.py index 9a31964..d2cb93a 100644 --- a/pyaviso/service_config_manager.py +++ b/pyaviso/service_config_manager.py @@ -169,7 +169,6 @@ def pull_and_save(self, service: str, directory: str, delete: bool) -> List[str] pulled_files = [] pulled_files_tmp = [] if len(kvs) > 0: - # First check if we need to delete the existing folder if delete and os.path.exists(directory): try: diff --git a/pyaviso/triggers/post_trigger.py b/pyaviso/triggers/post_trigger.py index a995d5c..3158c81 100644 --- a/pyaviso/triggers/post_trigger.py +++ b/pyaviso/triggers/post_trigger.py @@ -48,7 +48,7 @@ def __init__(self, notification: Dict, params: Dict): self.protocol = ProtocolType[protocol_params.get("type").lower()].get_class()(notification, protocol_params) def execute(self): - logger.info("Starting Post Trigger for (params.get('protocol'))...'") + logger.info("Starting Post Trigger...") # execute the specific protocol self.protocol.execute() @@ -83,7 +83,6 @@ def __init__(self, notification: Dict, params: Dict): self.source = self.SOURCE_DEFAULT def execute(self): - # prepare the CloudEvents message attributes = { @@ -117,7 +116,7 @@ def execute(self): class PostCloudEventsAws: """ - This class implements a trigger in charge of translating the notification in a CloudEvents messag and send it to a + This class implements a trigger in charge of translating the notification into a CloudEvents message and sending it to an AWS topic specified by the user.
""" @@ -147,7 +146,6 @@ def __init__(self, notification: Dict, params: Dict): self.source = self.SOURCE_DEFAULT def execute(self): - # prepare the AWS topic message attributes = { diff --git a/pyaviso/triggers/trigger_factory.py b/pyaviso/triggers/trigger_factory.py index 2c688ad..ee3f197 100644 --- a/pyaviso/triggers/trigger_factory.py +++ b/pyaviso/triggers/trigger_factory.py @@ -18,7 +18,6 @@ class TriggerFactory: """ def create_trigger(self, notification: Dict[str, any], params: Dict[str, any]) -> Trigger: - assert "type" in params, "'type' is a mandatory field in trigger" # find specific trigger class trigger_type = TriggerType[params.get("type").lower()] diff --git a/pyaviso/user_config.py b/pyaviso/user_config.py index 38d9f98..1b0d696 100644 --- a/pyaviso/user_config.py +++ b/pyaviso/user_config.py @@ -67,19 +67,23 @@ def __init__( self.automatic_retry_delay = automatic_retry_delay def __str__(self): - config_string = ( - f"host: {self.host}" - + f", port: {self.port}" - + f", https: {self.https}" - + f", type: {self.type.name}" - + f", polling_interval: {self.polling_interval}" - + f", timeout: {self.timeout}" - + f", max_file_size: {self.max_file_size}" - + f", service: {self.service}" - + f", catchup: {self.catchup}" - + f", automatic_retry_delay: {self.automatic_retry_delay}" - ) - return config_string + config_items = [ + f"host: {self.host}", + f"port: {self.port}", + f"https: {self.https}", + f"type: {self.type.name}", + f"polling_interval: {self.polling_interval}", + f"timeout: {self.timeout}", + f"max_file_size: {self.max_file_size}", + f"service: {self.service}", + f"catchup: {self.catchup}", + f"automatic_retry_delay: {self.automatic_retry_delay}", + ] + config_string = "\n".join(config_items) + return f"Engine Configuration:\n{config_string}" + + def __repr__(self): + return f"{self.__class__.__name__}({self.__str__()})" class UserConfig: @@ -337,7 +341,6 @@ def _read_env_variables(self) -> Dict[str, any]: return config def logging_setup(self, logging_conf_path: str): - if logging_conf_path is not None: try: with open(logging_conf_path, "r") as f: @@ -430,7 +433,7 @@ def configuration_engine(self, configuration_engine: Dict[str, any]): assert "max_file_size" in ce, "configuration_engine max_file_size has not been configured" assert "timeout" in ce, "configuration_engine timeout has not been configured" assert "automatic_retry_delay" in ce, "configuration_engine automatic_retry_delay has not been configured" - if type(ce["https"]) is str: + if isinstance(ce["https"], str): ce["https"] = ce["https"].casefold() == "true".casefold() # exclude file_based from the options for the configuration engine @@ -461,7 +464,7 @@ def remote_schema(self) -> bool: @remote_schema.setter def remote_schema(self, remote_schema: str): self._remote_schema = self._configure_property(remote_schema, "remote_schema") - if type(self._remote_schema) is str: + if isinstance(self._remote_schema, str): self._remote_schema = self._remote_schema.casefold() == "true".casefold() @property @@ -519,7 +522,7 @@ def no_fail(self) -> bool: @no_fail.setter def no_fail(self, no_fail: str): self._no_fail = self._configure_property(no_fail, "no_fail") - if type(self._no_fail) is str: + if isinstance(self._no_fail, str): self._no_fail = self._no_fail.casefold() == "true".casefold() if self.no_fail: # define a function to run at exit @@ -536,7 +539,7 @@ def debug(self) -> bool: @debug.setter def debug(self, debug: any): self._debug = self._configure_property(debug, "debug") - if type(self._debug) is str: + if 
isinstance(self._debug, str): self._debug = self._debug.casefold() == "true".casefold() if self._debug: logging_level = logging.DEBUG @@ -557,7 +560,7 @@ def quiet(self) -> bool: @quiet.setter def quiet(self, quiet: any): self._quiet = self._configure_property(quiet, "quiet") - if type(self._quiet) is str: + if isinstance(self._quiet, str): self._quiet = self._quiet.casefold() == "true".casefold() if self._quiet: logging_level = logging.ERROR @@ -570,20 +573,24 @@ def quiet(self, quiet: any): pass def __str__(self): - config_string = ( - f"notification_engine: {self.notification_engine}" - + f", configuration_engine: {self.configuration_engine}" - + f", auth_type: {self.auth_type}" - + f", debug: {self.debug}" - + f", quiet: {self.quiet}" - + f", username: {self.username}" - + f", username_file: {self.username_file}" - + f", no_fail: {self.no_fail}" - + f", key_ttl: {self.key_ttl}" - + f", schema_parser: {self.schema_parser}" - + f", remote_schema: {self.remote_schema}" - ) - return config_string + config_items = [ + f"notification_engine: {self.notification_engine}", + f"configuration_engine: {self.configuration_engine}", + f"auth_type: {self.auth_type}", + f"debug: {self.debug}", + f"quiet: {self.quiet}", + f"username: {self.username}", + f"username_file: {self.username_file}", + f"no_fail: {self.no_fail}", + f"key_ttl: {self.key_ttl}", + f"schema_parser: {self.schema_parser}", + f"remote_schema: {self.remote_schema}", + ] + config_string = "\n".join(config_items) + return f"User Configuration:\n{config_string}" + + def __repr__(self): + return f"{self.__class__.__name__}({self.__str__()})" def _configure_default_log(self): try: diff --git a/pyaviso/version.py b/pyaviso/version.py index 5c9a452..923280c 100644 --- a/pyaviso/version.py +++ b/pyaviso/version.py @@ -1,2 +1,2 @@ # version number for the application -__version__ = "0.11.1" +__version__ = "1.0.0" diff --git a/pyproject.toml b/pyproject.toml index e34796e..1bc5fe2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,2 +1,8 @@ [tool.black] -line-length = 120 \ No newline at end of file +line-length = 120 +exclude = ''' +/( + \. # Ignore directories starting with . 
+ | venv # Ignore venv directory +)/ +''' \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 83fb9e5..8aa3d72 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,10 +4,7 @@ PyYAML python-json-logger requests parse -pyinotify +watchdog==2.3.1 cloudevents>=1.2.0 boto3 - - - - +protobuf<=3.20.3 \ No newline at end of file diff --git a/tests/requirements-dev.txt b/tests/requirements-dev.txt index 57e38af..4d9461e 100644 --- a/tests/requirements-dev.txt +++ b/tests/requirements-dev.txt @@ -6,5 +6,4 @@ debugpy black isort flake8 -tox - +tox \ No newline at end of file diff --git a/tests/system/test_aviso.py b/tests/system/test_aviso.py index c416494..c71ff1f 100644 --- a/tests/system/test_aviso.py +++ b/tests/system/test_aviso.py @@ -11,6 +11,7 @@ import os import time from datetime import datetime +from pathlib import Path from shutil import rmtree import pytest @@ -28,8 +29,14 @@ aviso = NotificationManager() +def base_path() -> Path: + """Get the current folder of the test""" + return Path(__file__).parent.parent.parent + + def create_conf() -> user_config.UserConfig: # this automatically configure the logging - c = user_config.UserConfig(conf_path="tests/config.yaml") + tests_path = Path(__file__).parent.parent + c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml")) return c @@ -62,7 +69,8 @@ def clear_environment(): @pytest.mark.parametrize("config", configs) @pytest.fixture(autouse=True) # this runs before and after every test -def pre_post_test(config): +def pre_post_test(config, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) # delete the revision state full_home_path = os.path.expanduser(HOME_FOLDER) full_state_path = os.path.join(full_home_path, LOCAL_STATE_FOLDER) @@ -120,7 +128,7 @@ def caplog_for_logger(caplog): # this is needed to assert over the logging outp lo.removeHandler(caplog.handler) -def reset_previuous_run(): +def reset_previous_run(): file_path = "tests/system/fixtures/received.txt" full_path = os.path.join(os.getcwd(), file_path) if os.path.exists(full_path): @@ -132,11 +140,12 @@ def reset_previuous_run(): @pytest.mark.parametrize("config", [c1, c2]) -def test_command_listener(config): +def test_command_listener(config, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) # delete previous file of notification - file_path = reset_previuous_run() + file_path = reset_previous_run() aviso._listen(config, ["tests/system/fixtures/listeners/command_listener.yaml"]) @@ -157,7 +166,7 @@ def test_command_listener(config): assert received # delete result of notification - file_path = reset_previuous_run() + file_path = reset_previous_run() @pytest.mark.parametrize("config", configs) @@ -250,7 +259,8 @@ def test_notify_ttl(config): @pytest.mark.skip @pytest.mark.parametrize("config", [c1]) -def test_history_on_server(config: user_config.UserConfig, caplog): +def test_history_on_server(config: user_config.UserConfig, caplog, monkeypatch: pytest.MonkeyPatch): + monkeypatch.chdir(base_path()) logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0]) with caplog_for_logger(caplog): # this allows to assert over the logging output config.notification_engine.host = "aviso.ecmwf.int" diff --git a/tests/system/test_aviso_config.py b/tests/system/test_aviso_config.py index eeb17c9..d50218a 100644 --- a/tests/system/test_aviso_config.py +++ b/tests/system/test_aviso_config.py @@ -9,6 +9,7 @@ import os 
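# ---------------------------------------------------------------------------
# Aside (illustrative sketch, not part of the patch): minimal use of the
# watchdog API that replaces pyinotify in the requirements above. The watched
# path is a placeholder.
import time

from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer

class PrintHandler(FileSystemEventHandler):
    def on_modified(self, event):
        if not event.is_directory:
            print(f"changed: {event.src_path}")

observer = Observer()
observer.schedule(PrintHandler(), "/tmp/watched", recursive=True)
observer.start()
try:
    time.sleep(5)        # watch for a few seconds
finally:
    observer.stop()      # ask the observer thread to exit...
    observer.join()      # ...and wait for it
# ---------------------------------------------------------------------------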
 import time
 from filecmp import dircmp
+from pathlib import Path
 from shutil import rmtree
 
 import pytest
@@ -24,8 +25,14 @@
 config_folder_to_pull = "tests/system/fixtures/config_test_pull1"
 
 
+def base_path() -> Path:
+    """Return the root folder of the repository"""
+    return Path(__file__).parent.parent.parent
+
+
 def create_conf() -> user_config.UserConfig:  # this automatically configure the logging
-    c = user_config.UserConfig(conf_path="tests/config.yaml")
+    tests_path = Path(__file__).parent.parent
+    c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml"))
     return c
 
 
@@ -39,7 +46,8 @@ def create_conf() -> user_config.UserConfig:  # this automatically configure the
 
 @pytest.mark.parametrize("conf", confs)
 @pytest.fixture(autouse=True)  # this runs before and after every test
-def pre_post_test(conf):
+def pre_post_test(conf, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     # set environment
     os.environ["AVISO_CONFIG"] = "tests/config.yaml"
     yield
@@ -55,7 +63,8 @@ def clear_environment():
 
 
 @pytest.fixture(autouse=True)
-def remove_test_svc():
+def remove_test_svc(monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     yield
     result = CliRunner().invoke(cli, ["remove", test_svc, "-f"])
     assert result.exit_code == 0
@@ -118,7 +127,8 @@ def test_help(conf):
 
 
 @pytest.mark.parametrize("conf", confs)
-def test_push_and_pull(conf):
+def test_push_and_pull(conf, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     runner = CliRunner()
     result = runner.invoke(cli, ["push", test_svc, "-D", config_folder_to_push1, "-m", "test configuration"])
@@ -149,13 +159,14 @@ def test_push_and_pull(conf):
 
 
 @pytest.mark.parametrize("conf", confs)
-def test_push_and_pull_workflow1(conf):
+def test_push_and_pull_workflow1(conf, monkeypatch: pytest.MonkeyPatch):
     """
     First push larger set, then a small set with NO delete
     -> pulled folder is like the union of the larger and smaller set
     :param conf:
     :return:
     """
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     runner = CliRunner()
     # First push larger set
@@ -193,12 +204,13 @@ def test_push_and_pull_workflow1(conf):
 
 
 @pytest.mark.parametrize("conf", confs)
-def test_push_and_pull_workflow2(conf):
+def test_push_and_pull_workflow2(conf, monkeypatch: pytest.MonkeyPatch):
     """
     First push larger set, then a small set with delete -> pulled folder is like the smaller set
     :param conf:
     :return:
     """
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     runner = CliRunner()
     # First push larger set
@@ -233,13 +245,14 @@ def test_push_and_pull_workflow2(conf):
 
 
 @pytest.mark.parametrize("conf", confs)
-def test_push_and_pull_workflow3(conf):
+def test_push_and_pull_workflow3(conf, monkeypatch: pytest.MonkeyPatch):
     """
     First push larger set and pull, then push small set with delete and pull with NO delete
     -> pulled folder is like the union of larger and smaller set
     :param conf:
     :return:
     """
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     runner = CliRunner()
     # First push larger set
@@ -288,13 +301,14 @@ def test_push_and_pull_workflow3(conf):
 
 
 @pytest.mark.parametrize("conf", confs)
-def test_push_and_pull_workflow4(conf):
+def test_push_and_pull_workflow4(conf, monkeypatch: pytest.MonkeyPatch):
     """
     First push larger set and pull, then push small set with delete and pull with delete
     -> pulled folder is like the smaller set
     :param conf:
     :return:
     """
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     runner = CliRunner()
     # First push larger set
@@ -338,7 +352,8 @@ def test_push_and_pull_workflow4(conf):
 
 
 @pytest.mark.parametrize("conf", confs)
-def test_remove(conf):
+def test_remove(conf, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     runner = CliRunner()
     # First push
@@ -371,7 +386,8 @@ def test_remove(conf):
 
 
 @pytest.mark.parametrize("conf", confs)
-def test_remove_doit(conf):
+def test_remove_doit(conf, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     runner = CliRunner()
     # First push
@@ -400,7 +416,8 @@ def test_remove_doit(conf):
 
 
 @pytest.mark.parametrize("conf", confs)
-def test_revert(conf):
+def test_revert(conf, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     runner = CliRunner()
     # First push
@@ -455,7 +472,8 @@ def test_revert(conf):
 
 
 @pytest.mark.parametrize("conf", confs)
-def test_status(conf):
+def test_status(conf, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     runner = CliRunner()
     # First push
@@ -470,7 +488,8 @@ def test_status(conf):
 
 
 @pytest.mark.parametrize("conf", confs)
-def test_pull_nothing(conf):
+def test_pull_nothing(conf, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     runner = CliRunner()
     result = runner.invoke(cli, ["pull", test_svc, "-D", config_folder_to_pull])
@@ -479,7 +498,8 @@ def test_pull_nothing(conf):
 
 
 @pytest.mark.parametrize("conf", confs)
-def test_remove_nothing(conf):
+def test_remove_nothing(conf, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     runner = CliRunner()
     result = runner.invoke(cli, ["remove", test_svc, "-f"])
@@ -488,7 +508,8 @@ def test_remove_nothing(conf):
 
 
 @pytest.mark.parametrize("conf", confs)
-def test_revert_nothing(conf):
+def test_revert_nothing(conf, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     runner = CliRunner()
     result = runner.invoke(cli, ["revert", test_svc])
@@ -497,7 +518,8 @@ def test_revert_nothing(conf):
 
 
 @pytest.mark.parametrize("conf", confs)
-def test_status_nothing(conf):
+def test_status_nothing(conf, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     runner = CliRunner()
     result = runner.invoke(cli, ["status", test_svc])
diff --git a/tests/unit/test_auth.py b/tests/unit/test_auth.py
index 5dd2575..d523225 100644
--- a/tests/unit/test_auth.py
+++ b/tests/unit/test_auth.py
@@ -7,6 +7,7 @@
 # nor does it submit to any jurisdiction.
 
 import os
+from pathlib import Path
 
 import pytest
@@ -16,7 +17,8 @@ def conf() -> user_config.UserConfig:  # this automatically configure the logging
-    c = user_config.UserConfig(conf_path="tests/config.yaml")
+    tests_path = Path(__file__).parent.parent
+    c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml"))
     return c
 
 
@@ -39,9 +41,9 @@ def test_auth_type(conf):
     auth2 = auth.Auth.get_auth(conf)
     conf.auth_type = "ecmwf"
     auth3 = auth.Auth.get_auth(conf)
-    assert type(auth1) == none_auth.NoneAuth
-    assert type(auth2) == etcd_auth.EtcdAuth
-    assert type(auth3) == ecmwf_auth.EcmwfAuth
+    assert isinstance(auth1, none_auth.NoneAuth)
+    assert isinstance(auth2, etcd_auth.EtcdAuth)
+    assert isinstance(auth3, ecmwf_auth.EcmwfAuth)
 
 
 @pytest.mark.skip  # we cannot authenticate in this test setup
diff --git a/tests/unit/test_cli.py b/tests/unit/test_cli.py
index 46548dc..0eba240 100644
--- a/tests/unit/test_cli.py
+++ b/tests/unit/test_cli.py
@@ -7,6 +7,7 @@
 # nor does it submit to any jurisdiction.
 
 import os
+from pathlib import Path
 
 import pytest
 from click.testing import CliRunner
@@ -18,8 +19,9 @@
 @pytest.fixture()
 def conf() -> user_config.UserConfig:  # this automatically configure the logging
-    c = user_config.UserConfig(conf_path="tests/config.yaml")
-    os.environ["AVISO_CONFIG"] = "tests/config.yaml"
+    tests_path = Path(__file__).parent.parent
+    c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml"))
+    os.environ["AVISO_CONFIG"] = str(Path(tests_path / "config.yaml"))
     return c
 
 
diff --git a/tests/unit/test_etcd_engine.py b/tests/unit/test_etcd_engine.py
index 3331577..4a585e5 100644
--- a/tests/unit/test_etcd_engine.py
+++ b/tests/unit/test_etcd_engine.py
@@ -13,6 +13,7 @@
 import os
 import subprocess
 import time
+from pathlib import Path
 from shutil import rmtree
 from threading import Thread
@@ -26,14 +27,16 @@
 def rest_engine():
     # this automatically configure the logging
-    c = user_config.UserConfig(conf_path="tests/config.yaml")
+    tests_path = Path(__file__).parent.parent
+    c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml"))
     authenticator = auth.Auth.get_auth(c)
     engine = EtcdRestEngine(c.notification_engine, authenticator)
     return engine
 
 
 def grpc_engine():
     # this automatically configure the logging
-    c = user_config.UserConfig(conf_path="tests/config.yaml")
+    tests_path = Path(__file__).parent.parent
+    c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml"))
     authenticator = auth.Auth.get_auth(c)
     engine = EtcdGrpcEngine(c.notification_engine, authenticator)
     return engine
@@ -429,7 +432,6 @@ def test_find_compacted_revision(engine):
 
 @pytest.mark.parametrize("engine", engines)
 def test_push_with_lease(engine):
-
     # submit a key expiring
     kvs = [{"key": "test/test0", "value": "0"}]
     assert engine.push_with_status(kvs, base_key="test/", message="test/test0", ttl=1)
@@ -448,7 +450,6 @@ def test_push_with_lease(engine):
 
 @pytest.mark.parametrize("engine", engines)
 def test_status_as_linked_list(engine):
-
     status0 = {"date_time": "2020-08-28T10:58:17.829Z"}
     kv0 = {"mod_rev": "100", "value": json.dumps(status0).encode()}
diff --git a/tests/unit/test_event_listener_factory.py b/tests/unit/test_event_listener_factory.py
index 5db45fd..a4badb2 100644
--- a/tests/unit/test_event_listener_factory.py
+++ b/tests/unit/test_event_listener_factory.py
@@ -8,6 +8,7 @@
 
 import json
 import os
+from pathlib import Path
 
 import pytest
 import yaml
@@ -17,17 +18,19 @@
 from pyaviso.engine import engine_factory as ef
 from pyaviso.event_listeners import event_listener_factory as elf
 
+tests_path = Path(__file__).parent.parent
+
 
 @pytest.fixture()
 def conf() -> user_config.UserConfig:  # this automatically configure the logging
-    c = user_config.UserConfig(conf_path="tests/config.yaml")
+    c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml"))
     return c
 
 
 @pytest.fixture()
 def schema(conf):
     # Load test schema
-    with open("tests/unit/fixtures/listener_schema.json") as schema:
+    with Path(tests_path / "unit/fixtures/listener_schema.json").open() as schema:
         return json.load(schema)
@@ -38,7 +41,7 @@ def test_empty_file(conf: user_config.UserConfig, schema):
     engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
     listener_factory = elf.EventListenerFactory(engine_factory, schema)
     # open the listener yaml file
-    with open("tests/unit/fixtures/bad_listeners/empty.yaml", "r") as f:
+    with Path(tests_path / "unit/fixtures/bad_listeners/empty.yaml").open(mode="r") as f:
         listeners_dict = yaml.safe_load(f.read())
     # parse it
     try:
@@ -54,7 +57,7 @@ def test_no_listeners(conf: user_config.UserConfig, schema):
     engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
     listener_factory = elf.EventListenerFactory(engine_factory, schema)
     # open the listener yaml file
-    with open("tests/unit/fixtures/bad_listeners/noListeners.yaml", "r") as f:
+    with Path(tests_path / "unit/fixtures/bad_listeners/noListeners.yaml").open(mode="r") as f:
         listeners_dict = yaml.safe_load(f.read())
     # parse it
     try:
@@ -70,7 +73,7 @@ def test_bad_tree_structure(conf: user_config.UserConfig, schema):
     engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
     listener_factory = elf.EventListenerFactory(engine_factory, schema)
     # open the listener yaml file
-    with open("tests/unit/fixtures/bad_listeners/badTree.yaml", "r") as f:
+    with Path(tests_path / "unit/fixtures/bad_listeners/badTree.yaml").open(mode="r") as f:
         listeners_dict = yaml.safe_load(f.read())
     # parse it
     try:
@@ -86,7 +89,7 @@ def test_bad_attribute(conf: user_config.UserConfig, schema):
     engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
     listener_factory = elf.EventListenerFactory(engine_factory, schema)
     # open the listener yaml file
-    with open("tests/unit/fixtures/bad_listeners/badAttribute.yaml", "r") as f:
+    with Path(tests_path / "unit/fixtures/bad_listeners/badAttribute.yaml").open(mode="r") as f:
         listeners_dict = yaml.safe_load(f.read())
     # parse it
     try:
@@ -102,7 +105,7 @@ def test_bad_format(conf: user_config.UserConfig, schema):
     engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
     listener_factory = elf.EventListenerFactory(engine_factory, schema)
     # open the listener yaml file
-    with open("tests/unit/fixtures/bad_listeners/badFormat.yaml", "r") as f:
+    with Path(tests_path / "unit/fixtures/bad_listeners/badFormat.yaml").open(mode="r") as f:
         listeners_dict = yaml.safe_load(f.read())
     # parse it
     try:
@@ -118,7 +121,7 @@ def test_no_trigger(conf: user_config.UserConfig, schema):
     engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
     listener_factory = elf.EventListenerFactory(engine_factory, schema)
     # open the listener yaml file
-    with open("tests/unit/fixtures/bad_listeners/noTrigger.yaml", "r") as f:
+    with Path(tests_path / "unit/fixtures/bad_listeners/noTrigger.yaml").open(mode="r") as f:
         listeners_dict = yaml.safe_load(f.read())
     # parse it
     try:
@@ -134,7 +137,7 @@ def test_bad_trigger_type(conf: user_config.UserConfig, schema):
     engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
     listener_factory = elf.EventListenerFactory(engine_factory, schema)
     # open the listener yaml file
-    with open("tests/unit/fixtures/bad_listeners/badTriggerType.yaml", "r") as f:
+    with Path(tests_path / "unit/fixtures/bad_listeners/badTriggerType.yaml").open(mode="r") as f:
         listeners_dict = yaml.safe_load(f.read())
     # parse it
     try:
@@ -150,7 +153,7 @@ def test_bad_trigger(conf: user_config.UserConfig, schema):
     engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
     listener_factory = elf.EventListenerFactory(engine_factory, schema)
     # open the listener yaml file
-    with open("tests/unit/fixtures/bad_listeners/badTrigger.yaml", "r") as f:
+    with Path(tests_path / "unit/fixtures/bad_listeners/badTrigger.yaml").open(mode="r") as f:
         listeners_dict = yaml.safe_load(f.read())
     # parse it
     try:
@@ -166,7 +169,7 @@ def test_single_listener_complete(conf: user_config.UserConfig, schema):
     engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
     listener_factory = elf.EventListenerFactory(engine_factory, schema)
     # open the listener yaml file
-    with open("tests/unit/fixtures/good_listeners/complete_flight_listener.yaml", "r") as f:
+    with Path(tests_path / "unit/fixtures/good_listeners/complete_flight_listener.yaml").open(mode="r") as f:
         listeners_dict = yaml.safe_load(f.read())
     # parse it
     listeners: list = listener_factory.create_listeners(listeners_dict)
@@ -183,7 +186,7 @@ def test_single_listener(conf: user_config.UserConfig, schema):
     engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
     listener_factory = elf.EventListenerFactory(engine_factory, schema)
     # open the listener yaml file
-    with open("tests/unit/fixtures/good_listeners/basic_flight_listener.yaml", "r") as f:
+    with Path(tests_path / "unit/fixtures/good_listeners/basic_flight_listener.yaml").open(mode="r") as f:
         listeners_dict = yaml.safe_load(f.read())
     # parse it
     listeners: list = listener_factory.create_listeners(listeners_dict)
@@ -200,7 +203,7 @@ def test_multiple_listener(conf: user_config.UserConfig, schema):
     engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
     listener_factory = elf.EventListenerFactory(engine_factory, schema)
     # open the listener yaml file
-    with open("tests/unit/fixtures/good_listeners/multiple_flight_listeners.yaml", "r") as f:
+    with Path(tests_path / "unit/fixtures/good_listeners/multiple_flight_listeners.yaml").open(mode="r") as f:
         listeners_dict = yaml.safe_load(f.read())
     # parse it
     listeners: list = listener_factory.create_listeners(listeners_dict)
diff --git a/tests/unit/test_file_based_engine.py b/tests/unit/test_file_based_engine.py
index 25d4292..cca6ee8 100644
--- a/tests/unit/test_file_based_engine.py
+++ b/tests/unit/test_file_based_engine.py
@@ -8,6 +8,7 @@
 
 import os
 import time
+from pathlib import Path
 from shutil import rmtree
 
 import pytest
@@ -20,7 +21,8 @@
 @pytest.fixture()
 def test_engine():
     # this automatically configure the logging
-    c = user_config.UserConfig(conf_path="tests/config.yaml")
+    tests_path = Path(__file__).parent.parent
+    c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml"))
     authenticator = auth.Auth.get_auth(c)
     engine = FileBasedEngine(c.notification_engine, authenticator)
     return engine
@@ -38,7 +40,7 @@ def pre_post_test(test_engine):
         pass
     yield
     # delete all the keys at the end of the test
-    test_engine.delete("/tmp/aviso/test")
+    # test_engine.delete("/tmp/aviso/test")
     test_engine.stop()
 
 
@@ -72,35 +74,33 @@ def test_listen(test_engine):
     callback_list = []
 
     def callback(key, value):
+        logger.debug(f"Callback triggered for key: {key}")
         callback_list.append(1)
 
-    # listen to a test key
+    # Listen to a test key
     assert test_engine.listen(["/tmp/aviso/test"], callback)
-    # wait a fraction and check the function has been triggered
-    time.sleep(0.5)
+    time.sleep(0.5)  # wait a fraction for the listener to start
 
-    # create independent change to the test key to trigger the notification
+    # Create independent change to the test key to trigger the notification
     kvs = [{"key": "/tmp/aviso/test/test1", "value": "1"}]
     assert test_engine.push(kvs)
-    # wait a fraction and check the function has been triggered
-    time.sleep(1)
+    time.sleep(1)  # wait a fraction for the callback to be triggered
     assert len(callback_list) == 1
 
-    # repeat the push operation
+    # Repeat the push operation
    kvs = [{"key": "/tmp/aviso/test/test1", "value": "2"}]
     assert test_engine.push(kvs)
-    # wait a fraction and check the function has been triggered
-    time.sleep(1)
+    time.sleep(1)  # wait a fraction for the callback to be triggered
     assert len(callback_list) == 2
 
-    # stop listening
+    # Stop listening
     resp = test_engine.stop()
     assert resp
 
-    # repeat the push operation
-    kvs = [{"key": "/tmp/aviso/test/test1", "value": "2"}]
+    # Repeat the push operation
+    kvs = [{"key": "/tmp/aviso/test/test1", "value": "3"}]
     assert test_engine.push(kvs)
-    # wait a fraction and check the function has NOT been triggered
+    # Wait and check that the function has NOT been triggered
     time.sleep(1)
     assert len(callback_list) == 2
diff --git a/tests/unit/test_listener_manager.py b/tests/unit/test_listener_manager.py
index 18af2e3..7052dd3 100644
--- a/tests/unit/test_listener_manager.py
+++ b/tests/unit/test_listener_manager.py
@@ -7,6 +7,7 @@
 # nor does it submit to any jurisdiction.
 
 import os
+from pathlib import Path
 
 import pytest
@@ -20,7 +21,8 @@
 
 @pytest.fixture()
 def conf() -> user_config.UserConfig:  # this automatically configure the logging
-    c = user_config.UserConfig(conf_path="tests/config.yaml")
+    tests_path = Path(__file__).parent.parent
+    c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml"))
     return c
 
 
diff --git a/tests/unit/test_triggers.py b/tests/unit/test_triggers.py
index fd981ad..7b24fa4 100644
--- a/tests/unit/test_triggers.py
+++ b/tests/unit/test_triggers.py
@@ -10,6 +10,7 @@
 import logging
 import os
 import time
+from pathlib import Path
 from threading import Thread
 
 import pytest
@@ -22,10 +23,18 @@
 from pyaviso.event_listeners import event_listener_factory as elf
 from pyaviso.event_listeners.listener_schema_parser import ListenerSchemaParser
 
+tests_path = Path(__file__).parent.parent
+
+
+def base_path() -> Path:
+    """Return the root folder of the repository"""
+    return Path(__file__).parent.parent.parent
+
 
 @pytest.fixture()
 def conf() -> user_config.UserConfig:  # this automatically configure the logging
-    c = user_config.UserConfig(conf_path="tests/config.yaml")
+    tests_path = Path(__file__).parent.parent
+    c = user_config.UserConfig(conf_path=Path(tests_path / "config.yaml"))
     return c
@@ -50,12 +59,12 @@ def caplog_for_logger(caplog):  # this is needed to assert over the logging outp
     lo.removeHandler(caplog.handler)
 
 
-def test_echo_trigger(conf, listener_factory, caplog):
+def test_echo_trigger(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     with caplog_for_logger(caplog):  # this allows to assert over the logging output
-        # open the listener yaml file
-        with open("tests/unit/fixtures/good_listeners/echo_listener.yaml", "r") as f:
+        with Path(tests_path / "unit/fixtures/good_listeners/echo_listener.yaml").open(mode="r") as f:
             listeners_dict = yaml.safe_load(f.read())
         # parse it
         listeners: list = listener_factory.create_listeners(listeners_dict)
@@ -77,7 +86,8 @@ def test_echo_trigger(conf, listener_factory, caplog):
         assert "Echo Trigger completed" in caplog.text
 
 
-def test_function_trigger(conf, listener_factory, caplog):
+def test_function_trigger(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     # create a list that increments every time there is a new event
     trigger_list = []
@@ -103,12 +113,12 @@ def trigger_function(notification):
     assert trigger_list.__len__() == 1
 
 
-def test_logger_listener(conf, listener_factory, caplog):
+def test_logger_listener(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     with caplog_for_logger(caplog):  # this allows to assert over the logging output
-        # open the listener yaml file
-        with open("tests/unit/fixtures/good_listeners/log_listener.yaml", "r") as f:
+        with Path(tests_path / "unit/fixtures/good_listeners/log_listener.yaml").open(mode="r") as f:
             listeners_dict = yaml.safe_load(f.read())
         # parse it
         listeners: list = listener_factory.create_listeners(listeners_dict)
@@ -133,12 +143,12 @@ def test_logger_listener(conf, listener_factory, caplog):
     os.remove("testLog.log")
 
 
-def test_command_listener(conf, listener_factory, caplog):
+def test_command_listener(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     with caplog_for_logger(caplog):  # this allows to assert over the logging output
-        # open the listener yaml file
-        with open("tests/unit/fixtures/good_listeners/command_listener.yaml", "r") as f:
+        with Path(tests_path / "unit/fixtures/good_listeners/command_listener.yaml").open(mode="r") as f:
             listeners_dict = yaml.safe_load(f.read())
         # parse it
         listeners: list = listener_factory.create_listeners(listeners_dict)
@@ -155,11 +165,12 @@ def test_command_listener(conf, listener_factory, caplog):
         assert "Command Trigger completed" in caplog.text
 
 
-def test_command_json_listener(conf, listener_factory, caplog):
+def test_command_json_listener(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     with caplog_for_logger(caplog):  # this allows to assert over the logging output
         # open the listener yaml file
-        with open("tests/unit/fixtures/good_listeners/command_json_listener.yaml", "r") as f:
+        with Path(tests_path / "unit/fixtures/good_listeners/command_json_listener.yaml").open(mode="r") as f:
             listeners_dict = yaml.safe_load(f.read())
         # parse it
         listeners: list = listener_factory.create_listeners(listeners_dict)
@@ -176,11 +187,12 @@ def test_command_json_listener(conf, listener_factory, caplog):
         assert "Command Trigger completed" in caplog.text
 
 
-def test_command_json_path_listener(conf, listener_factory, caplog):
+def test_command_json_path_listener(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     with caplog_for_logger(caplog):  # this allows to assert over the logging output
         # open the listener yaml file
-        with open("tests/unit/fixtures/good_listeners/command_json_path_listener.yaml", "r") as f:
+        with Path(tests_path / "unit/fixtures/good_listeners/command_json_path_listener.yaml").open(mode="r") as f:
             listeners_dict = yaml.safe_load(f.read())
@@ -213,7 +225,7 @@ def test_post_cloudEventshttp_listener(conf, listener_factory, caplog):
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     with caplog_for_logger(caplog):  # this allows to assert over the logging output
         # open the listener yaml file
-        with open("tests/unit/fixtures/good_listeners/post_cloudEventsHttp_listener.yaml", "r") as f:
+        with Path(tests_path / "unit/fixtures/good_listeners/post_cloudEventsHttp_listener.yaml").open(mode="r") as f:
             listeners_dict = yaml.safe_load(f.read())
         # parse it
         listeners: list = listener_factory.create_listeners(listeners_dict)
@@ -241,7 +253,9 @@ def test_post_cloudeventsaws_listener(conf, listener_factory, caplog):
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     with caplog_for_logger(caplog):  # this allows to assert over the logging output
         # open the listener yaml file
-        with open("tests/unit/fixtures/good_listeners/post_cloudEventsAws_fifo_listener.yaml", "r") as f:
+        with Path(tests_path / "unit/fixtures/good_listeners/post_cloudEventsAws_fifo_listener.yaml").open(
+            mode="r"
+        ) as f:
             listeners_dict = yaml.safe_load(f.read())
         # parse it
         listeners: list = listener_factory.create_listeners(listeners_dict)
@@ -268,7 +282,7 @@ def test_multiple_nots_echo(conf, listener_factory, caplog):
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     with caplog_for_logger(caplog):  # this allows to assert over the logging output
         # open the listener yaml file
-        with open("tests/unit/fixtures/good_listeners/echo_listener.yaml", "r") as f:
+        with Path(tests_path / "unit/fixtures/good_listeners/echo_listener.yaml").open(mode="r") as f:
             listeners_dict = yaml.safe_load(f.read())
         # parse it
         listeners: list = listener_factory.create_listeners(listeners_dict)
@@ -287,11 +301,12 @@ def test_multiple_nots_echo(conf, listener_factory, caplog):
         assert caplog.text.count("Echo Trigger completed") == n_nots
 
 
-def test_multiple_nots_cmd(conf, listener_factory, caplog):
+def test_multiple_nots_cmd(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     with caplog_for_logger(caplog):  # this allows to assert over the logging output
         # open the listener yaml file
-        with open("tests/unit/fixtures/good_listeners/command_listener.yaml", "r") as f:
+        with Path(tests_path / "unit/fixtures/good_listeners/command_listener.yaml").open(mode="r") as f:
             listeners_dict = yaml.safe_load(f.read())
         # parse it
         listeners: list = listener_factory.create_listeners(listeners_dict)
@@ -314,7 +329,7 @@ def test_multiple_listeners(conf, listener_factory, caplog):
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     with caplog_for_logger(caplog):  # this allows to assert over the logging output
         # open the listener yaml file
-        with open("tests/unit/fixtures/good_listeners/multiple_listeners.yaml", "r") as f:
+        with Path(tests_path / "unit/fixtures/good_listeners/multiple_listeners.yaml").open(mode="r") as f:
             listeners_dict = yaml.safe_load(f.read())
         # parse it
         listeners: list = listener_factory.create_listeners(listeners_dict)
@@ -331,11 +346,12 @@ def test_multiple_listeners(conf, listener_factory, caplog):
         assert caplog.text.count("Echo Trigger completed") == 3
 
 
-def test_multiple_triggers(conf, listener_factory, caplog):
+def test_multiple_triggers(conf, listener_factory, caplog, monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     with caplog_for_logger(caplog):  # this allows to assert over the logging output
         # open the listener yaml file
-        with open("tests/unit/fixtures/good_listeners/multiple_triggers.yaml", "r") as f:
+        with Path(tests_path / "unit/fixtures/good_listeners/multiple_triggers.yaml").open(mode="r") as f:
             listeners_dict = yaml.safe_load(f.read())
         # parse it
         listeners: list = listener_factory.create_listeners(listeners_dict)
diff --git a/tests/unit/test_user_config.py b/tests/unit/test_user_config.py
index d92486b..aa433e5 100644
--- a/tests/unit/test_user_config.py
+++ b/tests/unit/test_user_config.py
@@ -7,6 +7,7 @@
 # nor does it submit to any jurisdiction.
 
 import os
+from pathlib import Path
 
 import pytest
@@ -16,7 +17,13 @@
 from pyaviso.event_listeners.listener_schema_parser import ListenerSchemaParserType
 from pyaviso.user_config import KEY_FILE, UserConfig
 
-test_config_folder = "tests/unit/fixtures/"
+tests_path = Path(__file__).parent.parent
+test_config_folder = str(Path(tests_path / "unit/fixtures/"))
+
+
+def base_path() -> Path:
+    """Return the root folder of the repository"""
+    return Path(__file__).parent.parent.parent
 
 
 @pytest.fixture(autouse=True)
@@ -124,7 +131,8 @@ def clear_environment():
         pass
 
 
-def test_default():
+def test_default(monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     c = UserConfig._create_default_config()
     assert not c["debug"]
@@ -155,9 +163,10 @@ def test_default():
     assert not c["remote_schema"]
 
 
-def test_config_file():
+def test_config_file(monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
-    c = UserConfig(conf_path=test_config_folder + "config.yaml")
+    c = UserConfig(conf_path=test_config_folder + "/config.yaml")
     assert c.debug
     assert c.notification_engine.polling_interval == 1
     assert c.notification_engine.type == EngineType.ETCD_GRPC
@@ -185,9 +194,10 @@ def test_config_file():
     assert c.schema_parser == ListenerSchemaParserType.ECMWF
 
 
-def test_config_file_with_ev():
+def test_config_file_with_ev(monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
-    os.environ["AVISO_CONFIG"] = test_config_folder + "config.yaml"
+    os.environ["AVISO_CONFIG"] = test_config_folder + "/config.yaml"
     c = UserConfig()
     assert c.debug
     assert c.notification_engine.polling_interval == 1
@@ -216,7 +226,8 @@ def test_config_file_with_ev():
     assert c.schema_parser == ListenerSchemaParserType.ECMWF
 
 
-def test_env_variables():
+def test_env_variables(monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     os.environ["AVISO_NOTIFICATION_HOST"] = "test_env"
     os.environ["AVISO_NOTIFICATION_PORT"] = "3"
@@ -273,7 +284,8 @@ def test_env_variables():
     assert c.schema_parser == ListenerSchemaParserType.ECMWF
 
 
-def test_env_variables_with_config_file():
+def test_env_variables_with_config_file(monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     os.environ["AVISO_NOTIFICATION_HOST"] = "test_env"
     os.environ["AVISO_NOTIFICATION_PORT"] = "3"
@@ -300,7 +312,7 @@ def test_env_variables_with_config_file():
     os.environ["AVISO_SCHEMA_PARSER"] = "generic"
 
     # create a config with the configuration file but the environment variables take priority
-    c = UserConfig(conf_path=test_config_folder + "config.yaml")
+    c = UserConfig(conf_path=test_config_folder + "/config.yaml")
 
     assert not c.debug
     assert c.notification_engine.polling_interval == 3
@@ -329,7 +341,8 @@ def test_env_variables_with_config_file():
     assert c.schema_parser == ListenerSchemaParserType.GENERIC
 
 
-def test_constructor():
+def test_constructor(monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     # create a config with passing the configuration file as well as the parameters. The parameters will take priority
     notification_engine = {
@@ -354,7 +367,7 @@ def test_constructor():
     }
 
     c = UserConfig(
-        conf_path=test_config_folder + "config.yaml",
+        conf_path=test_config_folder + "/config.yaml",
         notification_engine=notification_engine,
         configuration_engine=configuration_engine,
         debug=False,
@@ -394,7 +407,8 @@ def test_constructor():
     assert c.schema_parser == ListenerSchemaParserType.ECMWF
 
 
-def test_constructor_with_env_var():
+def test_constructor_with_env_var(monkeypatch: pytest.MonkeyPatch):
+    monkeypatch.chdir(base_path())
     logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
     os.environ["AVISO_NOTIFICATION_HOST"] = "test_env"
     os.environ["AVISO_NOTIFICATION_PORT"] = "3"
@@ -444,7 +458,7 @@ def test_constructor_with_env_var():
     }
 
     c = UserConfig(
-        conf_path=test_config_folder + "config.yaml",
+        conf_path=test_config_folder + "/config.yaml",
         notification_engine=notification_engine,
         configuration_engine=configuration_engine,
         debug=False,
diff --git a/tests/unit/test_validation.py b/tests/unit/test_validation.py
index 91ecdde..d88d5ff 100644
--- a/tests/unit/test_validation.py
+++ b/tests/unit/test_validation.py
@@ -28,7 +28,7 @@ def test_int_handler():
     try:
         validator = IntHandler(key="test", **schema)
     except TypeError as e:
-        assert e.args[0] == "__init__() got an unexpected keyword argument 'error'"
+        assert e.args[0].endswith("got an unexpected keyword argument 'error'")
 
     schema = {"range": [0, 20], "canonic": "{0:0>4}"}
diff --git a/tox.ini b/tox.ini
index 93f7fbb..83f780d 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,17 +1,17 @@
 [flake8]
 max-line-length = 120
 show-source = true
-exclude = .*
+exclude = .*,venv
 extend-ignore = E203
 [isort]
 profile=black
-skip_glob=.*
+skip = venv
+skip_glob = .*
 [tox]
 envlist = py36, quality
 [testenv]
-deps =
-    pytest
-passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
+deps = pytest
+passenv = http_proxy,HTTP_PROXY,https_proxy,HTTPS_PROXY,no_proxy,NO_PROXY
 commands =
     pip install -e aviso-server/monitoring
     pip install -e aviso-server/rest
@@ -26,5 +26,5 @@ deps =
     flake8
 commands =
     isort --check .
-    black --check .
+    black --check --exclude='/(\..*|venv)/' .
     flake8 .
\ No newline at end of file