diff --git a/jobbergate-cli/CHANGELOG.md b/jobbergate-cli/CHANGELOG.md index 92525f949..da61f2e5a 100644 --- a/jobbergate-cli/CHANGELOG.md +++ b/jobbergate-cli/CHANGELOG.md @@ -4,6 +4,9 @@ This file keeps track of all notable changes to jobbergate-cli ## Unreleased +- Enabled auto-login on the CLI [ASP-4779] + - Replaced authentication functionality by the one from `jobbergate-core` which was already able to handle OIDC authentication at request time + - Added custom error handling for `AuthenticationError` ## 5.2.0 -- 2024-07-01 - Change pydantic.BaseSettings config to use `extra=ignore` diff --git a/jobbergate-cli/jobbergate_cli/auth.py b/jobbergate-cli/jobbergate_cli/auth.py index e5465790c..56b9d45a6 100644 --- a/jobbergate-cli/jobbergate_cli/auth.py +++ b/jobbergate-cli/jobbergate_cli/auth.py @@ -3,267 +3,15 @@ """ import webbrowser -from time import sleep -from typing import Dict, Optional, cast +from typing import Iterable -from jose import jwt -from jose.exceptions import ExpiredSignatureError +from jobbergate_core.auth.handler import DeviceCodeData from loguru import logger -from pydantic import ValidationError from rich.console import Console +from rich.progress import track -from jobbergate_cli.config import settings -from jobbergate_cli.exceptions import Abort, JobbergateCliError from jobbergate_cli.render import terminal_message -from jobbergate_cli.requests import make_request -from jobbergate_cli.schemas import DeviceCodeData, IdentityData, JobbergateContext, Persona, TokenSet -from jobbergate_cli.text_tools import copy_to_clipboard, unwrap -from jobbergate_cli.time_loop import TimeLoop - - -def validate_token_and_extract_identity(token_set: TokenSet) -> IdentityData: - """ - Validate the access_token from a TokenSet and extract the user's identity data. - - Validations: - * Checks if access_token is not empty. - * Checks timestamp on the access token. 
- * Checks that the client_id is present - * Checks that email is present - - Reports an error in the logs and to the user if there is an issue with the access_token. - """ - logger.debug("Validating access token") - - token_file_is_empty = not token_set.access_token - if token_file_is_empty: - logger.debug("Access token file exists but it is empty") - raise Abort( - """ - Access token file exists but it is empty. - - Please try logging in again. - """, - subject="Empty access token file", - support=True, - log_message="Empty access token file", - sentry_context=dict(access_token=dict(access_token=token_set.access_token)), - ) - - try: - token_data = jwt.decode( - token_set.access_token, - None, - options=dict( - verify_signature=False, - verify_aud=False, - verify_exp=True, - ), - ) - except ExpiredSignatureError: - raise # Will be handled in calling context - except Exception as err: - raise Abort( - """ - There was an unknown error while validating the access token. - - Please try logging in again. - """, - subject="Invalid access token", - support=True, - log_message=f"Unknown error while validating access access token: {err}", - sentry_context=dict(access_token=dict(access_token=token_set.access_token)), - original_error=err, - ) - - logger.debug("Extracting identity data from the access token") - try: - identity = IdentityData( - email=token_data.get("email"), - client_id=token_data.get("azp"), - ) - except ValidationError as err: - raise Abort( - """ - There was an error extracting the user's identity from the access token. - - Please try logging in again. 
- """, - subject="Missing user data", - support=True, - log_message=f"Token data could not be extracted to identity: {err}", - original_error=err, - ) - - if settings.MULTI_TENANCY_ENABLED: - try: - org_dict = token_data["organization"] - JobbergateCliError.require_condition( - len(org_dict) == 1, - message="Did not find exactly one organization in token payload", - ) - identity.organization_id = list(org_dict.keys())[0] - except Exception as err: - raise Abort( - """ - The access token is invalid. - - Please try logging in again. - """, - subject="Invalid access token", - support=True, - log_message="Organization payload required in multi-tenancy mode is malformed ", - sentry_context=dict(token_data=token_data), - original_error=err, - ) - - return identity - - -def load_tokens_from_cache() -> TokenSet: - """ - Loads an access token (and a refresh token if one exists) from the cache. - """ - - # Make static type checkers happy - assert settings.JOBBERGATE_ACCESS_TOKEN_PATH is not None - assert settings.JOBBERGATE_REFRESH_TOKEN_PATH is not None - - Abort.require_condition( - settings.JOBBERGATE_ACCESS_TOKEN_PATH.exists(), - "Please login with your auth token first using the `jobbergate login` command", - raise_kwargs=dict(subject="You need to login"), - ) - - logger.debug("Retrieving access token from cache") - token_set = TokenSet(access_token=settings.JOBBERGATE_ACCESS_TOKEN_PATH.read_text()) - - if settings.JOBBERGATE_REFRESH_TOKEN_PATH.exists(): - logger.debug("Retrieving refresh token from cache") - token_set.refresh_token = settings.JOBBERGATE_REFRESH_TOKEN_PATH.read_text() - - return token_set - - -def save_tokens_to_cache(token_set: TokenSet): - """ - Saves tokens from a token_set to the cache. 
- """ - - # Make static type checkers happy - assert settings.JOBBERGATE_ACCESS_TOKEN_PATH is not None - assert settings.JOBBERGATE_REFRESH_TOKEN_PATH is not None - - logger.debug(f"Caching access token at {settings.JOBBERGATE_ACCESS_TOKEN_PATH}") - settings.JOBBERGATE_ACCESS_TOKEN_PATH.write_text(token_set.access_token) - settings.JOBBERGATE_ACCESS_TOKEN_PATH.chmod(0o600) - - if token_set.refresh_token is not None: - logger.debug(f"Caching refresh token at {settings.JOBBERGATE_REFRESH_TOKEN_PATH}") - settings.JOBBERGATE_REFRESH_TOKEN_PATH.write_text(token_set.refresh_token) - settings.JOBBERGATE_REFRESH_TOKEN_PATH.chmod(0o600) - - -def clear_token_cache(): - """ - Clears the token cache. - """ - logger.debug("Clearing cached tokens") - - logger.debug(f"Removing access token at {settings.JOBBERGATE_ACCESS_TOKEN_PATH}") - if settings.JOBBERGATE_ACCESS_TOKEN_PATH.exists(): - settings.JOBBERGATE_ACCESS_TOKEN_PATH.unlink() - - logger.debug(f"Removing refresh token at {settings.JOBBERGATE_REFRESH_TOKEN_PATH}") - if settings.JOBBERGATE_REFRESH_TOKEN_PATH.exists(): - settings.JOBBERGATE_REFRESH_TOKEN_PATH.unlink() - - -def init_persona(ctx: JobbergateContext, token_set: Optional[TokenSet] = None): - """ - Initializes the "persona" which contains the tokens and email address for a user. - - Retrieves the access token for the user from the cache. - - Token is retrieved from the cache, validated, and user email is extracted. - - If the access token is expired, a new one will be acquired via the cached refresh token (if there is one). - - Saves token_set to cache. - - Returns the persona. - """ - if token_set is None: - token_set = load_tokens_from_cache() - - try: - identity_data = validate_token_and_extract_identity(token_set) - except ExpiredSignatureError: - Abort.require_condition( - token_set.refresh_token is not None, - "The auth token is expired. 
Please retrieve a new and log in again.", - raise_kwargs=dict( - subject="Expired access token", - support=True, - ), - ) - - logger.debug("The access token is expired. Attempting to refresh token") - refresh_access_token(ctx, token_set) - identity_data = validate_token_and_extract_identity(token_set) - - logger.debug(f"Persona created with identity_data: {identity_data}") - - save_tokens_to_cache(token_set) - - return Persona( - token_set=token_set, - identity_data=identity_data, - ) - - -def refresh_access_token(ctx: JobbergateContext, token_set: TokenSet): - """ - Attempt to fetch a new access token given a refresh token in a token_set. - - Sets the access token in-place. - - If refresh fails, notify the user that they need to log in again. - """ - url = f"https://{settings.OIDC_DOMAIN}/protocol/openid-connect/token" - logger.debug(f"Requesting refreshed access token from {url}") - - JobbergateCliError.require_condition( - ctx.client is not None, - "Attempted to refresh with a null client. This should not happen", - ) - - # Make static type-checkers happy - assert ctx.client is not None - - refreshed_token_set = cast( - TokenSet, - make_request( - # Can this even work? this client should be for the armada api... - ctx.client, - "/protocol/openid-connect/token", - "POST", - abort_message="The auth token could not be refreshed. 
Please try logging in again.", - abort_subject="EXPIRED ACCESS TOKEN", - support=True, - response_model_cls=TokenSet, - data=dict( - client_id=settings.OIDC_CLIENT_ID, - audience=settings.OIDC_AUDIENCE, - grant_type="refresh_token", - refresh_token=token_set.refresh_token, - ), - ), - ) - - token_set.access_token = refreshed_token_set.access_token - if refreshed_token_set.refresh_token is not None: - token_set.refresh_token = refreshed_token_set.refresh_token +from jobbergate_cli.text_tools import copy_to_clipboard def open_on_browser(url: str) -> bool: @@ -279,11 +27,14 @@ def open_on_browser(url: str) -> bool: return False -def show_login_message(verification_uri: str): +def show_login_message(device_code_data: DeviceCodeData): """Show a message to the user with a link to the auth provider to login.""" console = Console() EXTRA_CHARS = 7 # for indentation and panel borders + verification_uri = device_code_data.verification_uri_complete + waiting_time = int(device_code_data.expires_in / 60) + kwargs = {} if open_on_browser(verification_uri): @@ -292,18 +43,18 @@ def show_login_message(verification_uri: str): kwargs["footer"] = "The output was copied to your clipboard" if console.width >= len(verification_uri) + EXTRA_CHARS: - _show_login_standard_screen(verification_uri, **kwargs) + _show_login_standard_screen(verification_uri, waiting_time, **kwargs) else: - _show_login_narrow_screen(verification_uri, console, **kwargs) + _show_login_narrow_screen(verification_uri, waiting_time, console, **kwargs) -def _show_login_narrow_screen(verification_uri: str, console: Console, **kwargs): +def _show_login_narrow_screen(verification_uri: str, waiting_time: int, console: Console, **kwargs): """Print the link out of the panel to make it easier to copy.""" terminal_message( f""" To complete login, please open the link bellow in a browser. - Waiting up to {settings.OIDC_MAX_POLL_TIME / 60} minutes for you to complete the process... 
+ Waiting up to {waiting_time} minutes for you to complete the process... """, subject="Waiting for login", **kwargs, @@ -312,7 +63,7 @@ def _show_login_narrow_screen(verification_uri: str, console: Console, **kwargs) console.print() -def _show_login_standard_screen(verification_uri: str, **kwargs): +def _show_login_standard_screen(verification_uri: str, waiting_time: int, **kwargs): """Print a rich panel with a link to the auth provider to login.""" terminal_message( f""" @@ -320,85 +71,13 @@ def _show_login_standard_screen(verification_uri: str, **kwargs): {verification_uri} - Waiting up to {settings.OIDC_MAX_POLL_TIME / 60} minutes for you to complete the process... + Waiting up to {waiting_time} minutes for you to complete the process... """, subject="Waiting for login", **kwargs, ) -def fetch_auth_tokens(ctx: JobbergateContext) -> TokenSet: - """ - Fetch an access token (and possibly a refresh token) from Auth0. - - Prints out a URL for the user to use to authenticate and polls the token endpoint to fetch it when - the browser-based process finishes - """ - # Make static type-checkers happy - assert ctx.client is not None - - device_code_data = cast( - DeviceCodeData, - make_request( - ctx.client, - "/protocol/openid-connect/auth/device", - "POST", - expected_status=200, - abort_message="There was a problem retrieving a device verification code from the auth provider", - abort_subject="COULD NOT RETRIEVE TOKEN", - support=True, - response_model_cls=DeviceCodeData, - data=dict( - client_id=settings.OIDC_CLIENT_ID, - grant_type="client_credentials", - audience=settings.OIDC_AUDIENCE, - ), - ), - ) - show_login_message(device_code_data.verification_uri_complete) - - for tick in TimeLoop( - settings.OIDC_MAX_POLL_TIME, - message="Waiting for web login", - ): - response_data = cast( - Dict, - make_request( - ctx.client, - "/protocol/openid-connect/token", - "POST", - abort_message="There was a problem retrieving a device verification code from the auth provider", - 
abort_subject="COULD NOT FETCH ACCESS TOKEN", - support=True, - data=dict( - grant_type="urn:ietf:params:oauth:grant-type:device_code", - device_code=device_code_data.device_code, - client_id=settings.OIDC_CLIENT_ID, - ), - ), - ) - if "error" in response_data: - if response_data["error"] == "authorization_pending": - logger.debug(f"Token fetch attempt #{tick.counter} failed") - sleep(device_code_data.interval) - else: - # TODO: Test this failure condition - raise Abort( - unwrap( - """ - There was a problem retrieving a device verification code from the auth provider: - Unexpected failure retrieving access token. - """ - ), - subject="Unexpected error", - support=True, - log_message=f"Unexpected error response: {response_data}", - ) - else: - return TokenSet(**response_data) - - raise Abort( - "Login process was not completed in time. Please try again.", - subject="Timed out", - log_message="Timed out while waiting for user to complete login", - ) +def track_login_progress(iterable: Iterable) -> Iterable: + """Track the progress of the login process on a progress bar.""" + return track(iterable, description="[green]Waiting for web login...", update_period=1, transient=True) diff --git a/jobbergate-cli/jobbergate_cli/config.py b/jobbergate-cli/jobbergate_cli/config.py index 968846495..da78dd393 100644 --- a/jobbergate-cli/jobbergate_cli/config.py +++ b/jobbergate-cli/jobbergate_cli/config.py @@ -7,9 +7,8 @@ from sys import exit from typing import Optional -from pydantic import Field, ValidationError, model_validator +from pydantic import Field, ValidationError, computed_field, field_validator from pydantic_settings import BaseSettings, SettingsConfigDict -from typing_extensions import Self from jobbergate_cli import constants from jobbergate_cli.constants import OV_CONTACT @@ -60,16 +59,6 @@ class Settings(BaseSettings): JOBBERGATE_AWS_SECRET_ACCESS_KEY: Optional[str] = None JOBBERGATE_S3_LOG_BUCKET: str = Field("jobbergate-cli-logs") - # Computed values. 
Listed as Optional, but will *always* be set (or overridden) based on other values - JOBBERGATE_APPLICATION_MODULE_PATH: Optional[Path] = None - JOBBERGATE_APPLICATION_CONFIG_PATH: Optional[Path] = None - JOBBERGATE_LOG_PATH: Optional[Path] = None - JOBBERGATE_USER_TOKEN_DIR: Optional[Path] = None - JOBBERGATE_ACCESS_TOKEN_PATH: Optional[Path] = None - JOBBERGATE_REFRESH_TOKEN_PATH: Optional[Path] = None - - JOBBERGATE_CLUSTER_LIST_PATH: Optional[Path] = None - # Compatibility mode: If True, add commands as they appear in the legacy app JOBBERGATE_COMPATIBILITY_MODE: Optional[bool] = False JOBBERGATE_LEGACY_NAME_CONVENTION: Optional[bool] = False @@ -79,36 +68,39 @@ class Settings(BaseSettings): OIDC_AUDIENCE: str OIDC_CLIENT_ID: str OIDC_USE_HTTPS: bool = True - OIDC_MAX_POLL_TIME: int = 5 * 60 # 5 Minutes - - # Enable multi-tenancy to fix cluster name mapping by client_id - MULTI_TENANCY_ENABLED: bool = False - @model_validator(mode="after") - def compute_extra_settings(self) -> Self: + @field_validator("JOBBERGATE_CACHE_DIR", mode="after") + def _validate_cache_dir(cls, value: Path) -> Path: """ - Compute settings values that are based on other settings values. + Expand, resolve, and create cache directory. 
""" - self.JOBBERGATE_CACHE_DIR = Path(self.JOBBERGATE_CACHE_DIR).expanduser().resolve() - cache_dir = self.JOBBERGATE_CACHE_DIR - cache_dir.mkdir(exist_ok=True, parents=True) + value = value.expanduser().resolve() + value.mkdir(exist_ok=True, parents=True) + return value - self.JOBBERGATE_APPLICATION_MODULE_PATH = cache_dir / constants.JOBBERGATE_APPLICATION_MODULE_FILE_NAME - self.JOBBERGATE_APPLICATION_CONFIG_PATH = cache_dir / constants.JOBBERGATE_APPLICATION_CONFIG_FILE_NAME + @computed_field + def JOBBERGATE_USER_TOKEN_DIR(self) -> Path: + token_dir = self.JOBBERGATE_CACHE_DIR / "token" + token_dir.mkdir(exist_ok=True, parents=True) + return token_dir - log_dir = cache_dir / "logs" - log_dir.mkdir(exist_ok=True, parents=True) - self.JOBBERGATE_LOG_PATH = log_dir / "jobbergate-cli.log" + @computed_field + def JOBBERGATE_LOG_PATH(self) -> Path: + log_file = self.JOBBERGATE_CACHE_DIR / "logs" / "jobbergate-cli.log" + log_file.parent.mkdir(exist_ok=True, parents=True) + return log_file - token_dir = cache_dir / "token" - token_dir.mkdir(exist_ok=True, parents=True) - self.JOBBERGATE_USER_TOKEN_DIR = token_dir - self.JOBBERGATE_ACCESS_TOKEN_PATH = token_dir / "access.token" - self.JOBBERGATE_REFRESH_TOKEN_PATH = token_dir / "refresh.token" + @computed_field + def JOBBERGATE_APPLICATION_MODULE_PATH(self) -> Path: + return self.JOBBERGATE_CACHE_DIR / constants.JOBBERGATE_APPLICATION_MODULE_FILE_NAME - self.JOBBERGATE_CLUSTER_LIST_PATH = cache_dir / "clusters.json" + @computed_field + def JOBBERGATE_APPLICATION_CONFIG_PATH(self) -> Path: + return self.JOBBERGATE_CACHE_DIR / constants.JOBBERGATE_APPLICATION_CONFIG_FILE_NAME - return self + @computed_field + def JOBBERGATE_CLUSTER_LIST_PATH(self) -> Path: + return self.JOBBERGATE_CACHE_DIR / "clusters.json" @property def is_onsite_mode(self) -> bool: diff --git a/jobbergate-cli/jobbergate_cli/exceptions.py b/jobbergate-cli/jobbergate_cli/exceptions.py index 86501fb10..8cf4ea22f 100644 --- 
a/jobbergate-cli/jobbergate_cli/exceptions.py +++ b/jobbergate-cli/jobbergate_cli/exceptions.py @@ -12,6 +12,8 @@ from rich.console import Console from rich.panel import Panel +from jobbergate_core.auth import AuthenticationError + from jobbergate_cli.config import settings from jobbergate_cli.constants import OV_CONTACT from jobbergate_cli.text_tools import dedent, unwrap @@ -68,7 +70,7 @@ def handle_abort(func): @wraps(func) def wrapper(*args, **kwargs): try: - func(*args, **kwargs) + return func(*args, **kwargs) except Abort as err: if not err.warn_only: if err.log_message is not None: @@ -105,3 +107,28 @@ def wrapper(*args, **kwargs): raise typer.Exit(code=1) return wrapper + + +def handle_authentication_error(func): + """ + Adapter decorator that catches AuthenticationError exceptions and raises an appropriate Abort error. + """ + + @wraps(func) + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except AuthenticationError as err: + subject = "Authentication error" + original_exception = err.__cause__ + if original_exception is not None: + original_exception_name = type(original_exception).__name__ + subject += f" -- {original_exception_name}" + raise Abort( + f"{err.message}\n\nPlease check your credentials and try again.", + subject=subject, + support=True, + original_error=err, + ) from err + + return wrapper diff --git a/jobbergate-cli/jobbergate_cli/main.py b/jobbergate-cli/jobbergate_cli/main.py index 658cede7a..a6cffd1e3 100644 --- a/jobbergate-cli/jobbergate_cli/main.py +++ b/jobbergate-cli/jobbergate_cli/main.py @@ -2,20 +2,19 @@ Provide main entry point for the Jobbergate CLI App. 
""" -from typing import Optional +import sys import httpx import importlib_metadata -import jose import typer +from jobbergate_core.auth.handler import JobbergateAuthHandler - -from jobbergate_cli.auth import clear_token_cache, fetch_auth_tokens, init_persona, load_tokens_from_cache +from jobbergate_cli.auth import show_login_message, track_login_progress from jobbergate_cli.config import settings -from jobbergate_cli.exceptions import Abort, handle_abort +from jobbergate_cli.exceptions import Abort, handle_abort, handle_authentication_error from jobbergate_cli.logging import init_logs, init_sentry from jobbergate_cli.render import render_demo, render_json, terminal_message -from jobbergate_cli.schemas import JobbergateContext, Persona, TokenSet +from jobbergate_cli.schemas import JobbergateContext from jobbergate_cli.subapps.applications.app import app as applications_app from jobbergate_cli.subapps.job_scripts.app import app as job_scripts_app from jobbergate_cli.subapps.job_submissions.app import app as job_submissions_app @@ -36,7 +35,6 @@ @app.callback(invoke_without_command=True) -@handle_abort def main( ctx: typer.Context, verbose: bool = typer.Option(False, help="Enable verbose logging to the terminal"), @@ -68,62 +66,52 @@ def main( init_logs(verbose=verbose) init_sentry() - persona = None protocol = "https" if settings.OIDC_USE_HTTPS else "http" - domain = settings.OIDC_DOMAIN - client = httpx.Client( - base_url=f"{protocol}://{domain}", - headers={"content-type": "application/x-www-form-urlencoded"}, - timeout=settings.JOBBERGATE_REQUESTS_TIMEOUT, + authentication_handler = JobbergateAuthHandler( + cache_directory=settings.JOBBERGATE_USER_TOKEN_DIR, + login_domain=f"{protocol}://{settings.OIDC_DOMAIN}", + login_audience=settings.OIDC_AUDIENCE, + login_client_id=settings.OIDC_CLIENT_ID, + login_url_handler=show_login_message, + login_sequence_handler=track_login_progress, ) - context = JobbergateContext(persona=None, client=client) - if 
ctx.invoked_subcommand not in ("login", "logout"): - persona = init_persona(context) - context.client = httpx.Client( + ctx.obj = JobbergateContext( + client=httpx.Client( base_url=settings.ARMADA_API_BASE, - headers=dict(Authorization=f"Bearer {persona.token_set.access_token}"), + auth=authentication_handler, timeout=settings.JOBBERGATE_REQUESTS_TIMEOUT, - ) - context.persona = persona - context.full_output = full - context.raw_output = raw - - ctx.obj = context + ), + authentication_handler=authentication_handler, + full_output=full, + raw_output=raw, + ) @app.command(rich_help_panel="Authentication") -@handle_abort def login(ctx: typer.Context): """ Log in to the jobbergate-cli by storing the supplied token argument in the cache. """ - token_set: TokenSet = fetch_auth_tokens(ctx.obj) - persona: Persona = init_persona(ctx.obj, token_set) - terminal_message( - f"User was logged in with email '{persona.identity_data.email}'", - subject="Logged in!", - ) + ctx.obj.authentication_handler.login() + identity_data = ctx.obj.authentication_handler.get_identity_data() + terminal_message(f"User was logged in with email '{identity_data.email}'", subject="Logged in!") render_demo() @app.command(rich_help_panel="Authentication") -@handle_abort -def logout(): +def logout(ctx: typer.Context): """ Logs out of the jobbergate-cli. Clears the saved user credentials. """ - clear_token_cache() - terminal_message( - "User was logged out.", - subject="Logged out", - ) + ctx.obj.authentication_handler.logout() + terminal_message("User was logged out.", subject="Logged out") @app.command(rich_help_panel="Authentication") -@handle_abort def show_token( + ctx: typer.Context, plain: bool = typer.Option( False, help="Show the token in plain text.", @@ -153,62 +141,48 @@ def show_token( Token output is automatically copied to your clipboard. 
""" - token_set: TokenSet = load_tokens_from_cache() - token: Optional[str] - if not refresh: - token = token_set.access_token - subject = "Access Token" - Abort.require_condition( - token is not None, - "User is not logged in. Please log in first.", - raise_kwargs=dict( - subject="Not logged in", - ), - ) + ctx.obj.authentication_handler.acquire_access() + if refresh: + token = ctx.obj.authentication_handler._refresh_token else: - token = token_set.refresh_token - subject = "Refresh Token" - Abort.require_condition( - token is not None, - "User is not logged in or does not have a refresh token. Please try loggin in again.", - raise_kwargs=dict( - subject="No refresh token", - ), - ) + token = ctx.obj.authentication_handler._access_token + + Abort.require_condition(token.is_valid(), f"Could not obtain {token.label}. Please try loggin in again.") if decode: # Decode the token with ALL verification turned off (we just want to unpack it) - content = jose.jwt.decode( - token, - "secret-will-be-ignored", - options=dict( - verify_signature=False, - verify_aud=False, - verify_iat=False, - verify_exp=False, - verify_nbf=False, - verify_iss=False, - verify_sub=False, - verify_jti=False, - verify_at_hash=False, - ), - ) - render_json(content) + render_json(token.data) return if show_header: - token_text = f"""{{ "Authorization": "Bearer {token}" }}""" + token_text = f"""{{ "Authorization": "{token.bearer_token}" }}""" + elif show_prefix: + token_text = token.bearer_token else: - prefix = "Bearer " if show_prefix else "" - token_text = f"{prefix}{token}" + token_text = token.content on_clipboard = copy_to_clipboard(token_text) if plain: print(token_text) else: + subject = f"{token.label.title()} Token" kwargs = dict(subject=subject, indent=False) if on_clipboard: kwargs["footer"] = "The output was copied to your clipboard" terminal_message(token_text, **kwargs) + + +def safe_entrypoint(): + """ + Entrypoint for the app including custom error handling. 
+ + With this we ensure error handling is applied to all commands with no need + to duplicate the decorators on each of them. + """ + try: + safe_function = handle_abort(handle_authentication_error(app.__call__)) + safe_function() + except typer.Exit as e: + sys.exit(e.exit_code) diff --git a/jobbergate-cli/jobbergate_cli/schemas.py b/jobbergate-cli/jobbergate_cli/schemas.py index 57fd3541c..bb4659b1e 100644 --- a/jobbergate-cli/jobbergate_cli/schemas.py +++ b/jobbergate-cli/jobbergate_cli/schemas.py @@ -11,6 +11,7 @@ import pydantic.generics from jobbergate_cli.constants import FileType +from jobbergate_core.auth.handler import JobbergateAuthHandler class TokenSet(pydantic.BaseModel, extra="ignore"): @@ -57,10 +58,10 @@ class JobbergateContext(pydantic.BaseModel, arbitrary_types_allowed=True): A data object describing context passed from the main entry point. """ - persona: Optional[Persona] = None - full_output: bool = False + client: httpx.Client + authentication_handler: JobbergateAuthHandler raw_output: bool = False - client: Optional[httpx.Client] = None + full_output: bool = False class JobbergateConfig(pydantic.BaseModel, extra="allow"): diff --git a/jobbergate-cli/jobbergate_cli/subapps/applications/app.py b/jobbergate-cli/jobbergate_cli/subapps/applications/app.py index 828238505..af0a77723 100644 --- a/jobbergate-cli/jobbergate_cli/subapps/applications/app.py +++ b/jobbergate-cli/jobbergate_cli/subapps/applications/app.py @@ -9,7 +9,7 @@ import typer from jobbergate_cli.constants import SortOrder -from jobbergate_cli.exceptions import Abort, handle_abort +from jobbergate_cli.exceptions import Abort from jobbergate_cli.render import StyleMapper, render_single_result, terminal_message from jobbergate_cli.requests import make_request from jobbergate_cli.schemas import ApplicationResponse, JobbergateContext @@ -58,7 +58,6 @@ @app.command("list") -@handle_abort def list_all( ctx: typer.Context, show_all: bool = typer.Option(False, "--all", help="Show all 
applications, even the ones without identifier"), @@ -100,7 +99,6 @@ def list_all( @app.command() -@handle_abort def get_one( ctx: typer.Context, id: Optional[int] = typer.Option( @@ -128,7 +126,6 @@ def get_one( @app.command() -@handle_abort def create( ctx: typer.Context, name: str = typer.Option( @@ -209,7 +206,6 @@ def create( @app.command() -@handle_abort def update( ctx: typer.Context, id: Optional[int] = typer.Option( @@ -317,7 +313,6 @@ def update( @app.command() -@handle_abort def delete( ctx: typer.Context, id: Optional[int] = typer.Option( @@ -378,7 +373,6 @@ def delete( @app.command() -@handle_abort def download_files( ctx: typer.Context, id: Optional[int] = typer.Option( @@ -412,7 +406,6 @@ def download_files( @app.command() -@handle_abort def clone( ctx: typer.Context, id: Optional[int] = typer.Option( diff --git a/jobbergate-cli/jobbergate_cli/subapps/applications/tools.py b/jobbergate-cli/jobbergate_cli/subapps/applications/tools.py index 4942f5cb1..928c7f0b4 100644 --- a/jobbergate-cli/jobbergate_cli/subapps/applications/tools.py +++ b/jobbergate-cli/jobbergate_cli/subapps/applications/tools.py @@ -435,7 +435,7 @@ class ApplicationRuntime: supplied_params: Dict[str, Any] = field(default_factory=dict) fast_mode: bool = False - def __post_init__(self): + def __post_init__(self) -> None: self.app_config, self.app_module = load_application_data(self.app_data, self.app_source_code) self.answers: Dict[str, Any] = dict() diff --git a/jobbergate-cli/jobbergate_cli/subapps/clusters/app.py b/jobbergate-cli/jobbergate_cli/subapps/clusters/app.py index 6e2e20a1c..3d725409f 100644 --- a/jobbergate-cli/jobbergate_cli/subapps/clusters/app.py +++ b/jobbergate-cli/jobbergate_cli/subapps/clusters/app.py @@ -4,7 +4,6 @@ import typer -from jobbergate_cli.exceptions import handle_abort from jobbergate_cli.render import terminal_message from jobbergate_cli.schemas import JobbergateContext from jobbergate_cli.subapps.clusters.tools import get_client_ids @@ -14,7 +13,6 @@ 
@app.command("list") -@handle_abort def list_all( ctx: typer.Context, ): diff --git a/jobbergate-cli/jobbergate_cli/subapps/job_scripts/app.py b/jobbergate-cli/jobbergate_cli/subapps/job_scripts/app.py index d2ced8660..f13099074 100644 --- a/jobbergate-cli/jobbergate_cli/subapps/job_scripts/app.py +++ b/jobbergate-cli/jobbergate_cli/subapps/job_scripts/app.py @@ -10,7 +10,7 @@ from jobbergate_cli.config import settings from jobbergate_cli.constants import SortOrder -from jobbergate_cli.exceptions import Abort, handle_abort +from jobbergate_cli.exceptions import Abort from jobbergate_cli.render import StyleMapper, render_single_result, terminal_message from jobbergate_cli.requests import make_request from jobbergate_cli.schemas import JobbergateContext, JobScriptCreateRequest, JobScriptResponse @@ -49,7 +49,6 @@ @app.command("list") -@handle_abort def list_all( ctx: typer.Context, show_all: bool = typer.Option(False, "--all", help="Show all job scripts, even the ones owned by others"), @@ -93,7 +92,6 @@ def list_all( @app.command() -@handle_abort def get_one( ctx: typer.Context, id: int = typer.Option(..., "--id", "-i", help="The specific id of the job script."), @@ -112,7 +110,6 @@ def get_one( @app.command() -@handle_abort def create_stand_alone( ctx: typer.Context, name: str = typer.Option( @@ -167,7 +164,6 @@ def create_stand_alone( @app.command() -@handle_abort def create_locally( ctx: typer.Context, application_path: pathlib.Path = typer.Argument( @@ -228,7 +224,6 @@ def create_locally( @app.command() -@handle_abort def create( ctx: typer.Context, name: Optional[str] = typer.Option( @@ -394,7 +389,6 @@ def create( @app.command() -@handle_abort def update( ctx: typer.Context, id: int = typer.Option( @@ -448,7 +442,6 @@ def update( @app.command() -@handle_abort def delete( ctx: typer.Context, id: int = typer.Option( @@ -481,7 +474,6 @@ def delete( @app.command() -@handle_abort def show_files( ctx: typer.Context, id: int = typer.Option(..., help="The specific id 
of the job script."), @@ -514,7 +506,6 @@ def show_files( @app.command() -@handle_abort def download_files( ctx: typer.Context, id: int = typer.Option(..., help="The specific id of the job script."), @@ -536,7 +527,6 @@ def download_files( @app.command() -@handle_abort def clone( ctx: typer.Context, id: int = typer.Option( diff --git a/jobbergate-cli/jobbergate_cli/subapps/job_submissions/app.py b/jobbergate-cli/jobbergate_cli/subapps/job_submissions/app.py index 7529b9267..ef9c2b3c5 100644 --- a/jobbergate-cli/jobbergate_cli/subapps/job_submissions/app.py +++ b/jobbergate-cli/jobbergate_cli/subapps/job_submissions/app.py @@ -9,7 +9,7 @@ import typer from jobbergate_cli.constants import SortOrder -from jobbergate_cli.exceptions import Abort, handle_abort +from jobbergate_cli.exceptions import Abort from jobbergate_cli.render import StyleMapper, render_single_result, terminal_message from jobbergate_cli.requests import make_request from jobbergate_cli.schemas import JobbergateContext, JobSubmissionResponse @@ -37,7 +37,6 @@ @app.command() -@handle_abort def create( ctx: typer.Context, name: str = typer.Option( @@ -122,7 +121,6 @@ def create( @app.command("list") -@handle_abort def list_all( ctx: typer.Context, show_all: bool = typer.Option( @@ -143,11 +141,6 @@ def list_all( """ jg_ctx: JobbergateContext = ctx.obj - # Make static type checkers happy - assert jg_ctx is not None, "JobbergateContext is uninitialized" - assert jg_ctx.client is not None, "Client is uninitialized" - assert jg_ctx.persona is not None, "Persona is uninitialized" - params: Dict[str, Any] = dict(user_only=not show_all) if search is not None: params["search"] = search @@ -159,9 +152,9 @@ def list_all( params["from_job_script_id"] = from_job_script_id value_mappers = None - organization_id = jg_ctx.persona.identity_data.organization_id + organization_id = jg_ctx.authentication_handler.get_identity_data().organization_id if organization_id is not None: - value_mappers = dict(cluster_name=lambda 
cn: cn.replace(f"-{organization_id}", "")) + value_mappers = dict(cluster_name=lambda cn: cn.removesuffix(f"-{organization_id}")) handle_pagination( jg_ctx=jg_ctx, @@ -177,7 +170,6 @@ def list_all( @app.command() -@handle_abort def get_one( ctx: typer.Context, id: int = typer.Option(..., "--id", "-i", help="The specific id of the job submission."), @@ -189,12 +181,11 @@ def get_one( # Make static type checkers happy assert jg_ctx is not None, "JobbergateContext is uninitialized" - assert jg_ctx.persona is not None, "Persona is uninitialized" value_mappers = None - organization_id = jg_ctx.persona.identity_data.organization_id + organization_id = jg_ctx.authentication_handler.get_identity_data().organization_id if organization_id is not None: - value_mappers = dict(cluster_name=lambda cn: cn.replace(f"-{organization_id}", "")) + value_mappers = dict(cluster_name=lambda cn: cn.removesuffix(f"-{organization_id}")) result = fetch_job_submission_data(jg_ctx, id) render_single_result( @@ -210,7 +201,6 @@ def get_one( @app.command() -@handle_abort def delete( ctx: typer.Context, id: int = typer.Option( diff --git a/jobbergate-cli/jobbergate_cli/subapps/job_submissions/tools.py b/jobbergate-cli/jobbergate_cli/subapps/job_submissions/tools.py index adcc20f2f..3eb4e0f79 100644 --- a/jobbergate-cli/jobbergate_cli/subapps/job_submissions/tools.py +++ b/jobbergate-cli/jobbergate_cli/subapps/job_submissions/tools.py @@ -29,14 +29,11 @@ def _map_cluster_name( If the organization is undefined (multi-tenancy is disabled) or the cluster_name already includes the organization_id, use the base_cluster_name. 
""" - # Make static type checkers happy - assert jg_ctx.persona is not None, "jg_ctx.persona is uninitialized" - - org_id = jg_ctx.persona.identity_data.organization_id + org_id = jg_ctx.authentication_handler.get_identity_data().organization_id if org_id is None or base_cluster_name.endswith(org_id): return base_cluster_name - return f"{base_cluster_name}-{jg_ctx.persona.identity_data.organization_id}" + return f"{base_cluster_name}-{org_id}" @dataclass diff --git a/jobbergate-cli/jobbergate_cli/time_loop.py b/jobbergate-cli/jobbergate_cli/time_loop.py deleted file mode 100644 index 84eb5a4a2..000000000 --- a/jobbergate-cli/jobbergate_cli/time_loop.py +++ /dev/null @@ -1,134 +0,0 @@ -""" -Provide a time-loop class that can be used to to iterate during a given window of time. -""" - -from dataclasses import dataclass -from typing import Optional, Union - -import pendulum -from buzz import require_condition -from rich.progress import Progress - -from jobbergate_cli.exceptions import JobbergateCliError - - -@dataclass -class Tick: - """ - A helper class describing a "tick". - - Contains a counter, elapsed time since the last tick, and total elapsed time. - """ - - counter: int - elapsed: pendulum.Duration - total_elapsed: pendulum.Duration - - -class TimeLoop: - """ - A special iterator that will iterate for a specified duration of time. - - Uses a progress meter to show the user how much time is left. - Each iteration of the time-loop produces a tick. - """ - - advent: Optional[pendulum.DateTime] - moment: Optional[pendulum.DateTime] - last_moment: Optional[pendulum.DateTime] - counter: int - progress: Optional[Progress] - duration: pendulum.Duration - message: str - color: str - - def __init__( - self, - duration: Union[pendulum.Duration, int], - message: str = "Processing", - color: str = "green", - ): - """ - Initialize the time-loop. - - Duration may be either a count of seconds or a ``pendulum.duration``. 
- """ - self.moment = None - self.last_moment = None - self.counter = 0 - self.progress = None - if isinstance(duration, int): - JobbergateCliError.require_condition(duration > 0, "The duration must be a positive integer") - self.duration = pendulum.duration(seconds=duration) - else: - self.duration = duration - self.message = message - self.color = color - - def __del__(self): - """ - Explicitly clear the progress meter if the time-loop is destroyed. - """ - self.clear() - - def __iter__(self) -> "TimeLoop": - """ - Start the iterator. - - Creates and starts the progress meter - """ - self.advent = self.last_moment = self.moment = pendulum.now() - self.counter = 0 - self.progress = Progress() - self.progress.add_task( - f"[{self.color}]{self.message}...", - total=self.duration.total_seconds(), - ) - self.progress.start() - return self - - def __next__(self) -> Tick: - """ - Iterates the time loop and returns a tick. - - If the duration is complete, clear the progress meter and stop iteration. - """ - # Keep mypy happy - assert self.progress is not None - - self.counter += 1 - self.last_moment = self.moment - self.moment: pendulum.DateTime = pendulum.now() - require_condition( - all(isinstance(x, pendulum.DateTime) for x in (self.advent, self.last_moment, self.moment)), - "The time-loop has not been initialized", - TypeError, - ) - elapsed: pendulum.Duration = self.moment - self.last_moment # type: ignore - total_elapsed: pendulum.Duration = self.moment - self.advent # type: ignore - - for task_id in self.progress.task_ids: - self.progress.advance(task_id, elapsed.total_seconds()) - - if self.progress.finished: - self.clear() - raise StopIteration - - return Tick( - counter=self.counter, - elapsed=elapsed, - total_elapsed=total_elapsed, - ) - - def clear(self): - """ - Clear the time-loop. - - Stops the progress meter (if it is set) and reset moments, counter, progress meter. 
- """ - if self.progress is not None: - self.progress.stop() - self.counter = 0 - self.progress = None - self.moment = None - self.last_moment = None diff --git a/jobbergate-cli/pyproject.toml b/jobbergate-cli/pyproject.toml index 6f1923395..9e0214fa7 100644 --- a/jobbergate-cli/pyproject.toml +++ b/jobbergate-cli/pyproject.toml @@ -36,7 +36,7 @@ pydantic = "^2.7" pydantic-settings = "^2.3.3" [tool.poetry.scripts] -jobbergate = "jobbergate_cli.main:app" +jobbergate = "jobbergate_cli.main:safe_entrypoint" [tool.stickywheel] # This will resolve the relative path to the jobbergate-core package at build time @@ -77,7 +77,6 @@ env = [ "OIDC_DOMAIN = dummy_auth_domain.com", "OIDC_AUDIENCE = https://dummy_auth_audience.com", "OIDC_CLIENT_ID = dummy_client_id", - "MULTI_TENANCY_ENABLED = false", ] [tool.coverage.report] diff --git a/jobbergate-cli/tests/subapps/clusters/test_tools.py b/jobbergate-cli/tests/subapps/clusters/test_tools.py index b80cde620..816ab2610 100644 --- a/jobbergate-cli/tests/subapps/clusters/test_tools.py +++ b/jobbergate-cli/tests/subapps/clusters/test_tools.py @@ -5,8 +5,9 @@ import plummet import pytest +from jobbergate_cli.config import settings from jobbergate_cli.exceptions import Abort -from jobbergate_cli.schemas import ClusterCacheData, JobbergateContext +from jobbergate_cli.schemas import ClusterCacheData from jobbergate_cli.subapps.clusters.tools import ( get_client_ids, load_clusters_from_cache, @@ -20,14 +21,6 @@ def dummy_domain(): return "https://dummy.com" -@pytest.fixture -def dummy_context(dummy_domain): - return JobbergateContext( - persona=None, - client=httpx.Client(base_url=dummy_domain, headers={"Authorization": "Bearer XXXXXXXX"}), - ) - - def test_pull_client_ids_from_api__success(respx_mock, dummy_domain, dummy_context): clusters_route = respx_mock.post(f"{dummy_domain}/cluster/graphql/query") clusters_route.mock( @@ -74,47 +67,44 @@ def test_pull_client_ids_from_api__raises_abort_on_malformed_response(respx_mock def 
test_save_clusters_to_cache(tmp_path, tweak_settings): - cluster_cache_path = tmp_path / "clusters.json" - with tweak_settings(JOBBERGATE_CLUSTER_LIST_PATH=cluster_cache_path): + with tweak_settings(JOBBERGATE_CACHE_DIR=tmp_path): with plummet.frozen_time("2022-05-13 16:56:00"): save_clusters_to_cache(["cluster1", "cluster2", "cluster3"]) - cache_data = ClusterCacheData(**json.loads(cluster_cache_path.read_text())) + cache_data = ClusterCacheData(**json.loads(settings.JOBBERGATE_CLUSTER_LIST_PATH.read_text())) + assert cache_data.client_ids == ["cluster1", "cluster2", "cluster3"] assert plummet.moments_match(cache_data.updated_at, "2022-05-13 16:56:00") def test_load_clusters_from_cache__success(tmp_path, tweak_settings): - cluster_cache_path = tmp_path / "clusters.json" - with tweak_settings(JOBBERGATE_CLUSTER_LIST_PATH=cluster_cache_path, JOBBERGATE_CLUSTER_CACHE_LIFETIME=5): + with tweak_settings(JOBBERGATE_CACHE_DIR=tmp_path, JOBBERGATE_CLUSTER_CACHE_LIFETIME=5): with plummet.frozen_time("2022-05-13 16:56:00"): cache_data = ClusterCacheData( updated_at=datetime.utcnow(), client_ids=["cluster1", "cluster2", "cluster3"], ) - cluster_cache_path.write_text(cache_data.model_dump_json()) + settings.JOBBERGATE_CLUSTER_LIST_PATH.write_text(cache_data.model_dump_json()) assert load_clusters_from_cache() == ["cluster1", "cluster2", "cluster3"] def test_load_clusters_from_cache__returns_None_if_cache_is_expired(tmp_path, tweak_settings): - cluster_cache_path = tmp_path / "clusters.json" - with tweak_settings(JOBBERGATE_CLUSTER_LIST_PATH=cluster_cache_path, JOBBERGATE_CLUSTER_CACHE_LIFETIME=5): + with tweak_settings(JOBBERGATE_CACHE_DIR=tmp_path, JOBBERGATE_CLUSTER_CACHE_LIFETIME=5): with plummet.frozen_time("2022-05-13 16:56:00"): cache_data = ClusterCacheData( updated_at=datetime.utcnow(), client_ids=["cluster1", "cluster2", "cluster3"], ) - cluster_cache_path.write_text(cache_data.model_dump_json()) + 
settings.JOBBERGATE_CLUSTER_LIST_PATH.write_text(cache_data.model_dump_json()) with plummet.frozen_time("2022-05-13 16:56:06"): assert load_clusters_from_cache() is None def test_load_clusters_from_cache__returns_None_if_cache_is_invalid(tmp_path, tweak_settings): - cluster_cache_path = tmp_path / "clusters.json" - with tweak_settings(JOBBERGATE_CLUSTER_LIST_PATH=cluster_cache_path, JOBBERGATE_CLUSTER_CACHE_LIFETIME=5): - cluster_cache_path.write_text("BAD DATA") + with tweak_settings(JOBBERGATE_CACHE_DIR=tmp_path, JOBBERGATE_CLUSTER_CACHE_LIFETIME=5): + settings.JOBBERGATE_CLUSTER_LIST_PATH.write_text("BAD DATA") assert load_clusters_from_cache() is None @@ -142,14 +132,13 @@ def test_get_client_ids__pulls_from_api_if_no_cache_available( ), ) - dummy_cache_path = tmp_path / "cluster-names.json" - with tweak_settings(JOBBERGATE_CLUSTER_LIST_PATH=dummy_cache_path): + with tweak_settings(JOBBERGATE_CACHE_DIR=tmp_path): with plummet.frozen_time("2022-05-16 15:38:00"): assert get_client_ids(dummy_context) == ["cluster1", "cluster2", "cluster3"] - assert clusters_route.called - cached_data = json.loads(dummy_cache_path.read_text()) - assert cached_data["client_ids"] == ["cluster1", "cluster2", "cluster3"] - assert plummet.moments_match(cached_data["updated_at"], "2022-05-16 15:38:00") + assert clusters_route.called + cached_data = json.loads(settings.JOBBERGATE_CLUSTER_LIST_PATH.read_text()) + assert cached_data["client_ids"] == ["cluster1", "cluster2", "cluster3"] + assert plummet.moments_match(cached_data["updated_at"], "2022-05-16 15:38:00") def test_get_client_ids__loads_from_cache_when_available(mocker, respx_mock, dummy_domain, dummy_context): diff --git a/jobbergate-cli/tests/subapps/conftest.py b/jobbergate-cli/tests/subapps/conftest.py index 241ef369d..aeac5ef19 100644 --- a/jobbergate-cli/tests/subapps/conftest.py +++ b/jobbergate-cli/tests/subapps/conftest.py @@ -1,13 +1,16 @@ -from typing import Any, Callable, Dict, Optional +from pathlib import Path +from 
typing import Any, Callable, Dict, Generator, Optional import httpx +from jobbergate_core import JobbergateAuthHandler import pytest import yaml from typer import Context, Typer from typer.testing import CliRunner from jobbergate_cli.constants import JOBBERGATE_APPLICATION_CONFIG_FILE_NAME, JOBBERGATE_APPLICATION_MODULE_FILE_NAME -from jobbergate_cli.schemas import IdentityData, JobbergateApplicationConfig, JobbergateContext, Persona, TokenSet +from jobbergate_cli.exceptions import handle_abort, handle_authentication_error +from jobbergate_cli.schemas import IdentityData, JobbergateApplicationConfig, JobbergateContext from jobbergate_cli.subapps.applications.tools import load_application_from_source from jobbergate_cli.text_tools import dedent @@ -30,34 +33,38 @@ def _main_callback(ctx: Context): def _helper(command_name: str, command_function: Callable): main_app = Typer() main_app.callback()(_main_callback) - main_app.command(name=command_name)(command_function) + safe_command = handle_abort(handle_authentication_error(command_function)) + main_app.command(name=command_name)(safe_command) return main_app return _helper @pytest.fixture -def dummy_context(dummy_domain): - return JobbergateContext( - persona=None, - client=httpx.Client(base_url=dummy_domain, headers={"Authorization": "Bearer XXXXXXXX"}), +def dummy_context(mocker, tmp_path, dummy_domain) -> Generator[JobbergateContext, None, None]: + def dummy_auth(request: httpx.Request) -> httpx.Request: + request.headers["Authorization"] = "Bearer XXXXXXXX" + return request + + authentication_handler = JobbergateAuthHandler( + cache_directory=Path(tmp_path), + login_domain="test-domain", + login_audience="test-audience", ) + with mocker.patch.object(authentication_handler, attribute="acquire_access", return_value=dummy_auth): + yield JobbergateContext( + client=httpx.Client(base_url=dummy_domain), authentication_handler=authentication_handler + ) @pytest.fixture -def attach_persona(dummy_context): +def 
attach_persona(mocker, dummy_context): def _helper( email: str, client_id: str = "dummy-client", access_token: str = "foo", organization_id: Optional[str] = None ): - identity_data = IdentityData( - client_id=client_id, - email=email, - ) - if organization_id is not None: - identity_data.organization_id = organization_id - dummy_context.persona = Persona( - token_set=TokenSet(access_token=access_token), - identity_data=identity_data, + identity_data = IdentityData(client_id=client_id, email=email, organization_id=organization_id) + return mocker.patch.object( + dummy_context.authentication_handler, attribute="get_identity_data", return_value=identity_data ) return _helper diff --git a/jobbergate-cli/tests/subapps/test_main.py b/jobbergate-cli/tests/subapps/test_main.py index 38b7fbf8c..c5a6d712d 100644 --- a/jobbergate-cli/tests/subapps/test_main.py +++ b/jobbergate-cli/tests/subapps/test_main.py @@ -1,12 +1,9 @@ import shlex -from jobbergate_cli.main import main +from jobbergate_cli.main import main, login, logout -def test_main_command_with_ignore_username_and_password( - make_test_app, - cli_runner, -): +def test_main_command_with_ignore_username_and_password(make_test_app, cli_runner): test_app = make_test_app("jobbergate", main) result = cli_runner.invoke(test_app, shlex.split("jobbergate --username user --password pass")) @@ -16,3 +13,29 @@ def test_main_command_with_ignore_username_and_password( result = cli_runner.invoke(test_app, shlex.split("jobbergate -u user -p pass")) assert result.exit_code == 0 assert "No command provided" in result.stdout + + +def test_login_success(make_test_app, cli_runner, dummy_context, attach_persona, mocker): + test_app = make_test_app("login", login) + + mocked_login = mocker.patch.object(dummy_context.authentication_handler, "login") + + attach_persona("dummy@dummy.com") + + result = cli_runner.invoke(test_app, shlex.split("login")) + assert result.exit_code == 0 + + assert mocked_login.call_count == 1 + assert "User was 
logged in with email 'dummy@dummy.com'" in result.stdout + + +def test_logout_success(make_test_app, cli_runner, dummy_context, mocker): + test_app = make_test_app("logout", logout) + + mocked_logout = mocker.patch.object(dummy_context.authentication_handler, "logout") + + result = cli_runner.invoke(test_app, shlex.split("logout")) + assert result.exit_code == 0 + + assert mocked_logout.call_count == 1 + assert "User was logged out" in result.stdout diff --git a/jobbergate-cli/tests/test_auth.py b/jobbergate-cli/tests/test_auth.py index 211d3593b..1b8adc24a 100644 --- a/jobbergate-cli/tests/test_auth.py +++ b/jobbergate-cli/tests/test_auth.py @@ -1,41 +1,14 @@ -import typing -from pathlib import Path - -import httpx -import pendulum -import plummet import pytest -from jose import ExpiredSignatureError, jwt from jobbergate_cli.auth import ( Console, - TokenSet, + DeviceCodeData, _show_login_narrow_screen, _show_login_standard_screen, - clear_token_cache, - fetch_auth_tokens, - init_persona, - load_tokens_from_cache, - refresh_access_token, - save_tokens_to_cache, + open_on_browser, show_login_message, - validate_token_and_extract_identity, + webbrowser, ) -from jobbergate_cli.config import settings -from jobbergate_cli.exceptions import Abort -from jobbergate_cli.schemas import JobbergateContext -from jobbergate_cli.time_loop import Tick - - -LOGIN_DOMAIN = "https://dummy-auth.com" - - -@pytest.fixture -def dummy_context(): - return JobbergateContext( - persona=None, - client=httpx.Client(base_url=LOGIN_DOMAIN, headers={"content-type": "application/x-www-form-urlencoded"}), - ) @pytest.fixture(autouse=True) @@ -48,554 +21,37 @@ def mocked_open_on_browser(mocker): return mocked -@pytest.fixture -def make_token(): - """ - Provide a fixture that returns a helper function for creating an access_token for testing. 
- """ - - def _helper( - azp: typing.Optional[str] = None, - email: typing.Optional[str] = None, - expires: plummet.AGGREGATE_TYPE = pendulum.now(tz="UTC"), - organization_id: typing.Optional[str] = None, - **extras, - ) -> str: +class TestOpenOnBrowser: + def test_open_on_browser_valid_url(self, mocker): """ - Create an access_token with a given user email, org name, and expiration moment. + Test that open_on_browser returns True when a valid browser is available. """ - expires_moment: pendulum.DateTime = plummet.momentize(expires) - - extra_claims = dict() - if azp is not None: - extra_claims["azp"] = azp - if email is not None: - extra_claims["email"] = email - if organization_id is not None: - extra_claims["organization"] = { - organization_id: { - "name": "Dummy Organization", - } - } - - for k, v in extras.items(): - extra_claims[k] = v - - return jwt.encode( - { - "exp": expires_moment.int_timestamp, - **extra_claims, - }, - "fake-secret", - algorithm="HS256", - ) - - return _helper - - -def test_validate_token_and_extract_identity__success(make_token): - """ - Validate that the ``validate_token_and_extract_identity()`` function can successfully validate a good - access token and extract the user's identity from it. - """ - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 22:30:00", - ) - with plummet.frozen_time("2022-02-16 21:30:00"): - identity_data = validate_token_and_extract_identity(TokenSet(access_token=access_token)) - assert identity_data.client_id == "dummy-client" - assert identity_data.email == "good@email.com" - - -def test_validate_token_and_extract_identity__re_raises_ExpiredSignatureError(make_token): - """ - Validate that the ``validate_token_and_extract_identity()`` function will catch and then re-raise a - ``ExpiredSignatureError`` thrown by the ``jwt.decode()`` function. 
- """ - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 20:30:00", - ) - with plummet.frozen_time("2022-02-16 21:30:00"): - with pytest.raises(ExpiredSignatureError): - validate_token_and_extract_identity(TokenSet(access_token=access_token)) - - -def test_validate_token_and_extract_identity__raises_abort_on_empty_token(): - """ - Validate that the ``validate_token_and_extract_identity()`` function will - raise an ``Abort`` when the access_token exists but is an empty string/file. - """ - test_token_set = TokenSet(access_token="") - with pytest.raises(Abort, match="Access token file exists but it is empty"): - validate_token_and_extract_identity(test_token_set) - - -def test_validate_token_and_extract_identity__raises_abort_on_unknown_error(mocker): - """ - Validate that the ``validate_token_and_extract_identity()`` function will raise an ``Abort`` when the - ``jwt.decode()`` function raises an exception type besides ``ExpiredSignatureError``. - """ - test_token_set = TokenSet(access_token="BOGUS-TOKEN") - mocker.patch("jose.jwt.decode", side_effect=Exception("BOOM!")) - with pytest.raises(Abort, match="There was an unknown error while validating"): - validate_token_and_extract_identity(test_token_set) - - -def test_validate_token_and_extract_identity__raises_abort_if_token_is_missing_identity_data(make_token): - """ - Validate that the ``validate_token_and_extract_identity()`` function will raise an Abort if the - access_token doesn't carry all the required identity data in it. 
- """ - access_token = make_token(expires="2022-02-16 22:30:00") - with plummet.frozen_time("2022-02-16 21:30:00"): - with pytest.raises(Abort, match="error extracting the user's identity"): - validate_token_and_extract_identity(TokenSet(access_token=access_token)) - - -def test_validate_token_and_extract_identity__includes_org_id_in_multitenant_mode(make_token, tweak_settings): - """ - Validate that the ``validate_token_and_extract_identity()`` function can successfully validate a good - access token that includes the organization_id and extract it when multi-tenancy mode is enabled. - """ - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 22:30:00", - organization_id="some-org", - ) - with tweak_settings(MULTI_TENANCY_ENABLED=True): - with plummet.frozen_time("2022-02-16 21:30:00"): - identity_data = validate_token_and_extract_identity(TokenSet(access_token=access_token)) - assert identity_data.client_id == "dummy-client" - assert identity_data.email == "good@email.com" - assert identity_data.organization_id == "some-org" - - -def test_validate_token_and_extract_identity__raises_exception_for_malformed_organization_in_multitenant_mode( - make_token, tweak_settings -): - """ - Validate that the ``validate_token_and_extract_identity()`` function will raise an Abort if the organization - payload is malformed when multi-tenancy mode is enabled. 
- """ - # Case when the organization payload is garbage - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 22:30:00", - organization="some-org", - ) - with pytest.raises(Abort, match="The access token is invalid"): - with tweak_settings(MULTI_TENANCY_ENABLED=True): - with plummet.frozen_time("2022-02-16 21:30:00"): - validate_token_and_extract_identity(TokenSet(access_token=access_token)) - - # Case when no organizations are embedded - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 22:30:00", - organization={}, - ) - with pytest.raises(Abort, match="The access token is invalid"): - with tweak_settings(MULTI_TENANCY_ENABLED=True): - with plummet.frozen_time("2022-02-16 21:30:00"): - validate_token_and_extract_identity(TokenSet(access_token=access_token)) - - # Case when multiple organizations are embedded - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 22:30:00", - organization={ - "some-org": {}, - "some-other-org": {}, - }, - ) - with pytest.raises(Abort, match="The access token is invalid"): - with tweak_settings(MULTI_TENANCY_ENABLED=True): - with plummet.frozen_time("2022-02-16 21:30:00"): - validate_token_and_extract_identity(TokenSet(access_token=access_token)) - - -def test_load_tokens_from_cache__success(make_token, tmp_path, mocker): - """ - Validate that the ``load_tokens_from_cache()`` function can successfully load tokens from the token - cache on disk. 
- """ - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 22:30:00", - ) - access_token_path = tmp_path / "access.jwt" - access_token_path.write_text(access_token) - refresh_token = "dummy-refresh-token" - refresh_token_path = tmp_path / "refresh.jwt" - refresh_token_path.write_text(refresh_token) - - mocker.patch.object(settings, "JOBBERGATE_ACCESS_TOKEN_PATH", new=access_token_path) - mocker.patch.object(settings, "JOBBERGATE_REFRESH_TOKEN_PATH", new=refresh_token_path) - token_set = load_tokens_from_cache() - - assert token_set.access_token == access_token - assert token_set.refresh_token == refresh_token - - -def test_load_tokens_from_cache__raises_abort_if_access_token_path_does_not_exist(mocker): - """ - Validate taht the ``load_tokens_from_cache()`` function raises an Abort if the token does not exist - at the location specified by ``settings.JOBBERGATE_ACCESS_TOKEN_PATH``. - """ - mocker.patch.object(settings, "JOBBERGATE_ACCESS_TOKEN_PATH", new=Path("/some/fake/path")) - with pytest.raises(Abort, match="login with your auth token first"): - load_tokens_from_cache() - - -def test_load_tokens_from_cache__omits_refresh_token_if_it_is_not_found(make_token, tmp_path, mocker): - """ - Validate that the ``load_tokens_from_cache()`` function can successfully create a token set without the - refresh token if the location specified by ``settings.JOBBERGATE_REFRESH_TOKEN_PATH`` does not exist. 
- """ - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 22:30:00", - ) - access_token_path = tmp_path / "access.jwt" - access_token_path.write_text(access_token) - - mocker.patch.object(settings, "JOBBERGATE_ACCESS_TOKEN_PATH", new=access_token_path) - mocker.patch.object(settings, "JOBBERGATE_REFRESH_TOKEN_PATH", new=Path("/some/fake/path")) - token_set = load_tokens_from_cache() - - assert token_set.access_token == access_token - assert token_set.refresh_token is None - - -def test_save_tokens_to_cache__success(make_token, tmp_path, mocker): - """ - Validate that the ``save_tokens_to_cache()`` function will write a access and refresh token from a - ``TokenSet`` instance to the locations described by ``JOBBERGATE_ACCESS_TOKEN_PATH`` and - ``JOBBERGATE_REFRESH_TOKEN_PATH``. - """ - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 22:30:00", - ) - access_token_path = tmp_path / "access.jwt" - refresh_token = "dummy-refresh-token" - refresh_token_path = tmp_path / "refresh.jwt" - token_set = TokenSet( - access_token=access_token, - refresh_token=refresh_token, - ) - - mocker.patch.object(settings, "JOBBERGATE_ACCESS_TOKEN_PATH", new=access_token_path) - mocker.patch.object(settings, "JOBBERGATE_REFRESH_TOKEN_PATH", new=refresh_token_path) - save_tokens_to_cache(token_set) - - assert access_token_path.exists() - assert access_token_path.read_text() == access_token - assert access_token_path.stat().st_mode & 0o777 == 0o600 - - assert refresh_token_path.exists() - assert refresh_token_path.read_text() == refresh_token - assert access_token_path.stat().st_mode & 0o777 == 0o600 - - -def test_save_tokens_to_cache__only_saves_access_token_if_refresh_token_is_not_defined(make_token, tmp_path, mocker): - """ - Validate that the ``save_tokens_to_cache()`` function will only write an access token to the cache if the - ``TokenSet`` instance does not carry a refresh token. 
- """ - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 22:30:00", - ) - access_token_path = tmp_path / "access.jwt" - refresh_token_path = tmp_path / "refresh.jwt" - token_set = TokenSet( - access_token=access_token, - ) - - mocker.patch.object(settings, "JOBBERGATE_ACCESS_TOKEN_PATH", new=access_token_path) - mocker.patch.object(settings, "JOBBERGATE_REFRESH_TOKEN_PATH", new=refresh_token_path) - save_tokens_to_cache(token_set) + mock_browser = mocker.Mock() + mock_browser.open.return_value = True + mocker.patch("webbrowser.get", return_value=mock_browser) - assert access_token_path.exists() - assert access_token_path.read_text() == access_token + result = open_on_browser("https://example.com") + assert result is True + mock_browser.open.assert_called_once_with("https://example.com") - assert not refresh_token_path.exists() - - -def test_clear_token_cache__success(make_token, tmp_path, mocker): - """ - Validate that the ``clear_token_cache()`` function removes the access token and refresh token from the - cache. - """ - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 22:30:00", - ) - access_token_path = tmp_path / "access.jwt" - access_token_path.write_text(access_token) - refresh_token = "dummy-refresh-token" - refresh_token_path = tmp_path / "refresh.jwt" - refresh_token_path.write_text(refresh_token) - - assert access_token_path.exists() - assert refresh_token_path.exists() - - mocker.patch.object(settings, "JOBBERGATE_ACCESS_TOKEN_PATH", new=access_token_path) - mocker.patch.object(settings, "JOBBERGATE_REFRESH_TOKEN_PATH", new=refresh_token_path) - clear_token_cache() - - assert not access_token_path.exists() - - -def test_clear_token_cache__does_not_fail_if_no_tokens_are_in_cache(tmp_path, mocker): - """ - Validate that the ``clear_token_cache()`` function does not fail if there are no tokens in the cache. 
- """ - access_token_path = tmp_path / "access.jwt" - refresh_token_path = tmp_path / "refresh.jwt" - - assert not access_token_path.exists() - assert not refresh_token_path.exists() - - mocker.patch.object(settings, "JOBBERGATE_ACCESS_TOKEN_PATH", new=access_token_path) - mocker.patch.object(settings, "JOBBERGATE_REFRESH_TOKEN_PATH", new=refresh_token_path) - clear_token_cache() - - -def test_init_persona__success(make_token, tmp_path, dummy_context, mocker): - """ - Validate that the ``init_persona()`` function will load tokens from the cache, validate them, - extract user email, and return a new ``Persona`` instance with the tokens and user email contained - within it. - """ - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 22:30:00", - ) - access_token_path = tmp_path / "access.jwt" - access_token_path.write_text(access_token) - refresh_token = "dummy-refresh-token" - refresh_token_path = tmp_path / "refresh.jwt" - refresh_token_path.write_text(refresh_token) - - mocker.patch.object(settings, "JOBBERGATE_ACCESS_TOKEN_PATH", new=access_token_path) - mocker.patch.object(settings, "JOBBERGATE_REFRESH_TOKEN_PATH", new=refresh_token_path) - with plummet.frozen_time("2022-02-16 21:30:00"): - persona = init_persona(dummy_context) - - assert persona.token_set.access_token == access_token - assert persona.token_set.refresh_token == refresh_token - assert persona.identity_data.client_id == "dummy-client" - assert persona.identity_data.email == "good@email.com" - - -def test_init_persona__uses_passed_token_set(make_token, tmp_path, dummy_context, mocker): - """ - Validate that the ``init_persona()`` function will used the passed ``TokenSet`` instance instead of - loading it from the cache and will write the tokens to the cache after validating them. 
- """ - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 22:30:00", - ) - access_token_path = tmp_path / "access.jwt" - refresh_token = "dummy-refresh-token" - refresh_token_path = tmp_path / "refresh.jwt" - - token_set = TokenSet( - access_token=access_token, - refresh_token=refresh_token, - ) - - assert not access_token_path.exists() - assert not refresh_token_path.exists() - - mocker.patch.object(settings, "JOBBERGATE_ACCESS_TOKEN_PATH", new=access_token_path) - mocker.patch.object(settings, "JOBBERGATE_REFRESH_TOKEN_PATH", new=refresh_token_path) - with plummet.frozen_time("2022-02-16 21:30:00"): - persona = init_persona(dummy_context, token_set) - - assert persona.token_set.access_token == access_token - assert persona.token_set.refresh_token == refresh_token - assert persona.identity_data.client_id == "dummy-client" - assert persona.identity_data.email == "good@email.com" - - assert access_token_path.exists() - assert access_token_path.read_text() == access_token - assert refresh_token_path.exists() - - -def test_init_persona__refreshes_access_token_if_it_is_expired(make_token, tmp_path, respx_mock, dummy_context, mocker): - """ - Validate that the ``init_persona()`` function will refresh the access token if it is expired and, after - validating it, save it to the cache. 
- """ - access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-16 22:30:00", - ) - access_token_path = tmp_path / "access.jwt" - access_token_path.write_text(access_token) - refresh_token = "dummy-refresh-token" - refresh_token_path = tmp_path / "refresh.jwt" - refresh_token_path.write_text(refresh_token) - - refreshed_access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-17 22:30:00", - ) - - respx_mock.post(f"{LOGIN_DOMAIN}/protocol/openid-connect/token").mock( - return_value=httpx.Response( - httpx.codes.OK, - json=dict(access_token=refreshed_access_token), - ), - ) - - mocker.patch.object(settings, "JOBBERGATE_ACCESS_TOKEN_PATH", new=access_token_path) - mocker.patch.object(settings, "JOBBERGATE_REFRESH_TOKEN_PATH", new=refresh_token_path) - with plummet.frozen_time("2022-02-16 23:30:00"): - persona = init_persona(dummy_context) - - assert persona.token_set.access_token == refreshed_access_token - assert persona.token_set.refresh_token == refresh_token - assert persona.identity_data.client_id == "dummy-client" - assert persona.identity_data.email == "good@email.com" - - assert access_token_path.exists() - assert access_token_path.read_text() == refreshed_access_token - assert refresh_token_path.exists() - assert refresh_token_path.read_text() == refresh_token - - -def test_refresh_access_token__success(make_token, respx_mock, dummy_context): - """ - Validate that the ``refreshed_access_token()`` function uses a refresh token to retrieve a new access - token from the ``oauth/protocol/openid-connect/token`` - endpoint of the ``settings.OIDC_DOMAIN``. 
- """ - access_token = "expired-access-token" - refresh_token = "dummy-refresh-token" - token_set = TokenSet(access_token=access_token, refresh_token=refresh_token) - - refreshed_access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-17 22:30:00", - ) - - respx_mock.post(f"{LOGIN_DOMAIN}/protocol/openid-connect/token").mock( - return_value=httpx.Response( - httpx.codes.OK, - json=dict(access_token=refreshed_access_token), - ), - ) - - refresh_access_token(dummy_context, token_set) - assert token_set.access_token == refreshed_access_token - assert token_set.refresh_token == refresh_token - - -def test_refresh_access_token__includes_refresh(make_token, respx_mock, dummy_context): - """ - Validate that the ``refreshed_access_token()`` function also updates the refresh token if it is available. - """ - access_token = "expired-access-token" - refresh_token = "dummy-refresh-token" - token_set = TokenSet(access_token=access_token, refresh_token=refresh_token) - - refreshed_access_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-17 22:30:00", - ) - refreshed_refresh_token = make_token( - azp="dummy-client", - email="good@email.com", - expires="2022-02-17 22:30:00", - ) - - respx_mock.post(f"{LOGIN_DOMAIN}/protocol/openid-connect/token").mock( - return_value=httpx.Response( - httpx.codes.OK, - json=dict(access_token=refreshed_access_token, refresh_token=refreshed_refresh_token), - ), - ) - - refresh_access_token(dummy_context, token_set) - assert token_set.access_token == refreshed_access_token - assert token_set.refresh_token == refreshed_refresh_token - - -def test_refresh_access_token__raises_abort_on_non_200_response(respx_mock, dummy_context): - """ - Validate that the ``refreshed_access_token()`` function raises an abort if the response from the - ``oauth//protocol/openid-connect/token`` endpoint of the - ``settings.OIDC_DOMAIN`` is not a 200. 
- """ - access_token = "expired-access-token" - refresh_token = "dummy-refresh-token" - token_set = TokenSet(access_token=access_token, refresh_token=refresh_token) - - respx_mock.post(f"{LOGIN_DOMAIN}/protocol/openid-connect/token").mock( - return_value=httpx.Response( - httpx.codes.BAD_REQUEST, - json=dict(error_description="BOOM!"), - ), - ) - - with pytest.raises(Abort, match="auth token could not be refreshed"): - refresh_access_token(dummy_context, token_set) + def test_open_on_browser_invalid_browser(self, mocker): + """ + Test that open_on_browser returns False when a GenericBrowser is used. + """ + mock_browser = webbrowser.GenericBrowser("dummy") + mocker.patch("webbrowser.get", return_value=mock_browser) + result = open_on_browser("https://example.com") + assert result is False -def test_fetch_auth_tokens__success(respx_mock, dummy_context): - """ - Validate that the ``fetch_auth_tokens()`` function can successfully retrieve auth tokens from the - OIDC provider. - """ + def test_open_on_browser_exception(self, mocker): + """ + Test that open_on_browser returns False when an exception is raised. 
+ """ + mocker.patch("webbrowser.get", side_effect=Exception("Test exception")) - access_token = "dummy-access-token" - refresh_token = "dummy-refresh-token" - respx_mock.post(f"{LOGIN_DOMAIN}/protocol/openid-connect/auth/device").mock( - return_value=httpx.Response( - httpx.codes.OK, - json=dict( - device_code="dummy-code", - verification_uri_complete="https://dummy-uri.com", - interval=1, - ), - ), - ) - respx_mock.post(f"{LOGIN_DOMAIN}/protocol/openid-connect/token").mock( - return_value=httpx.Response( - httpx.codes.OK, - json=dict( - access_token=access_token, - refresh_token=refresh_token, - ), - ), - ) - token_set = fetch_auth_tokens(dummy_context) - assert token_set.access_token == access_token - assert token_set.refresh_token == refresh_token + result = open_on_browser("https://example.com") + assert result is False class TestShowLoginMessage: @@ -611,6 +67,10 @@ def test_show_login_message__standard_screen(self, mocker, mocked_helpers): verification_uri = "https://example.com" console_width = len(verification_uri) + 7 + device_code_data = DeviceCodeData( + verification_uri_complete=verification_uri, expires_in=60, device_code="1234", interval=5 + ) + mocked_console = mocker.MagicMock() mocked_console.width = console_width mocker.patch("jobbergate_cli.auth.Console", return_value=mocked_console) @@ -618,12 +78,12 @@ def test_show_login_message__standard_screen(self, mocker, mocked_helpers): mocked_show_on_narrow_screen = mocker.patch("jobbergate_cli.auth._show_login_narrow_screen") mocked_show_on_standard_screen = mocker.patch("jobbergate_cli.auth._show_login_standard_screen") - show_login_message(verification_uri) + show_login_message(device_code_data) assert mocked_show_on_narrow_screen.call_count == 0 - mocked_show_on_standard_screen.assert_called_once_with(verification_uri) + mocked_show_on_standard_screen.assert_called_once_with(verification_uri, 1) - _show_login_standard_screen(verification_uri) + _show_login_standard_screen(verification_uri, 1) def 
test_show_login_message__narrow_screen(self, mocker, mocked_helpers): """ @@ -632,6 +92,10 @@ def test_show_login_message__narrow_screen(self, mocker, mocked_helpers): verification_uri = "https://example.com" console_width = len(verification_uri) + 7 - 1 + device_code_data = DeviceCodeData( + verification_uri_complete=verification_uri, expires_in=60, device_code="1234", interval=5 + ) + mocked_console = mocker.MagicMock() mocked_console.width = console_width mocker.patch("jobbergate_cli.auth.Console", return_value=mocked_console) @@ -639,33 +103,9 @@ def test_show_login_message__narrow_screen(self, mocker, mocked_helpers): mocked_show_on_narrow_screen = mocker.patch("jobbergate_cli.auth._show_login_narrow_screen") mocked_show_on_standard_screen = mocker.patch("jobbergate_cli.auth._show_login_standard_screen") - show_login_message(verification_uri) + show_login_message(device_code_data) assert mocked_show_on_standard_screen.call_count == 0 - mocked_show_on_narrow_screen.assert_called_once_with(verification_uri, mocked_console) + mocked_show_on_narrow_screen.assert_called_once_with(verification_uri, 1, mocked_console) - _show_login_narrow_screen(verification_uri, Console()) - - -def test_fetch_auth_tokens__raises_Abort_when_it_times_out_waiting_for_the_user(respx_mock, dummy_context, mocker): - """ - Validate that the ``fetch_auth_tokens()`` function will raise an Abort if the time runs out before a user - completes the login process. 
- """ - respx_mock.post(f"{LOGIN_DOMAIN}/protocol/openid-connect/auth/device").mock( - return_value=httpx.Response( - httpx.codes.OK, - json=dict( - device_code="dummy-code", - verification_uri_complete="https://dummy-uri.com", - interval=0, - ), - ), - ) - respx_mock.post(f"{LOGIN_DOMAIN}/protocol/openid-connect/token").mock( - return_value=httpx.Response(httpx.codes.BAD_REQUEST, json=dict(error="authorization_pending")), - ) - one_tick = Tick(counter=1, elapsed=pendulum.Duration(seconds=1), total_elapsed=pendulum.Duration(seconds=1)) - mocker.patch("jobbergate_cli.auth.TimeLoop", return_value=[one_tick]) - with pytest.raises(Abort, match="not completed in time"): - fetch_auth_tokens(dummy_context) + _show_login_narrow_screen(verification_uri, 1, Console()) diff --git a/jobbergate-core/CHANGELOG.md b/jobbergate-core/CHANGELOG.md index b4f61ee64..9f3a08572 100644 --- a/jobbergate-core/CHANGELOG.md +++ b/jobbergate-core/CHANGELOG.md @@ -3,7 +3,10 @@ This file keeps track of all notable changes to jobbergate-core ## Unreleased - +- Refined authentication handler to get it ready to be used on the CLI and enable auto-login [ASP-4779] + - Created a request handler to facilitate the communication with OIDC + - Added Pydantic back as a dependency and applied it to validate responses from the OIDC server + - Created extra exceptions to handle authentication errors, that in turn can be more easily handled by client code ## 5.2.0 -- 2024-07-01 - Removed unused dependency for pydantic diff --git a/jobbergate-core/jobbergate_core/auth/handler.py b/jobbergate-core/jobbergate_core/auth/handler.py index 5199b4739..6370187ba 100644 --- a/jobbergate-core/jobbergate_core/auth/handler.py +++ b/jobbergate-core/jobbergate_core/auth/handler.py @@ -3,22 +3,72 @@ """ import time -from collections import namedtuple from dataclasses import dataclass, field from pathlib import Path -from typing import Dict +from typing import Callable, Iterable -import httpx from loguru import logger +import 
pendulum +from pydantic import BaseModel -from jobbergate_core.auth.exceptions import AuthenticationError, TokenError +from jobbergate_core.auth.exceptions import AuthenticationError from jobbergate_core.auth.token import Token, TokenType +from jobbergate_core.tools.requests import Client, RequestHandler -_LoginInformation = namedtuple( - "_LoginInformation", - ["verification_url", "wait_interval", "device_code", "expires_at"], -) +class DeviceCodeData(BaseModel): + """ + A model representing the data that is returned from the OIDC provider's device code endpoint. + """ + + verification_uri_complete: str + interval: int + device_code: str + expires_in: float + + +class TokenInformation(BaseModel): + access_token: str | None = None + refresh_token: str | None = None + + +class IdentityData(BaseModel): + """ + A model representing the identifying data for a user from an auth token. + """ + + email: str + client_id: str + organization_id: str | None = None + + +@dataclass +class TimedIterator: + """ + An iterator that runs for a given time interval, yielding the current iteration number. 
+ """ + + total: int + step: int + + def __post_init__(self): + now = pendulum.now() + interval = pendulum.interval(now, now.add(seconds=self.total)) + self.range = interval.range("seconds", self.step) + + def __iter__(self): + for i, date_time in enumerate(self.range): + time_to_sleep = date_time.diff(pendulum.now()).in_seconds() + time.sleep(time_to_sleep) + yield i + + def __len__(self): + return self.total // self.step + 1 + + +def print_login_url(device_code_data: DeviceCodeData): + """Basic way to handle the login url.""" + print(f"Login Here: {device_code_data.verification_uri_complete}") @dataclass @@ -82,12 +132,18 @@ class to authenticate a request: login_domain: str login_audience: str login_client_id: str = "default" + login_client_secret: str | None = None + login_url_handler: Callable[[DeviceCodeData], None] = print_login_url + login_sequence_handler: Callable[[Iterable], Iterable] = lambda i: i + + _client: Client = field(init=False, repr=False) _access_token: Token = field(init=False, repr=False) _refresh_token: Token = field(init=False, repr=False) def __post_init__(self): self._access_token = Token(cache_directory=self.cache_directory, label=TokenType.ACCESS.value) self._refresh_token = Token(cache_directory=self.cache_directory, label=TokenType.REFRESH.value) + self._client = Client(base_url=self.login_domain, headers={"content-type": "application/x-www-form-urlencoded"}) def __call__(self, request): """ @@ -121,26 +177,57 @@ def acquire_access(self) -> str: Raises: AuthenticationError: If all of the steps above fail to acquire a valid access token. 
""" + if self._access_token.is_valid(): + return self._access_token.bearer_token + logger.debug("Acquiring access token") - for procedure_name in (None, "load_from_cache", "refresh_tokens", "login"): - if procedure_name: - procedure = getattr(self, procedure_name) - try: - procedure() - except TokenError: - logger.debug("{} failed, moving to the next procedure", procedure_name) + for procedure_name in ( + "load_from_cache", + "refresh_tokens", + "get_access_from_secret", + "login", + ): + procedure = getattr(self, procedure_name) + try: + procedure() + except AuthenticationError as err: + logger.debug("{} failed due to: {}", procedure_name, str(err)) if self._access_token.is_valid(): return self._access_token.bearer_token - raise AuthenticationError("Unable to acquire the access token") + raise AuthenticationError("Unable to acquire the access token, all attempts failed") + + def get_access_from_secret(self) -> None: + AuthenticationError.require_condition(self.login_client_secret is not None, message="Client secret is unset") + + with AuthenticationError.handle_errors("Failed to get access token from client secret"): + data = ( + RequestHandler( + client=self._client, + url_path="/protocol/openid-connect/token", + method="POST", + request_kwargs={ + "data": { + "audience": self.login_audience, + "client_id": self.login_client_id, + "client_secret": self.login_client_secret, + "grant_type": "client_credentials", + } + }, + ) + .raise_for_status() + .to_model(TokenInformation) + ) + self._update_tokens_from_info(data) def load_from_cache(self) -> None: """ Load the tokens that are available at the cache directory. 
""" logger.debug("Loading tokens from cache directory: {}", self.cache_directory.as_posix()) - self._access_token = self._access_token.load_from_cache() - self._refresh_token = self._refresh_token.load_from_cache() + with AuthenticationError.handle_errors("Failed to load tokens from cache"): + self._access_token = self._access_token.load_from_cache() + self._refresh_token = self._refresh_token.load_from_cache() def save_to_cache(self) -> None: """ @@ -149,23 +236,24 @@ def save_to_cache(self) -> None: Note: This method will create the cache directory if it does not exist. """ - logger.debug( - "Saving tokens to cache directory: {}", - self.cache_directory.as_posix(), - ) - self.cache_directory.mkdir(parents=True, exist_ok=True) - self._access_token.save_to_cache() - self._refresh_token.save_to_cache() + logger.debug("Saving tokens to cache directory: {}", self.cache_directory.as_posix()) + with AuthenticationError.handle_errors("Failed to save tokens to cache"): + self.cache_directory.mkdir(parents=True, exist_ok=True) + if self._access_token.is_valid(): + self._access_token.save_to_cache() + if self._refresh_token.is_valid(): + self._refresh_token.save_to_cache() def logout(self) -> None: """ Logout from Jobbergate by clearing the loaded tokens and their cache on the disk. """ logger.debug("Logging out from Jobbergate") - self._access_token = self._access_token.replace(content="") - self._access_token.clear_cache() - self._refresh_token = self._refresh_token.replace(content="") - self._refresh_token.clear_cache() + with AuthenticationError.handle_errors("Failed to logout from Jobbergate"): + self._access_token = self._access_token.replace(content="") + self._access_token.clear_cache() + self._refresh_token = self._refresh_token.replace(content="") + self._refresh_token.clear_cache() def login(self) -> None: """ @@ -177,69 +265,58 @@ def login(self) -> None: After the login is completed, the tokens will be saved to the cache directory. 
""" logger.debug("Preparing to login to Jobbergate") - login_info = self._get_login_information() - response = self._wait_for_login_confirmation(login_info) - self._process_tokens_from_response(response) + with AuthenticationError.handle_errors("Failed to login to Jobbergate"): + login_info = self._get_device_code() + token_info = self._wait_for_login_confirmation(login_info) + self._update_tokens_from_info(token_info) logger.success("Login completed") - def _wait_for_login_confirmation(self, login_info: _LoginInformation) -> httpx.Response: - print(f"Login Here: {login_info.verification_url}") - while True: - AuthenticationError.require_condition( - login_info.expires_at > time.time(), "Login expired, please try again" - ) - response = self._get_login_confirmation(login_info) - try: - response.raise_for_status() - break - except httpx.HTTPStatusError: - logger.debug( - " Login not completed yet, waiting {} seconds", - login_info.wait_interval, - ) - time.sleep(login_info.wait_interval) - logger.debug("Preparing to login to Jobbergate") - return response - - def _get_login_confirmation(self, login_info: _LoginInformation) -> httpx.Response: - response = httpx.post( - f"{self.login_domain}/protocol/openid-connect/token", - data=dict( - grant_type="urn:ietf:params:oauth:grant-type:device_code", - device_code=login_info.device_code, - client_id=self.login_client_id, - ), - ) - - return response - - def _get_login_information(self) -> _LoginInformation: - with AuthenticationError.handle_errors( - "Unexpected error while fetching the tokens", + def _wait_for_login_confirmation(self, device_code_data: DeviceCodeData) -> TokenInformation: + self.login_url_handler(device_code_data) + for counter in self.login_sequence_handler( + TimedIterator(int(device_code_data.expires_in), device_code_data.interval) ): - response = httpx.post( - f"{self.login_domain}/protocol/openid-connect/auth/device", - data=dict( - client_id=self.login_client_id, - grant_type="client_credentials", - 
audience=self.login_audience,
-                ),
+            request_handler = RequestHandler(
+                client=self._client,
+                url_path="/protocol/openid-connect/token",
+                method="POST",
+                request_kwargs={
+                    "data": {
+                        "grant_type": "urn:ietf:params:oauth:grant-type:device_code",
+                        "device_code": device_code_data.device_code,
+                        "client_id": self.login_client_id,
+                    }
+                },
+            ).check_status_code(200, 400)
+
+            if request_handler.response.is_success:
+                return request_handler.to_model(TokenInformation)
+
+            logger.debug(
+                "Login not completed on attempt #{}, waiting {} seconds",
+                counter + 1,
+                device_code_data.interval,
             )
-        response.raise_for_status()
-        device_code_data = response.json()
-        with AuthenticationError.handle_errors(
-            f"Error processing the request data after fetching the token, {device_code_data=}",
-        ):
-            verification_url = device_code_data["verification_uri_complete"]
-            wait_interval = device_code_data["interval"]
-            device_code = device_code_data["device_code"]
-            expires_at = time.time() + device_code_data["expires_in"]
-        return _LoginInformation(
-            verification_url,
-            wait_interval,
-            device_code,
-            expires_at,
+        raise AuthenticationError("Login process was not completed in time. 
Please try again.")
+
+    def _get_device_code(self) -> DeviceCodeData:
+        return (
+            RequestHandler(
+                client=self._client,
+                url_path="/protocol/openid-connect/auth/device",
+                method="POST",
+                request_kwargs={
+                    "data": {
+                        "client_id": self.login_client_id,
+                        "grant_type": "client_credentials",
+                        "audience": self.login_audience,
+                    }
+                },
+            )
+            .raise_for_status()
+            .check_status_code(200)
+            .to_model(DeviceCodeData)
         )
 
     def refresh_tokens(self) -> None:
@@ -254,48 +331,64 @@ def refresh_tokens(self) -> None:
         logger.debug("Preparing to refresh the tokens")
 
         if not self._refresh_token.content:
-            raise TokenError("The refresh is unavailable, please login again")
+            raise AuthenticationError("Session cannot be refreshed since the refresh token is unavailable")
         if self._refresh_token.is_expired():
-            raise TokenError("Refresh token is expired, please login again")
+            raise AuthenticationError("Session cannot be refreshed since the refresh token is expired")
 
-        response = self._get_refresh_token()
-        self._process_tokens_from_response(response)
+        with AuthenticationError.handle_errors("Failed to refresh the session"):
+            token_info = self._get_refresh_token()
+            self._update_tokens_from_info(token_info)
 
         logger.success("Tokens refreshed successfully")
 
-    def _get_refresh_token(self):
-        with AuthenticationError.handle_errors(
-            "Unexpected error while refreshing the tokens",
-        ):
-            response = httpx.post(
-                f"{self.login_domain}/protocol/openid-connect/token",
-                data=dict(
-                    client_id=self.login_client_id,
-                    audience=self.login_audience,
-                    grant_type="refresh_token",
-                    refresh_token=self._refresh_token.content,
-                ),
+    def _get_refresh_token(self) -> TokenInformation:
+        return (
+            RequestHandler(
+                client=self._client,
+                url_path="/protocol/openid-connect/token",
+                method="POST",
+                request_kwargs={
+                    "data": {
+                        "client_id": self.login_client_id,
+                        "audience": self.login_audience,
+                        "grant_type": "refresh_token",
+                        "refresh_token": self._refresh_token.content,
+                    },
+                },
            )
-            
response.raise_for_status() - return response - - def _process_tokens_from_response(self, response): - response_data = response.json() - - tokens_content = {t: response_data.get(f"{t.value}_token") for t in TokenType} - AuthenticationError.require_condition( - all(tokens_content.values()), "Not all tokens were included in the response" + .raise_for_status() + .to_model(TokenInformation) ) - self._update_tokens(tokens_content) - self.save_to_cache() - def _update_tokens(self, tokens_content: Dict[TokenType, str]): + def _update_tokens_from_info(self, token_information: TokenInformation): """ Update the tokens with the new content. """ - access_token = tokens_content.get(TokenType.ACCESS, "") - if access_token: - self._access_token = self._access_token.replace(content=access_token) - refresh_token = tokens_content.get(TokenType.REFRESH, "") - if refresh_token: - self._refresh_token = self._refresh_token.replace(content=refresh_token) + if token_information.access_token: + self._access_token = self._access_token.replace(content=token_information.access_token) + if token_information.refresh_token: + self._refresh_token = self._refresh_token.replace(content=token_information.refresh_token) + self.save_to_cache() + + def get_identity_data(self) -> IdentityData: + if not self._access_token.is_valid(): + self.acquire_access() + token_data = self._access_token.data + email = AuthenticationError.enforce_defined( + token_data.get("email"), + "Could not retrieve user email from token", + ) + client_id = AuthenticationError.enforce_defined( + token_data.get("azp"), "Could not retrieve client_id from token" + ) + org_dict = token_data.get("organization", {}) + AuthenticationError.require_condition( + len(org_dict) <= 1, + message="More than one organization id found in token payload", + ) + organization_id = set(org_dict.keys()).pop() if org_dict else None + return IdentityData( + email=email, + client_id=client_id, + organization_id=organization_id, + ) diff --git 
a/jobbergate-core/jobbergate_core/auth/token.py b/jobbergate-core/jobbergate_core/auth/token.py index b0923656b..7ecaded57 100644 --- a/jobbergate-core/jobbergate_core/auth/token.py +++ b/jobbergate-core/jobbergate_core/auth/token.py @@ -7,7 +7,7 @@ from dataclasses import dataclass, field, replace from enum import Enum from pathlib import Path -from typing import Any, Dict +from typing import TypedDict import pendulum from jose.jwt import decode @@ -25,6 +25,18 @@ class TokenType(str, Enum): REFRESH = "refresh" +class TokenData(TypedDict, total=False): + """ + Expected data from the token to make type checking easier. + """ + + email: str + azp: str + exp: int + iat: int + organization: dict[str, str] + + @dataclass(frozen=True) class Token: """ @@ -46,8 +58,8 @@ class Token: label: str content: str = "" file_path: Path = field(init=False, hash=False, repr=False) - data: Dict[str, Any] = field( - default_factory=dict, + data: TokenData = field( + default_factory=lambda: TokenData(), init=False, hash=False, repr=False, @@ -70,24 +82,28 @@ def __post_init__(self): data = self._get_metadata() object.__setattr__(self, "data", data) - def _get_metadata(self) -> Dict[str, Any]: + def _get_metadata(self) -> TokenData: """ Extract the data from the token. """ - logger.debug(f"Getting data from {self.label} token") - with TokenError.handle_errors("Unable to extract data from the token"): data = decode( token=self.content, - key="", + key="secret-will-be-ignored", options=dict( verify_signature=False, verify_aud=False, + verify_iat=False, verify_exp=False, + verify_nbf=False, + verify_iss=False, + verify_sub=False, + verify_jti=False, + verify_at_hash=False, ), ) - return data + return TokenData(**data) def load_from_cache(self) -> Token: """ @@ -101,7 +117,7 @@ def load_from_cache(self) -> Token: A new token with the content replaced. 
""" file_path = self.cache_directory / f"{self.label}.token" - logger.debug(f"Loading token from {file_path.as_posix()}") + logger.debug(f"Loading {self.label} token from {file_path.as_posix()}") TokenError.require_condition(file_path.exists(), "Token file was not found") @@ -120,7 +136,7 @@ def save_to_cache(self) -> None: """ if not self.content: return - logger.debug(f"Saving token to {self.file_path}") + logger.debug(f"Saving {self.label} token to {self.file_path}") TokenError.require_condition(self.file_path.parent.exists(), "Parent directory does not exist") with TokenError.handle_errors("Unknown error while saving the token"): @@ -145,14 +161,14 @@ def is_expired(self) -> bool: Raises: TokenError: If the expiration date is not found. """ - logger.debug(f"Checking if {self.label} token has expired") - - TokenError.require_condition("exp" in self.data, "The expiration date was not found") + TokenError.require_condition( + "exp" in self.data, f"Failed checking {self.label} token since the expiration date was not found" + ) token_expiration = self.data["exp"] current_time_UTC = pendulum.now().int_timestamp is_expired = token_expiration <= current_time_UTC - logger.debug(f" Token is expired: {is_expired}") + logger.debug(f"{self.label.capitalize()} token is {'' if is_expired else 'NOT'} expired") return is_expired diff --git a/jobbergate-core/jobbergate_core/tools/requests.py b/jobbergate-core/jobbergate_core/tools/requests.py new file mode 100644 index 000000000..35f950a19 --- /dev/null +++ b/jobbergate-core/jobbergate_core/tools/requests.py @@ -0,0 +1,182 @@ +from __future__ import annotations + +from dataclasses import dataclass, field +from pathlib import Path +from textwrap import dedent +from typing import Any, Type, TypeVar + +from buzz import check_expressions, handle_errors +from httpx import Client, HTTPStatusError, RequestError, Response, Request +from loguru import logger +from pydantic import BaseModel + + +class JobbergateRequestError(RequestError): 
+ """ + Jobbergate specific exceptions that may occur when preparing a request. + """ + + +class RequestModelError(JobbergateRequestError): + """ + An error occurred while preparing the request data from a model. + """ + + +class JobbergateResponseError(HTTPStatusError): + """ + Jobbergate specific exceptions that may occur when handling a response. + """ + + +ResponseModel = TypeVar("ResponseModel", bound=BaseModel) + + +def deserialize_request_model(request_model: BaseModel, request_kwargs: dict[str, Any]): + """ + Deserialize a pydantic model instance into request_kwargs for an httpx client request in place. + """ + with check_expressions( + main_message="Request was incorrectly structured to use a `request_model`", + raise_exc_class=RequestModelError, + ) as check: + for key in ("data", "json", "content"): + check(key not in request_kwargs, f"It already contains '{key}'") + + with handle_errors("Failed to deserialize request model", raise_exc_class=RequestModelError): + request_kwargs["content"] = request_model.model_dump_json() + + request_kwargs["headers"] = {"Content-Type": "application/json"} + + +@dataclass +class RequestHandler: + """ + Provide utilities for making requests and handling responses. + + Notice most methods return self as a syntax sugar to allow chaining. + + Arguments: + client: The httpx client to use for the request + url_path: The path to add to the base url of the client where the request should be sent + method: The REST method to use for the request (GET, PUT, UPDATE, POST, DELETE, etc) + request_model: Use a pydantic model instance as the data body for the request + request_kwargs: Any additional keyword arguments that need to be passed on to the client + + Attributes: + response: The response object from the request is kept for reference. 
+ """ + + client: Client + url_path: str + method: str # HTTPMethod is new in Python 3.11 and can replace string here + request_model: BaseModel | None = None + request_kwargs: dict[str, Any] = field(default_factory=dict) + + request: Request = field(init=False, repr=False) + response: Response = field(init=False, repr=False) + + def __post_init__(self): + """ + Post init method. + """ + logger.debug(f"Making request to url_path={self.url_path}") + + if self.request_model is not None: + try: + deserialize_request_model(self.request_model, self.request_kwargs) + except RequestModelError as err: + logger.error(str(err)) + raise err + + self.request = self.client.build_request(self.method, self.url_path, **self.request_kwargs) + + # Look for the request body in the request_kwargs + debug_request_body = self.request_kwargs.get( + "data", self.request_kwargs.get("json", self.request_kwargs.get("content")) + ) + logger.debug( + dedent( + f""" + Request built with: + url: {self.request.url} + method: {self.method} + headers: {self.request.headers} + body: {debug_request_body} + """ + ).strip() + ) + + try: + self.response = self.client.send(self.request) + except RequestError as err: + logger.error(str(err)) + raise err + + logger.debug(f"Response received with status: {self.response.reason_phrase} [{self.response.status_code}]") + + def raise_for_status(self) -> RequestHandler: + """ + Raise the `HTTPStatusError` if one occurred. + """ + try: + self.response.raise_for_status() + except HTTPStatusError as err: + original_error_message = str(err) + logger.error(original_error_message) + raise JobbergateResponseError( + message=original_error_message, request=self.request, response=self.response + ) from err + return self + + def check_status_code(self, *statuses: int) -> RequestHandler: + """ + Check if the response status code is in the provided set of status codes. + """ + if self.response.status_code not in set(statuses): + message = "Unexpected response status code. 
Got: {}. Expected one of: {}".format( + self.response.status_code, ", ".join(str(status) for status in statuses) + ) + logger.error(message) + raise JobbergateResponseError(message=message, request=self.request, response=self.response) + return self + + def to_file(self, file_path: Path) -> Path: + """ + Write the response content to a file. + """ + try: + file_path.write_bytes(self.response.content) + except Exception as err: + logger.error(str(err)) + raise JobbergateResponseError( + message=f"Failed writing file to {file_path.as_posix()}", request=self.request, response=self.response + ) from err + return file_path + + def to_json(self) -> dict[str, Any]: + """ + Unpack the response content as json. + """ + try: + data = self.response.json() + except Exception as err: + logger.error(str(err)) + raise JobbergateResponseError( + message="Failed unpacking json from response", request=self.request, response=self.response + ) from err + + logger.debug(f"Extracted data from response: {data}") + return data + + def to_model(self, model: Type[ResponseModel]) -> ResponseModel: + """ + Unpack the response content as json and validate it against a pydantic model. + """ + try: + return model.model_validate(self.to_json()) + except Exception as err: + logger.error(str(err)) + raise JobbergateResponseError( + message="Failed to validate response to model", request=self.request, response=self.response + ) from err diff --git a/jobbergate-core/poetry.lock b/jobbergate-core/poetry.lock index 4b35ab67b..1043ec674 100644 --- a/jobbergate-core/poetry.lock +++ b/jobbergate-core/poetry.lock @@ -1,5 +1,16 @@ # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
+[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + [[package]] name = "anyio" version = "4.0.0" @@ -352,41 +363,50 @@ dev = ["Sphinx (>=4.1.1)", "black (>=19.10b0)", "colorama (>=0.3.4)", "docutils [[package]] name = "mypy" -version = "0.931" +version = "1.10.1" description = "Optional static typing for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "mypy-0.931-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c5b42d0815e15518b1f0990cff7a705805961613e701db60387e6fb663fe78a"}, - {file = "mypy-0.931-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c89702cac5b302f0c5d33b172d2b55b5df2bede3344a2fbed99ff96bddb2cf00"}, - {file = "mypy-0.931-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:300717a07ad09525401a508ef5d105e6b56646f7942eb92715a1c8d610149714"}, - {file = "mypy-0.931-cp310-cp310-win_amd64.whl", hash = "sha256:7b3f6f557ba4afc7f2ce6d3215d5db279bcf120b3cfd0add20a5d4f4abdae5bc"}, - {file = "mypy-0.931-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1bf752559797c897cdd2c65f7b60c2b6969ffe458417b8d947b8340cc9cec08d"}, - {file = "mypy-0.931-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4365c60266b95a3f216a3047f1d8e3f895da6c7402e9e1ddfab96393122cc58d"}, - {file = "mypy-0.931-cp36-cp36m-win_amd64.whl", hash = "sha256:1b65714dc296a7991000b6ee59a35b3f550e0073411ac9d3202f6516621ba66c"}, - {file = "mypy-0.931-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:e839191b8da5b4e5d805f940537efcaa13ea5dd98418f06dc585d2891d228cf0"}, - {file = "mypy-0.931-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:50c7346a46dc76a4ed88f3277d4959de8a2bd0a0fa47fa87a4cde36fe247ac05"}, - {file = "mypy-0.931-cp37-cp37m-win_amd64.whl", hash = "sha256:d8f1ff62f7a879c9fe5917b3f9eb93a79b78aad47b533911b853a757223f72e7"}, - {file = "mypy-0.931-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9fe20d0872b26c4bba1c1be02c5340de1019530302cf2dcc85c7f9fc3252ae0"}, - {file = "mypy-0.931-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1b06268df7eb53a8feea99cbfff77a6e2b205e70bf31743e786678ef87ee8069"}, - {file = "mypy-0.931-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8c11003aaeaf7cc2d0f1bc101c1cc9454ec4cc9cb825aef3cafff8a5fdf4c799"}, - {file = "mypy-0.931-cp38-cp38-win_amd64.whl", hash = "sha256:d9d2b84b2007cea426e327d2483238f040c49405a6bf4074f605f0156c91a47a"}, - {file = "mypy-0.931-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ff3bf387c14c805ab1388185dd22d6b210824e164d4bb324b195ff34e322d166"}, - {file = "mypy-0.931-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5b56154f8c09427bae082b32275a21f500b24d93c88d69a5e82f3978018a0266"}, - {file = "mypy-0.931-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ca7f8c4b1584d63c9a0f827c37ba7a47226c19a23a753d52e5b5eddb201afcd"}, - {file = "mypy-0.931-cp39-cp39-win_amd64.whl", hash = "sha256:74f7eccbfd436abe9c352ad9fb65872cc0f1f0a868e9d9c44db0893440f0c697"}, - {file = "mypy-0.931-py3-none-any.whl", hash = "sha256:1171f2e0859cfff2d366da2c7092b06130f232c636a3f7301e3feb8b41f6377d"}, - {file = "mypy-0.931.tar.gz", hash = "sha256:0038b21890867793581e4cb0d810829f5fd4441aa75796b53033af3aa30430ce"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, + {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, + {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, + {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, + {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, + {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, + {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, + {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, + {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, + {file 
= "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, + {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, + {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, + {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, + {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, + {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, + {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, + {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, + {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, ] [package.dependencies] -mypy-extensions = ">=0.4.3" -tomli = ">=1.1.0" -typing-extensions = ">=3.10" +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < 
\"3.11\""} +typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] -python2 = ["typed-ast (>=1.4.0,<2)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] [[package]] name = "mypy-extensions" @@ -547,6 +567,129 @@ files = [ {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, ] +[[package]] +name = "pydantic" +version = "2.8.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.20.1" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.20.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = 
"pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = 
"pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = 
"pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = 
"pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = 
"pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = 
"pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = 
"pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", 
hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pytest" version = "7.4.2" @@ -905,13 +1048,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -959,4 +1102,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "5b2a200db4280e5061b51caeed8120d242fca6feb720642e0a1fcee3ad0422e2" +content-hash = "38601169d969ee100ce38cff4921ae4e1f113dc3e1e64b932ecc19d7edf59a74" diff --git a/jobbergate-core/pyproject.toml b/jobbergate-core/pyproject.toml index da053d843..6f8b9cbdd 100644 --- a/jobbergate-core/pyproject.toml +++ b/jobbergate-core/pyproject.toml @@ -25,9 +25,10 @@ loguru = "^0.6.0" pendulum = { extras = ["test"], version = "^3.0.0" } py-buzz = "^4.0.0" python-jose = "^3.3.0" +pydantic = "^2.7" [tool.poetry.dev-dependencies] -mypy = "^0.931" +mypy = "^1.10" pytest-cov = "^3.0.0" pytest-env = "^0.6.2" pytest-freezegun = "^0.4.2" @@ -55,6 +56,9 @@ addopts = [ fail_under = 85 show_missing = true +[tool.mypy] +plugins = "pydantic.mypy" + [[tool.mypy.overrides]] module = ["click", "dataclasses", "toml", "requests", "jose.*"] ignore_missing_imports = true diff --git 
a/jobbergate-core/tests/auth/test_handler.py b/jobbergate-core/tests/auth/test_handler.py index 146e1b255..7f8cacfe7 100644 --- a/jobbergate-core/tests/auth/test_handler.py +++ b/jobbergate-core/tests/auth/test_handler.py @@ -2,15 +2,17 @@ Test the utilities for handling auth in Jobbergate. """ +from dataclasses import replace from unittest import mock import httpx +import pendulum import pytest import requests from jobbergate_core.auth import JobbergateAuthHandler from jobbergate_core.auth.exceptions import AuthenticationError -from jobbergate_core.auth.token import TokenError, TokenType +from jobbergate_core.auth.token import TokenType DUMMY_LOGIN_DOMAIN = "http://keycloak.local:8080/realms/jobbergate-local" @@ -123,7 +125,7 @@ def test_no_tokens_found(self, dummy_jobbergate_auth): If no tokens are found in cache, the tokens dictionary should stay empty. """ - with pytest.raises(TokenError, match="Token file was not found"): + with pytest.raises(AuthenticationError, match="Token file was not found"): dummy_jobbergate_auth.load_from_cache() def test_tokens_found__replace_loaded(self, dummy_jobbergate_auth, valid_token, expired_token): @@ -151,7 +153,7 @@ def test_tokens_found__replace_loaded(self, dummy_jobbergate_auth, valid_token, assert dummy_jobbergate_auth._refresh_token.content == expected_content -def test_save_to_cache(dummy_jobbergate_auth, valid_token, expired_token): +def test_save_to_cache(dummy_jobbergate_auth, valid_token): """ Test that the save_to_cache function works as expected. 
@@ -168,7 +170,7 @@ def test_save_to_cache(dummy_jobbergate_auth, valid_token, expired_token): ) dummy_jobbergate_auth._access_token = access_token refresh_token = dummy_jobbergate_auth._refresh_token.replace( - content=expired_token.content, cache_directory=new_cache_directory + content=valid_token.content, cache_directory=new_cache_directory ) dummy_jobbergate_auth._refresh_token = refresh_token @@ -229,8 +231,7 @@ def test_refresh_tokens__failure_no_refresh_token(self, dummy_jobbergate_auth): """ assert dummy_jobbergate_auth._refresh_token.content == "" with pytest.raises( - AuthenticationError, - match="The refresh is unavailable", + AuthenticationError, match="Session can no be refreshed since the refresh token is unavailable" ): dummy_jobbergate_auth.refresh_tokens() @@ -241,45 +242,10 @@ def test_refresh_tokens__failure_expired_refresh_token(self, dummy_jobbergate_au dummy_jobbergate_auth._refresh_token = dummy_jobbergate_auth._refresh_token.replace( content=expired_token.content, ) - with pytest.raises(AuthenticationError, match="Refresh token is expired"): + with pytest.raises(AuthenticationError, match="Session can no be refreshed since the refresh token is expired"): dummy_jobbergate_auth.refresh_tokens() - def test_refresh_tokens__failure_by_missing_data( - self, - respx_mock, - jwt_token, - dummy_jobbergate_auth, - valid_token, - ): - """ - Test that the function raises an exception if the response is missing data. 
- """ - dummy_jobbergate_auth._refresh_token = dummy_jobbergate_auth._refresh_token.replace( - content=valid_token.content, - ) - - refreshed_access_token_content = jwt_token(custom_data="refreshed_access_token") - - endpoint = f"{dummy_jobbergate_auth.login_domain}/protocol/openid-connect/token" - respx_mock.post(endpoint).mock( - return_value=httpx.Response( - httpx.codes.OK, - json=dict(access_token=refreshed_access_token_content), - ), # note that the refresh token is missing - ) - with pytest.raises( - AuthenticationError, - match="Not all tokens were included in the response", - ): - dummy_jobbergate_auth.refresh_tokens() - - def test_refresh_tokens__request_failure( - self, - respx_mock, - dummy_jobbergate_auth, - expired_token, - valid_token, - ): + def test_refresh_tokens__request_failure(self, respx_mock, dummy_jobbergate_auth, valid_token): """ Test that the function raises an exception if the tokens are not refreshed. """ @@ -292,10 +258,7 @@ def test_refresh_tokens__request_failure( return_value=httpx.Response(httpx.codes.BAD_REQUEST), ) - with pytest.raises( - AuthenticationError, - match="Unexpected error while refreshing the tokens", - ): + with pytest.raises(AuthenticationError): dummy_jobbergate_auth.refresh_tokens() @@ -392,5 +355,119 @@ def test_login__raises_timeout(self, respx_mock, dummy_jobbergate_auth): ), ) - with pytest.raises(AuthenticationError, match="Login expired, please try again"): + with pytest.raises(AuthenticationError, match="Login process was not completed in time"): dummy_jobbergate_auth.login() + + +class TestJobbergateAuthHandlerFromSecret: + """ + Test the from_secret method on JobbergateAuthHandler class. + """ + + def test_secret__success(self, respx_mock, dummy_jobbergate_auth, valid_token): + """ + Test that the function works as expected. 
+ """ + assert dummy_jobbergate_auth._access_token.content == "" + + secret_jobbergate_auth = replace(dummy_jobbergate_auth, login_client_secret="dummy-secret") + + endpoint = f"{secret_jobbergate_auth.login_domain}/protocol/openid-connect/token" + mocked = respx_mock.post(endpoint).mock( + return_value=httpx.Response(httpx.codes.OK, json=dict(access_token=valid_token.content)), + ) + + secret_jobbergate_auth.get_access_from_secret() + + assert secret_jobbergate_auth._access_token.content == valid_token.content + + assert mocked.called + + def test_secret__bad_request(self, respx_mock, dummy_jobbergate_auth): + """ + Test that the function works as expected. + """ + + secret_jobbergate_auth = replace(dummy_jobbergate_auth, login_client_secret="dummy-secret") + + endpoint = f"{secret_jobbergate_auth.login_domain}/protocol/openid-connect/token" + mocked = respx_mock.post(endpoint).mock(return_value=httpx.Response(httpx.codes.BAD_REQUEST)) + + with pytest.raises(AuthenticationError, match="Failed to get access token from client secret"): + secret_jobbergate_auth.get_access_from_secret() + + assert dummy_jobbergate_auth._access_token.content == "" + + assert mocked.called + + +class TestJobbergateAuthHandlerGetIdentityData: + """ + Test the get_identity_data method on JobbergateAuthHandler class. + """ + + def test_get_identity_data__success(self, dummy_jobbergate_auth, jwt_token): + """ + Test that the function works as expected. 
+ """ + + access_token = jwt_token( + azp="dummy-client", + email="good@email.com", + organization={"some-id": "some-name"}, + exp=pendulum.tomorrow().int_timestamp, + ) + dummy_jobbergate_auth._access_token = dummy_jobbergate_auth._access_token.replace(content=access_token) + + identity_data = dummy_jobbergate_auth.get_identity_data() + + assert identity_data.email == "good@email.com" + assert identity_data.client_id == "dummy-client" + assert identity_data.organization_id == "some-id" + + def test_get_identity_data__fails_no_email(self, dummy_jobbergate_auth, jwt_token): + """ + Test that the function raises an exception if the email is missing. + """ + + access_token = jwt_token( + azp="dummy-client", + exp=pendulum.tomorrow().int_timestamp, + ) + dummy_jobbergate_auth._access_token = dummy_jobbergate_auth._access_token.replace(content=access_token) + + with pytest.raises(AuthenticationError, match="Could not retrieve user email from token"): + dummy_jobbergate_auth.get_identity_data() + + def test_get_identity_data__fails_no_client_id(self, dummy_jobbergate_auth, jwt_token): + """ + Test that the function raises an exception if the client_id is missing. + """ + + access_token = jwt_token( + email="good@email.com", + exp=pendulum.tomorrow().int_timestamp, + ) + dummy_jobbergate_auth._access_token = dummy_jobbergate_auth._access_token.replace(content=access_token) + + with pytest.raises(AuthenticationError, match="Could not retrieve client_id from token"): + dummy_jobbergate_auth.get_identity_data() + + def test_get_identify_data__fails_more_than_one_organization(self, dummy_jobbergate_auth, jwt_token): + """ + Test that the function raises an exception if there is more than one organization. 
+ """ + + access_token = jwt_token( + azp="dummy-client", + email="good@email.com", + organization={"some-id": "some-name", "other-id": "other-name"}, + exp=pendulum.tomorrow().int_timestamp, + ) + dummy_jobbergate_auth._access_token = dummy_jobbergate_auth._access_token.replace(content=access_token) + + with pytest.raises( + AuthenticationError, + match="More than one organization id found in token payload", + ): + dummy_jobbergate_auth.get_identity_data() diff --git a/jobbergate-core/tests/tools/test_requests.py b/jobbergate-core/tests/tools/test_requests.py new file mode 100644 index 000000000..95a3fd477 --- /dev/null +++ b/jobbergate-core/tests/tools/test_requests.py @@ -0,0 +1,312 @@ +import json +from typing import Dict, Optional + +import httpx +import pydantic +import pytest + +from jobbergate_core.tools.requests import ( + JobbergateResponseError, + RequestHandler, + RequestModelError, + deserialize_request_model, +) + +DEFAULT_DOMAIN = "https://dummy-domain.com" + + +@pytest.fixture +def dummy_client(): + """ + Provide factory for a test client. Can supply custom base_url and headers. + """ + + def _helper(base_url: str = DEFAULT_DOMAIN, headers: Optional[Dict] = None) -> httpx.Client: + """ + Create the dummy httpx client. + """ + if headers is None: + headers = dict() + + return httpx.Client(base_url=base_url, headers=headers) + + return _helper + + +class DummyResponseModel(pydantic.BaseModel): + """ + Provide a dummy pydantic model for testing standard responses. + """ + + foo: int + bar: str + + +class ErrorResponseModel(pydantic.BaseModel): + """ + Provide a dummy pydantic model for testing error responses. + """ + + error: str + + +def test__deserialize_request_model__success(): + """ + Validate that the ``_deserialize_request_model`` method can successfully deserialize a pydantic model instance into + the ``content`` part of the ``request_kwargs``. Also, validate that the ``content-type`` part of the request is set + to ``application/json``. 
+ """ + request_kwargs = dict() + deserialize_request_model(DummyResponseModel(foo=1, bar="one"), request_kwargs) + assert json.loads(request_kwargs["content"]) == dict(foo=1, bar="one") + assert request_kwargs["headers"] == {"Content-Type": "application/json"} + + +def test__deserialize_request_model__raises_Abort_if_request_kwargs_already_has_other_body_parts(): + """ + Validate that the ``_deserialize_request_model`` raises an Abort if the ``request_kwargs`` already has a "body" part + (``data``, ``json``, or ``content``). + """ + with pytest.raises(RequestModelError, match="Request was incorrectly structured"): + deserialize_request_model(DummyResponseModel(foo=1, bar="one"), dict(data=dict(foo=11))) + + with pytest.raises(RequestModelError, match="Request was incorrectly structured"): + deserialize_request_model(DummyResponseModel(foo=1, bar="one"), dict(json=dict(foo=11))) + + with pytest.raises(RequestModelError, match="Request was incorrectly structured"): + deserialize_request_model(DummyResponseModel(foo=1, bar="one"), dict(content=json.dumps(dict(foo=11)))) + + +class TestRequestHandler: + def test_make_request__success(self, respx_mock, dummy_client): + """ + Validate that the RequestHandler can successfully make a request. + """ + + client = dummy_client(headers={"content-type": "garbage"}) + req_path = "/fake-path" + + reponse_status = httpx.codes.OK + response_json = dict( + foo=1, + bar="one", + ) + mocked_get = respx_mock.get(f"{DEFAULT_DOMAIN}{req_path}").mock( + return_value=httpx.Response(reponse_status, json=response_json), + ) + + request_handler = RequestHandler(client=client, url_path=req_path, method="GET") + + assert mocked_get.call_count == 1 + + assert request_handler.response.status_code == reponse_status + assert request_handler.response.json() == response_json + + def test_make_request__raises_request_error(self, respx_mock, dummy_client): + """ + Validate that the RequestHandler raises an error if the request fails. 
+ """ + + client = dummy_client(headers={"content-type": "garbage"}) + req_path = "/fake-path" + original_error = httpx.RequestError("BOOM!") + + mocked_get = respx_mock.get(f"{DEFAULT_DOMAIN}{req_path}").mock(side_effect=original_error) + + with pytest.raises(httpx.RequestError, match="BOOM!"): + RequestHandler(client=client, url_path=req_path, method="GET") + + assert mocked_get.call_count == 1 + + def test_raise_for_status__success(self, respx_mock, dummy_client): + """ + Validate that the RequestHandler can successfully raise an error if the response status code is not 2XX. + """ + + client = dummy_client(headers={"content-type": "garbage"}) + req_path = "/fake-path" + + mocked_get = respx_mock.get(f"{DEFAULT_DOMAIN}{req_path}").mock( + return_value=httpx.Response(httpx.codes.BAD_REQUEST), + ) + + request_handler = RequestHandler(client=client, url_path=req_path, method="GET") + + with pytest.raises(httpx.HTTPStatusError, match="Client error '400 Bad Request'"): + request_handler.raise_for_status() + + assert mocked_get.call_count == 1 + + def test_check_status_code__success(self, respx_mock, dummy_client): + """ + Validate that the RequestHandler can successfully check the response status code. + """ + + client = dummy_client(headers={"content-type": "garbage"}) + req_path = "/fake-path" + + mocked_get = respx_mock.get(f"{DEFAULT_DOMAIN}{req_path}").mock( + return_value=httpx.Response(httpx.codes.OK), + ) + + request_handler = RequestHandler(client=client, url_path=req_path, method="GET") + + assert request_handler.check_status_code(httpx.codes.OK) + + assert mocked_get.call_count == 1 + + def test_check_status_code__raises_error(self, respx_mock, dummy_client): + """ + Validate that the RequestHandler raises an error if the response status code is not in the provided list. 
+ """ + + client = dummy_client(headers={"content-type": "garbage"}) + req_path = "/fake-path" + + mocked_get = respx_mock.get(f"{DEFAULT_DOMAIN}{req_path}").mock( + return_value=httpx.Response(httpx.codes.BAD_REQUEST), + ) + + request_handler = RequestHandler(client=client, url_path=req_path, method="GET") + + with pytest.raises(httpx.HTTPStatusError, match="Unexpected response status code"): + request_handler.check_status_code(httpx.codes.OK) + + assert mocked_get.call_count == 1 + + def test_check_status_code__raises_error__with_multiple_status_codes(self, respx_mock, dummy_client): + """ + Validate that the RequestHandler raises an error if the response status code is not in the provided list. + """ + + client = dummy_client(headers={"content-type": "garbage"}) + req_path = "/fake-path" + + mocked_get = respx_mock.get(f"{DEFAULT_DOMAIN}{req_path}").mock( + return_value=httpx.Response(httpx.codes.BAD_REQUEST), + ) + + request_handler = RequestHandler(client=client, url_path=req_path, method="GET") + + with pytest.raises(httpx.HTTPStatusError, match="Unexpected response status code"): + request_handler.check_status_code(httpx.codes.OK, httpx.codes.CREATED) + + assert mocked_get.call_count == 1 + + def test_to_file__success(self, respx_mock, dummy_client, tmp_path): + """ + Assert that the RequestHandler can successfully write the response content to a file. + """ + + client = dummy_client(headers={"content-type": "garbage"}) + req_path = "/fake-path" + + response_content = "Hello, World!" 
+ mocked_get = respx_mock.get(f"{DEFAULT_DOMAIN}{req_path}").mock( + return_value=httpx.Response(httpx.codes.OK, content=response_content), + ) + + request_handler = RequestHandler(client=client, url_path=req_path, method="GET") + + file_path = tmp_path / "response.txt" + assert request_handler.to_file(file_path) == file_path + + assert file_path.read_text() == response_content + + assert mocked_get.call_count == 1 + + def test_to_file__fails(self, respx_mock, dummy_client, tmp_path): + """ + Assert that the RequestHandler raises an error if the response content is empty. + """ + + client = dummy_client(headers={"content-type": "garbage"}) + req_path = "/fake-path" + + mocked_get = respx_mock.get(f"{DEFAULT_DOMAIN}{req_path}").mock( + return_value=httpx.Response(httpx.codes.OK), + ) + + request_handler = RequestHandler(client=client, url_path=req_path, method="GET") + + file_path = tmp_path / "unexistent-directory" / "response.txt" + with pytest.raises(JobbergateResponseError, match="Failed writing file"): + request_handler.to_file(file_path) + + assert mocked_get.call_count == 1 + + def test_to_json__success(self, respx_mock, dummy_client): + """ + Assert that the RequestHandler can successfully write the response content to a file. + """ + + client = dummy_client(headers={"content-type": "application/json"}) + req_path = "/fake-path" + + response_json = dict(foo=1, bar="one") + mocked_get = respx_mock.get(f"{DEFAULT_DOMAIN}{req_path}").mock( + return_value=httpx.Response(httpx.codes.OK, json=response_json), + ) + + request_handler = RequestHandler(client=client, url_path=req_path, method="GET") + + assert request_handler.to_json() == response_json + + assert mocked_get.call_count == 1 + + def test_to_json__fails(self, respx_mock, dummy_client): + """ + Assert that the RequestHandler raises an error if the response content is empty. 
+ """ + + client = dummy_client(headers={"content-type": "application/json"}) + req_path = "/fake-path" + + mocked_get = respx_mock.get(f"{DEFAULT_DOMAIN}{req_path}").mock( + return_value=httpx.Response(httpx.codes.OK), + ) + + request_handler = RequestHandler(client=client, url_path=req_path, method="GET") + + with pytest.raises(JobbergateResponseError, match="Failed unpacking json from response"): + request_handler.to_json() + + assert mocked_get.call_count == 1 + + def test_to_model__success(self, respx_mock, dummy_client): + """ + Assert that the RequestHandler can successfully write the response content to a file. + """ + + client = dummy_client(headers={"content-type": "application/json"}) + req_path = "/fake-path" + + response_json = dict(foo=1, bar="one") + mocked_get = respx_mock.get(f"{DEFAULT_DOMAIN}{req_path}").mock( + return_value=httpx.Response(httpx.codes.OK, json=response_json), + ) + + request_handler = RequestHandler(client=client, url_path=req_path, method="GET") + + assert request_handler.to_model(DummyResponseModel) == DummyResponseModel.model_validate(response_json) + + assert mocked_get.call_count == 1 + + def test_to_model__fails(self, respx_mock, dummy_client): + """ + Assert that the RequestHandler raises an error if the response content is empty. + """ + + client = dummy_client(headers={"content-type": "application/json"}) + req_path = "/fake-path" + + mocked_get = respx_mock.get(f"{DEFAULT_DOMAIN}{req_path}").mock( + return_value=httpx.Response(httpx.codes.OK), + ) + + request_handler = RequestHandler(client=client, url_path=req_path, method="GET") + + with pytest.raises(JobbergateResponseError, match="Failed to validate response to model"): + request_handler.to_model(DummyResponseModel) + + assert mocked_get.call_count == 1