Skip to content

Commit

Permalink
Merge pull request #276 from chkware/feat/module-workflow
Browse files Browse the repository at this point in the history
Feature: Workflow module implementation
  • Loading branch information
0hsn authored Oct 23, 2024
2 parents 1e4385a + 59ed987 commit d0df0a1
Show file tree
Hide file tree
Showing 67 changed files with 3,640 additions and 1,599 deletions.
9 changes: 6 additions & 3 deletions .github/workflows/test-ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,15 @@ jobs:
name: Run test on ${{ matrix.os }} with ${{ matrix.py_ver }}
strategy:
matrix:
os:
os:
- macOS-latest
- macos-11
- macos-14
- macos-13
- ubuntu-latest
- ubuntu-20.04
- ubuntu-24.04
- ubuntu-22.04
- windows-latest
- windows-2022
- windows-2019

runs-on: ${{ matrix.os }}
Expand Down
5 changes: 3 additions & 2 deletions Pipfile
Original file line number Diff line number Diff line change
Expand Up @@ -10,20 +10,21 @@ requests = "*"
cerberus = "*"
defusedxml = "*"
xmltodict = "*"
jinja2 = "*"
python-dotenv = "*"
pydantic = "*"
loguru = "*"

[dev-packages]
pytest = "*"
mypy = "*"
pylint = "*"
flake8 = "*"
black = {extras = ["d"], version = "*"}
var-dump = "*"
types-pyyaml = "*"
types-requests = "*"
pytest-cov = "*"
coveralls = "*"
icecream = "*"

[requires]
python_version = "3.11"
1,808 changes: 1,020 additions & 788 deletions Pipfile.lock

Large diffs are not rendered by default.

110 changes: 59 additions & 51 deletions chk/console/main.py
Original file line number Diff line number Diff line change
@@ -1,61 +1,25 @@
"""Commands"""

import typing
from os import environ

import click
from dotenv import load_dotenv

import chk.modules.fetch as fetch_executor
import chk.modules.validate as validate_executor
import chk.modules.workflow as workflow_executor
from chk.console.services import (
after_hook,
combine_initial_variables,
load_variables_as_dict, setup_logger,
)
from chk.infrastructure.file_loader import ExecuteContext, FileContext

from chk.infrastructure.file_loader import ExecuteContext, FileContext, FileLoader
from chk.infrastructure.typing_extras import JsonDecodingError


def load_variables_as_dict(json_str: str, **kwargs: typing.Any) -> dict:
    """Parse a JSON object string into a dict.

    An empty/None input yields an empty dict. Invalid JSON raises
    click.UsageError, using the ``except_msg`` keyword when supplied.
    """

    if not json_str:
        return {}

    try:
        return FileLoader.load_json_from_str(json_str)
    except JsonDecodingError as err:
        message = kwargs.get("except_msg") or "JSON loading error."
        raise click.UsageError(str(message)) from err


def combine_initial_variables(external_vars: str, **kwargs: typing.Any) -> dict:
    """Parse the external JSON variables and merge in the process
    environment (plus any dotenv file) under the ``_ENV`` key."""

    load_dotenv()

    env_bucket = {"_ENV": dict(environ)}
    return load_variables_as_dict(external_vars, **kwargs) | env_bucket


def after_hook(resp: object) -> None:
    """Saves custom data from commands to global context bus

    Args:
        resp: object merged into the parent click context's obj

    Raises:
        RuntimeError: when no current click context exists
    """

    curr_ctx = click.get_current_context()
    if not curr_ctx:
        raise RuntimeError("Default context not found")

    parent = curr_ctx.parent
    if parent:
        # lazily create the shared bus before merging
        if not parent.obj:
            parent.obj = {}
        parent.obj |= resp
VAR_ERROR_MSG = "-V, --variables accept values as JSON object"


# root command
@click.group()
@click.option("--debug/--no-debug", default=True)
@click.pass_context
def chk(ctx: click.Context) -> None:
def chk(ctx: click.Context, debug: bool) -> None:
"""\b
█████████ █████ █████ █████ ████
███░░░░░███░░███ ░░███ ░░███ ███░
Expand All @@ -71,6 +35,9 @@ def chk(ctx: click.Context) -> None:
Version 0.5.0, supported version strings: 0.7.2
"""
ctx.ensure_object(dict)
ctx.obj["debug"] = debug

setup_logger(debug)


# run fetch sub-command
Expand All @@ -80,7 +47,8 @@ def chk(ctx: click.Context) -> None:
"-nf", "--no-format", is_flag=True, help="No formatting to show the output"
)
@click.option("-V", "--variables", type=str, help="Pass variable(s) as JSON object")
def fetch(file: str, no_format: bool, variables: str) -> None:
@click.pass_context
def fetch(cctx: click.Context, file: str, no_format: bool, variables: str) -> None:
"""\b
Command to run Http config files.
FILE: Any .chk file, that has any of the following versions:
Expand All @@ -94,11 +62,12 @@ def fetch(file: str, no_format: bool, variables: str) -> None:
{
"dump": True,
"format": not no_format,
"debug": cctx.obj.get("debug", False),
},
{
"variables": combine_initial_variables(
variables,
except_msg="-V, --variables accept values as JSON object",
except_msg=VAR_ERROR_MSG,
)
},
)
Expand All @@ -114,9 +83,12 @@ def fetch(file: str, no_format: bool, variables: str) -> None:
)
@click.option("-V", "--variables", type=str, help="Pass variable(s) as JSON object")
@click.option("-D", "--data", type=str, help="Pass data as JSON")
def validate(file: str, no_format: bool, variables: str, data: str) -> None:
@click.pass_context
def validate(
cctx: click.Context, file: str, no_format: bool, variables: str, data: str
) -> None:
"""\b
Command to run Http config files.
Command to run Validation specification files.
FILE: Any .chk file, that has any of the following versions:
\b
Expand All @@ -128,11 +100,12 @@ def validate(file: str, no_format: bool, variables: str, data: str) -> None:
{
"dump": True,
"format": not no_format,
"debug": cctx.obj.get("debug", False),
},
{
"variables": combine_initial_variables(
variables,
except_msg="-V, --variables accept values as JSON object",
except_msg=VAR_ERROR_MSG,
),
"data": load_variables_as_dict(
data,
Expand All @@ -142,3 +115,38 @@ def validate(file: str, no_format: bool, variables: str, data: str) -> None:
)

validate_executor.execute(ctx, execution_ctx, after_hook)


# run workflow sub-command
@chk.command()
@click.argument("file", type=click.Path(exists=True))
@click.option(
    "-nf", "--no-format", is_flag=True, help="No formatting to show the output"
)
@click.option("-V", "--variables", type=str, help="Pass variable(s) as JSON object")
@click.pass_context
def workflow(cctx: click.Context, file: str, no_format: bool, variables: str) -> None:
    """\b
    Command to run Workflow specification files.
    FILE: Any .chk file, that has any of the following versions:
    \b
    - default.http.*"""
    # NOTE(review): the version list above says "default.http.*", apparently
    # copied from the fetch command — confirm the workflow version string.

    ctx: FileContext = FileContext.from_file(file)

    # first dict: output options; second dict: input variables for the document
    execution_ctx = ExecuteContext(
        {
            "dump": True,
            "format": not no_format,
            "debug": cctx.obj.get("debug", False),
        },
        {
            "variables": combine_initial_variables(
                variables,
                except_msg=VAR_ERROR_MSG,
            ),
        },
    )

    workflow_executor.execute(ctx, execution_ctx, after_hook)
45 changes: 45 additions & 0 deletions chk/console/services.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
"""
Console service module
"""

from typing import Any

import click

from chk.infrastructure.file_loader import FileLoader
from chk.infrastructure.logging import LoggingManager
from chk.infrastructure.typing_extras import JsonDecodingError


def load_variables_as_dict(json_str: str, **kwargs: Any) -> dict:
    """Parse a JSON object string into a dict.

    Empty/None input yields an empty dict. Invalid JSON raises
    click.UsageError, preferring the ``except_msg`` keyword when given.
    """

    if not json_str:
        return {}

    try:
        return FileLoader.load_json_from_str(json_str)
    except JsonDecodingError as err:
        message = kwargs.get("except_msg") or "JSON loading error."
        raise click.UsageError(str(message)) from err


def combine_initial_variables(external_vars: str, **kwargs: Any) -> dict:
    """Parse a JSON object string of external variables into a dict.

    NOTE(review): despite the name, this no longer merges ``_ENV``/dotenv
    variables (the previous console implementation did); it only delegates
    to load_variables_as_dict — confirm the drop is intentional.
    """

    return load_variables_as_dict(external_vars, **kwargs)


def after_hook(*args: Any, **kwargs: Any) -> None:
    """Run any function after implementation. Default pass

    Intentionally a no-op placeholder hook: accepts any arguments and
    always returns None.
    """


def setup_logger(should_log: bool) -> None:
    """Reset loguru handlers and, when enabled, attach a fresh log file sink."""

    # always clear existing handlers first, even when logging is disabled
    LoggingManager.remove_loguru()

    if not should_log:
        return

    LoggingManager.setup_loguru(LoggingManager.create_new_log_file())
15 changes: 14 additions & 1 deletion chk/infrastructure/document.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
"""
Base document and utility
"""

import dataclasses

import cerberus
from pydantic import BaseModel, Field

from chk.infrastructure.file_loader import FileContext

Expand All @@ -18,11 +20,22 @@ class VersionedDocument:
version: str = dataclasses.field(default_factory=str)


class VersionedDocumentV2(BaseModel):
    """
    versioned document entity

    Pydantic-based successor to the dataclass VersionedDocument; accepted
    interchangeably by VersionedDocumentSupport.validate_with_schema.
    """

    # originating file context; a plain tuple — presumably a FileContext
    # NamedTuple from the caller, TODO confirm
    context: tuple = Field(default_factory=tuple)
    # document version string (defaults to "")
    version: str = Field(default_factory=str)


class VersionedDocumentSupport:
"""DocumentVersionSupport"""

@staticmethod
def validate_with_schema(schema: dict, doc: VersionedDocument) -> bool:
def validate_with_schema(
schema: dict, doc: VersionedDocument | VersionedDocumentV2
) -> bool:
"""Validate a document with given schema
Args:
Expand Down
62 changes: 41 additions & 21 deletions chk/infrastructure/file_loader.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,14 @@
"""
File loader utility
"""

from __future__ import annotations

import hashlib
from typing import NamedTuple
import json
from pathlib import Path
from typing import NamedTuple

import json
import yaml

from chk.infrastructure.typing_extras import JsonDecodingError
Expand All @@ -25,7 +28,7 @@ def is_file_ok(file_name: str, allowed_list: list | None = None) -> bool:
"""

if not allowed_list:
allowed_list = [".chk"]
allowed_list = [".chk", ".yaml", ".yml"]

if not Path(file_name).is_file():
raise FileNotFoundError("File not found")
Expand Down Expand Up @@ -81,7 +84,7 @@ class FileContext(NamedTuple):
filepath_hash: str = ""

@staticmethod
def from_file(file: str, **kwarg: dict) -> "FileContext":
def from_file(file: str, **kwarg: dict) -> FileContext:
FileLoader.is_file_ok(file)
absolute_path = str(Path(file).absolute())
fpath_hash = hashlib.sha256(absolute_path.encode("utf-8")).hexdigest()
Expand All @@ -95,31 +98,48 @@ def from_file(file: str, **kwarg: dict) -> "FileContext":
arguments=kwarg["arguments"] if "arguments" in kwarg else {},
)

@property
def filepath_as_path(self) -> Path:
    """Get filepath as Path

    Returns:
        self.filepath wrapped in a pathlib.Path (not resolved).
    """
    # NOTE: the source span interleaved removed `PathFrom` diff lines with
    # these two properties; this is the clean reconstruction of the new code.

    return Path(self.filepath)

@property
def filepath_base_as_path(self) -> Path:
    """Get filepath parent or base as Path

    Returns:
        The absolute parent directory of self.filepath.
    """

    return Path(self.filepath).absolute().parent

if target.startswith("./") or target.startswith("../"):
if self.base.exists():
to_path = self.base

target_path_sp = target.split("/")
for part in target_path_sp:
if part == "..":
to_path = to_path.parent
else:
to_path = Path(str(to_path) + "/" + part)
def generate_abs_path(base_: str, target_: str) -> str:
    """Generate absolute path in comparison to base path

    Args:
        base_: str, base path to calculate from; if it names a file, its
            parent directory is used as the base
        target_: str, relative file path (must start with "./" or "../")
            that needs an absolute path

    Returns:
        Absolute path for the given filepath

    Raises:
        ValueError: when the base path does not exist, or the target is
            not an explicitly relative path
    """
    # fix: the original span had two stray removed-diff lines spliced into
    # this docstring; they are dropped here.

    base = Path(base_)
    base_abs = base.absolute().parent if base.is_file() else base.absolute()

    if not base_abs.exists():
        raise ValueError("Invalid base path.")
    # startswith accepts a tuple of prefixes — one call instead of an `or`
    if not target_.startswith(("./", "../")):
        raise ValueError("Invalid target path.")

    to_path = base_abs

    for part in target_.split("/"):
        if part == "..":
            to_path = to_path.parent
        else:
            # pathlib collapses "." segments, so "./x" joins cleanly
            to_path = to_path / part

    return str(to_path)


class ExecuteContext(NamedTuple):
"""Information storage for execution context"""
Expand Down
Loading

0 comments on commit d0df0a1

Please sign in to comment.