Unit tests using Pytest for action_inputs.py. (#43)
* Unit tests using Pytest for action_inputs.py file.
MobiTikula authored Oct 31, 2024
1 parent f7f8814 commit a17b3b9
Showing 10 changed files with 349 additions and 120 deletions.
4 changes: 3 additions & 1 deletion README.md
@@ -220,6 +220,7 @@ Configure the action by customizing the following parameters based on your needs

## Action Outputs
The Living Documentation Generator action provides a key output that allows users to locate and access the generated documentation easily. This output can be utilized in various ways within your CI/CD pipeline to ensure the documentation is effectively distributed and accessible.
The output-path cannot be an empty string, and it must not point to the project root or to any other project directory.

- **output-path**
- **Description**: This output provides the path to the directory where the generated living documentation files are stored.
@@ -316,7 +317,8 @@ Add the shebang line at the top of the sh script file.
### Set the Environment Variables
Set the configuration environment variables in the shell script following the structure below.
Also make sure that the GITHUB_TOKEN is configured in your environment variables.
Also make sure that the INPUT_GITHUB_TOKEN is configured in your environment variables.
INPUT_OUTPUT_PATH cannot be an empty string, and it must not point to the project root or to any other project directory.
```
export INPUT_GITHUB_TOKEN=$(printenv GITHUB_TOKEN)
export INPUT_REPOSITORIES='[
153 changes: 82 additions & 71 deletions living_documentation_generator/action_inputs.py
@@ -21,16 +21,18 @@

import json
import logging
import os
import sys

from living_documentation_generator.model.config_repository import ConfigRepository
from living_documentation_generator.utils.utils import get_action_input, make_absolute_path
from living_documentation_generator.utils.utils import get_action_input, make_absolute_path, get_all_project_directories
from living_documentation_generator.utils.constants import (
GITHUB_TOKEN,
PROJECT_STATE_MINING,
REPOSITORIES,
OUTPUT_PATH,
STRUCTURED_OUTPUT,
DEFAULT_OUTPUT_PATH,
)

logger = logging.getLogger(__name__)
@@ -42,93 +44,102 @@ class ActionInputs:
and validating the inputs required for running the GH Action.
"""

def __init__(self):
self.__github_token: str = ""
self.__is_project_state_mining_enabled: bool = False
self.__repositories: list[ConfigRepository] = []
self.__output_directory: str = ""
self.__structured_output: bool = False

@property
def github_token(self) -> str:
"""Getter of the GitHub authorization token."""
return self.__github_token

@property
def is_project_state_mining_enabled(self) -> bool:
"""Getter of the project state mining switch."""
return self.__is_project_state_mining_enabled

@property
def repositories(self) -> list[ConfigRepository]:
"""Getter of the list of repositories to fetch from."""
return self.__repositories

@property
def output_directory(self) -> str:
"""Getter of the output directory."""
return self.__output_directory
@staticmethod
def get_github_token() -> str:
"""
Getter of the GitHub authorization token.
@return: The GitHub authorization token.
"""
return get_action_input(GITHUB_TOKEN)

@property
def structured_output(self) -> bool:
"""Getter of the structured output switch."""
return self.__structured_output
@staticmethod
def get_is_project_state_mining_enabled() -> bool:
"""
Getter of the project state mining switch.
@return: True if project state mining is enabled, False otherwise.
"""
return get_action_input(PROJECT_STATE_MINING, "false").lower() == "true"

def load_from_environment(self, validate: bool = True) -> "ActionInputs":
@staticmethod
def get_is_structured_output_enabled() -> bool:
"""
Load the action inputs from the environment variables and validate them if needed.
Getter of the structured output switch.
@return: True if structured output is enabled, False otherwise.
"""
return get_action_input(STRUCTURED_OUTPUT, "false").lower() == "true"

@param validate: Switch indicating if the inputs should be validated.
@return: The instance of the ActionInputs class.
@staticmethod
def get_repositories() -> list[ConfigRepository]:
"""
Getter and parser of the Config Repositories.
@return: A list of Config Repositories.
"""
self.__github_token = get_action_input(GITHUB_TOKEN)
self.__is_project_state_mining_enabled = get_action_input(PROJECT_STATE_MINING, "false").lower() == "true"
self.__structured_output = get_action_input(STRUCTURED_OUTPUT, "false").lower() == "true"
out_path = get_action_input(OUTPUT_PATH, "./output")
self.__output_directory = make_absolute_path(out_path)
repositories_json = get_action_input(REPOSITORIES, "")

logger.debug("Is project state mining allowed: %s.", self.is_project_state_mining_enabled)
logger.debug("JSON repositories to fetch from: %s.", repositories_json)
logger.debug("Output directory: %s.", self.output_directory)
logger.debug("Is output directory structured: %s.", self.structured_output)

# Validate inputs
if validate:
self.validate_inputs(repositories_json)

# Parse repositories json string into json dictionary format
repositories = []
repositories_json = get_action_input(REPOSITORIES, "[]")
try:
# Parse repositories json string into json dictionary format
repositories_json = json.loads(repositories_json)

# Load repositories into ConfigRepository object from JSON
for repository_json in repositories_json:
config_repository = ConfigRepository()
if config_repository.load_from_json(repository_json):
repositories.append(config_repository)
else:
logger.error("Failed to load repository from JSON: %s.", repository_json)

except json.JSONDecodeError as e:
logger.error("Error parsing JSON repositories: %s.", e, exc_info=True)
sys.exit(1)

for repository_json in repositories_json:
config_repository = ConfigRepository()
if config_repository.load_from_json(repository_json):
self.__repositories.append(config_repository)
else:
logger.error("Failed to load repository from JSON: %s.", repository_json)
except TypeError:
logger.error("Type error parsing input JSON repositories: `%s.`", repositories_json)
sys.exit(1)

return repositories

return self
@staticmethod
def get_output_directory() -> str:
"""Getter of the output directory."""
out_path = get_action_input(OUTPUT_PATH, default=DEFAULT_OUTPUT_PATH)
return make_absolute_path(out_path)

def validate_inputs(self, repositories_json: str) -> None:
@staticmethod
def validate_inputs(out_path: str) -> None:
"""
Validate the input attributes of the action.
Loads the inputs provided for the Living documentation generator.
Logs any validation errors and exits if any are found.
@param repositories_json: The JSON string containing the repositories to fetch.
@param out_path: The output path for the generated documentation.
@return: None
"""

# Validate correct format of input repositories_json
try:
json.loads(repositories_json)
except json.JSONDecodeError:
logger.error("Input attr `repositories_json` is not a valid JSON string.", exc_info=True)
sys.exit(1)
# Validate INPUT_REPOSITORIES
ActionInputs.get_repositories()

# Validate GitHub token
if not self.__github_token:
logger.error("GitHub token could not be loaded from the environment.", exc_info=True)
# Validate INPUT_OUTPUT_PATH
if out_path == "":
logger.error("INPUT_OUTPUT_PATH can not be an empty string.")
sys.exit(1)

# Check that the INPUT_OUTPUT_PATH is not a project directory
# Note: that would cause project files to be overwritten
project_directories = get_all_project_directories()
abspath_user_output_path = os.path.abspath(ActionInputs.get_output_directory())

# Ensure project directories are absolute paths
project_abspath_directories = [os.path.abspath(d) for d in project_directories]

if abspath_user_output_path in project_abspath_directories:
project_abspath_directories.remove(abspath_user_output_path)

for project_directory in project_abspath_directories:
# Finds the common path between the absolute paths of out_path and project_directory
common_path = os.path.commonpath([os.path.abspath(out_path), os.path.abspath(project_directory)])

# Check if common path is equal to the absolute path of project_directory
if common_path == os.path.abspath(project_directory):
logger.error("INPUT_OUTPUT_PATH cannot be chosen as a part of any project folder.")
sys.exit(1)

logger.debug("Action inputs validation successfully completed.")
70 changes: 28 additions & 42 deletions living_documentation_generator/generator.py
@@ -31,7 +31,6 @@
from living_documentation_generator.action_inputs import ActionInputs
from living_documentation_generator.github_projects import GithubProjects
from living_documentation_generator.model.github_project import GithubProject
from living_documentation_generator.model.config_repository import ConfigRepository
from living_documentation_generator.model.consolidated_issue import ConsolidatedIssue
from living_documentation_generator.model.project_issue import ProjectIssue
from living_documentation_generator.utils.decorators import safe_call_decorator
@@ -61,35 +60,14 @@ class LivingDocumentationGenerator:
ISSUE_PAGE_TEMPLATE_FILE = os.path.join(PROJECT_ROOT, os.pardir, "templates", "issue_detail_page_template.md")
INDEX_PAGE_TEMPLATE_FILE = os.path.join(PROJECT_ROOT, os.pardir, "templates", "_index_page_template.md")

def __init__(self, action_inputs: ActionInputs):
self.__action_inputs = action_inputs
def __init__(self):
github_token = ActionInputs.get_github_token()

github_token = self.__action_inputs.github_token
self.__github_instance: Github = Github(auth=Auth.Token(token=github_token), per_page=ISSUES_PER_PAGE_LIMIT)
self.__github_projects_instance: GithubProjects = GithubProjects(token=github_token)
self.__rate_limiter: GithubRateLimiter = GithubRateLimiter(self.__github_instance)
self.__safe_call: Callable = safe_call_decorator(self.__rate_limiter)

@property
def repositories(self) -> list[ConfigRepository]:
"""Getter of the list of config repository objects to fetch from."""
return self.__action_inputs.repositories

@property
def project_state_mining_enabled(self) -> bool:
"""Getter of the project state mining switch."""
return self.__action_inputs.is_project_state_mining_enabled

@property
def structured_output(self) -> bool:
"""Getter of the structured output switch."""
return self.__action_inputs.structured_output

@property
def output_path(self) -> str:
"""Getter of the output directory."""
return self.__action_inputs.output_directory

def generate(self) -> None:
"""
Generate the Living Documentation markdown pages output.
@@ -123,15 +101,18 @@ def generate(self) -> None:
self._generate_markdown_pages(consolidated_issues)
logger.info("Markdown page generation - finished.")

def _clean_output_directory(self) -> None:
@staticmethod
def _clean_output_directory() -> None:
"""
Clean the output directory from the previous run.
@return: None
"""
if os.path.exists(self.output_path):
shutil.rmtree(self.output_path)
os.makedirs(self.output_path)
output_path = ActionInputs.get_output_directory()

if os.path.exists(output_path):
shutil.rmtree(output_path)
os.makedirs(output_path)

def _fetch_github_issues(self) -> dict[str, list[Issue]]:
"""
@@ -144,7 +125,7 @@ def _fetch_github_issues(self) -> dict[str, list[Issue]]:
total_issues_number = 0

# Run the fetching logic for every config repository
for config_repository in self.repositories:
for config_repository in ActionInputs.get_repositories():
repository_id = f"{config_repository.organization_name}/{config_repository.repository_name}"

repository = self.__safe_call(self.__github_instance.get_repo)(repository_id)
@@ -194,7 +175,7 @@ def _fetch_github_project_issues(self) -> dict[str, list[ProjectIssue]]:
@return: A dictionary containing project issue objects with unique key.
"""
if not self.project_state_mining_enabled:
if not ActionInputs.get_is_project_state_mining_enabled():
logger.info("Fetching GitHub project data - project mining is not allowed.")
return {}

@@ -203,7 +184,7 @@ def _fetch_github_project_issues(self) -> dict[str, list[ProjectIssue]]:
# Mine project issues for every repository
all_project_issues: dict[str, list[ProjectIssue]] = {}

for config_repository in self.repositories:
for config_repository in ActionInputs.get_repositories():
repository_id = f"{config_repository.organization_name}/{config_repository.repository_name}"
projects_title_filter = config_repository.projects_title_filter
logger.debug("Filtering projects: %s. If filter is empty, fetching all.", projects_title_filter)
@@ -320,7 +301,7 @@ def _generate_markdown_pages(self, issues: dict[str, ConsolidatedIssue]) -> None
logger.info("Markdown page generation - generated `%s` issue pages.", len(issues))

# Generate an index page with a summary table about all issues
if self.structured_output:
if ActionInputs.get_is_structured_output_enabled():
self._generate_structured_index_page(issue_index_page_template, issues)
else:
issues = list(issues.values())
@@ -402,7 +383,9 @@ def _generate_index_page(
"""
# Initializing the issue table header based on the project mining state
issue_table = (
TABLE_HEADER_WITH_PROJECT_DATA if self.project_state_mining_enabled else TABLE_HEADER_WITHOUT_PROJECT_DATA
TABLE_HEADER_WITH_PROJECT_DATA
if ActionInputs.get_is_project_state_mining_enabled()
else TABLE_HEADER_WITHOUT_PROJECT_DATA
)

# Create an issue summary table for every issue
@@ -426,7 +409,8 @@ def _generate_index_page(
with open(os.path.join(index_directory_path, "_index.md"), "w", encoding="utf-8") as f:
f.write(index_page)

def _generate_markdown_line(self, consolidated_issue: ConsolidatedIssue) -> str:
@staticmethod
def _generate_markdown_line(consolidated_issue: ConsolidatedIssue) -> str:
"""
Generates a markdown summary line for a single issue.
@@ -446,7 +430,7 @@ def _generate_markdown_line(self, consolidated_issue: ConsolidatedIssue) -> str:
status = ", ".join(status_list) if status_list else "---"

# Change the bool values to more user-friendly characters
if self.project_state_mining_enabled:
if ActionInputs.get_is_project_state_mining_enabled():
if consolidated_issue.linked_to_project:
linked_to_project = LINKED_TO_PROJECT_TRUE
else:
@@ -466,7 +450,7 @@ def _generate_markdown_line(self, consolidated_issue: ConsolidatedIssue) -> str:

return md_issue_line

def _generate_issue_summary_table(self, consolidated_issue: ConsolidatedIssue) -> str:
@staticmethod
def _generate_issue_summary_table(consolidated_issue: ConsolidatedIssue) -> str:
"""
Generates a string representation of feature info in a table format.
@@ -508,7 +493,7 @@ def _generate_issue_summary_table(self, consolidated_issue: ConsolidatedIssue) -
]

# Update the summary table, based on the project data mining situation
if self.project_state_mining_enabled:
if ActionInputs.get_is_project_state_mining_enabled():
project_statuses = consolidated_issue.project_issue_statuses

if consolidated_issue.linked_to_project:
@@ -546,18 +531,19 @@ def _generate_issue_summary_table(self, consolidated_issue: ConsolidatedIssue) -

return issue_info

def _generate_directory_path(self, repository_id: Optional[str]) -> str:
@staticmethod
def _generate_directory_path(repository_id: Optional[str]) -> str:
"""
Generates a directory path based on if structured output is required.
@param repository_id: The repository id.
@return: The generated directory path.
"""
if self.structured_output and repository_id:
output_path = ActionInputs.get_output_directory()

if ActionInputs.get_is_structured_output_enabled() and repository_id:
organization_name, repository_name = repository_id.split("/")
output_path = os.path.join(self.output_path, organization_name, repository_name)
else:
output_path = self.output_path
output_path = os.path.join(output_path, organization_name, repository_name)

os.makedirs(output_path, exist_ok=True)

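Because the generator now resolves its configuration through the static `ActionInputs` getters, helpers such as `_generate_directory_path` can be exercised in isolation by patching those getters. A hedged pytest sketch (not part of this commit; the patch targets, the `tmp_path` fixture, and the asserted side effect are assumptions drawn from the diff above):

```
import os
from unittest.mock import patch

from living_documentation_generator.generator import LivingDocumentationGenerator


def test_generate_directory_path_with_structured_output(tmp_path):
    # With structured output enabled, an "org/repo" sub-tree is created
    # below the configured output directory via os.makedirs.
    with patch(
        "living_documentation_generator.generator.ActionInputs.get_output_directory",
        return_value=str(tmp_path),
    ), patch(
        "living_documentation_generator.generator.ActionInputs.get_is_structured_output_enabled",
        return_value=True,
    ):
        LivingDocumentationGenerator._generate_directory_path("org/repo")

    assert os.path.isdir(os.path.join(str(tmp_path), "org", "repo"))
```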