diff --git a/integrations/sonarqube/CHANGELOG.md b/integrations/sonarqube/CHANGELOG.md index eb98d82827..10afea0946 100644 --- a/integrations/sonarqube/CHANGELOG.md +++ b/integrations/sonarqube/CHANGELOG.md @@ -7,6 +7,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 +## 0.1.111 (2024-11-20) + + +### Bug Fixes + +- Added defensive mechanism to fail the resync event for the kind when no data is fetched from Sonar API + +### Improvements + +- Added more logs to track the request and response object made to the Sonar API + + ## 0.1.110 (2024-11-12) diff --git a/integrations/sonarqube/client.py b/integrations/sonarqube/client.py index ff0f5cf70c..afc6acd1a7 100644 --- a/integrations/sonarqube/client.py +++ b/integrations/sonarqube/client.py @@ -125,13 +125,11 @@ async def send_paginated_api_request( query_params = query_params or {} query_params["ps"] = PAGE_SIZE all_resources = [] # List to hold all fetched resources - try: - logger.debug( - f"Sending API request to {method} {endpoint} with query params: {query_params}" - ) - while True: + logger.info( + f"Sending API request to {method} {endpoint} with query params: {query_params}" + ) response = await self.http_client.request( method=method, url=f"{self.base_url}/api/{endpoint}", @@ -141,6 +139,9 @@ response.raise_for_status() response_json = response.json() resource = response_json.get(data_key, []) + if not resource: + logger.warning(f"No {data_key} found in response: {response_json}") + all_resources.extend(resource) # Check for paging information and decide whether to fetch more pages @@ -210,7 +211,9 @@ async def get_components( data_key="components", query_params=query_params, ) - + logger.info( + f"Fetched {len(response)} components {[item.get('key') for item in response]} from SonarQube" + ) return response except Exception as e: logger.error(f"Error occurred while fetching components: {e}") diff --git a/integrations/sonarqube/main.py 
b/integrations/sonarqube/main.py index 78064fba3d..0195e5d9e2 100644 --- a/integrations/sonarqube/main.py +++ b/integrations/sonarqube/main.py @@ -24,23 +24,46 @@ def init_sonar_client() -> SonarQubeClient: @ocean.on_resync(ObjectKind.PROJECTS) async def on_project_resync(kind: str) -> ASYNC_GENERATOR_RESYNC_TYPE: logger.info(f"Listing Sonarqube resource: {kind}") - + fetched_projects = False async for project_list in sonar_client.get_all_projects(): yield project_list + fetched_projects = True + + if not fetched_projects: + logger.error("No projects found in Sonarqube") + raise RuntimeError( + "No projects found in Sonarqube, failing the resync to avoid data loss" + ) @ocean.on_resync(ObjectKind.ISSUES) async def on_issues_resync(kind: str) -> ASYNC_GENERATOR_RESYNC_TYPE: + fetched_issues = False async for issues_list in sonar_client.get_all_issues(): yield issues_list + fetched_issues = True + + if not fetched_issues: + logger.error("No issues found in Sonarqube") + raise RuntimeError( + "No issues found in Sonarqube, failing the resync to avoid data loss" + ) @ocean.on_resync(ObjectKind.ANALYSIS) @ocean.on_resync(ObjectKind.SASS_ANALYSIS) async def on_saas_analysis_resync(kind: str) -> ASYNC_GENERATOR_RESYNC_TYPE: if not ocean.integration_config["sonar_is_on_premise"]: + fetched_analyses = False async for analyses_list in sonar_client.get_all_sonarcloud_analyses(): yield analyses_list + fetched_analyses = True + + if not fetched_analyses: + logger.error("No analysis found in Sonarqube") + raise RuntimeError( + "No analysis found in Sonarqube, failing the resync to avoid data loss" + ) @ocean.on_resync(ObjectKind.ONPREM_ANALYSIS) diff --git a/integrations/sonarqube/pyproject.toml b/integrations/sonarqube/pyproject.toml index 11e9f01841..34f3641654 100644 --- a/integrations/sonarqube/pyproject.toml +++ b/integrations/sonarqube/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "sonarqube" -version = "0.1.110" +version = "0.1.111" description = "SonarQube projects 
and code quality analysis integration" authors = ["Port Team "]