diff --git a/plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzers/console_log_analyzer.py b/plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzers/console_log_analyzer.py
index 8d0dccf5..0a482c6f 100644
--- a/plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzers/console_log_analyzer.py
+++ b/plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzers/console_log_analyzer.py
@@ -110,8 +110,10 @@ def print_exceptions_per_time_count(self):
         )
 
     def save_ufm_versions(self):
+        if not self.ufm_versions:
+            self.ufm_versions = "Not found"
         self._txt_for_pdf.append(
-            f"Used ufm version in console log {self.ufm_versions}{os.linesep}"
+            f"Used ufm version in console log: {self.ufm_versions}"
         )
 
     def full_analysis(self):
diff --git a/plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzers/ibdiagnet_log_analyzer.py b/plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzers/ibdiagnet_log_analyzer.py
index 1e03e2e7..f7a74272 100644
--- a/plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzers/ibdiagnet_log_analyzer.py
+++ b/plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzers/ibdiagnet_log_analyzer.py
@@ -31,7 +31,7 @@ def get_fabric_size(self):
 
     def save_fabric_size(self):
         fabric_info = self.get_fabric_size()
-        self._dataframes_for_pdf.append(("Fabric info", fabric_info))
+        self._dataframes_for_pdf.append(("Fabric info from ibdiagnet", fabric_info))
 
     def full_analysis(self):
         super().full_analysis()
diff --git a/plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzers/rest_api_log_analyzer.py b/plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzers/rest_api_log_analyzer.py
index 8b195e15..856d0955 100644
--- a/plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzers/rest_api_log_analyzer.py
+++ b/plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzers/rest_api_log_analyzer.py
@@ -72,10 +72,20 @@ def analyze_endpoints_freq(self, endpoints_count_to_show=10):
         ).fillna(0)
         data_to_show = data_to_show[top_x_uris]
 
+        # Extract only the path part after the domain and protocol from each URL
+        suffix_mapping = {}
+        for uri in top_x_uris:
+            parsed_uri = urlparse(uri)
+            suffix = parsed_uri.path.lstrip("/")  # Remove the leading slash if any
+            suffix_mapping[uri] = suffix
+
+        # Rename the columns in the data to use the suffix
+        data_to_show.rename(columns=suffix_mapping, inplace=True)
+
         return self._save_pivot_data_in_bars(
             data_to_show,
             "time",
             "requests count",
-            f"Top {endpoints_count_to_show} " "requests count over time",
-            "legend",
+            f"Top {endpoints_count_to_show} api requests count over time",
+            "path",
         )
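
Note (not part of the diff): a minimal sketch of what the new suffix mapping in rest_api_log_analyzer.py does to the pivot column names, assuming the pivot is a pandas DataFrame whose columns are full request URIs. The sample URIs and counts below are hypothetical.

from urllib.parse import urlparse

import pandas as pd

# Hypothetical pivoted request counts keyed by full URI (two time buckets).
data_to_show = pd.DataFrame(
    {
        "https://ufm.example.com/ufmRest/resources/systems": [3, 5],
        "https://ufm.example.com/ufmRest/monitoring/session": [1, 2],
    }
)

# Same idea as the diff: keep only the URL path, without the leading slash.
suffix_mapping = {}
for uri in data_to_show.columns:
    parsed_uri = urlparse(uri)
    suffix_mapping[uri] = parsed_uri.path.lstrip("/")

data_to_show.rename(columns=suffix_mapping, inplace=True)
print(list(data_to_show.columns))
# ['ufmRest/resources/systems', 'ufmRest/monitoring/session']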