Skip to content

Commit

Permalink
encapsulation
Browse files Browse the repository at this point in the history
  • Loading branch information
Miryam-Schwartz committed Nov 28, 2024
1 parent 22cf5b8 commit 532cf34
Show file tree
Hide file tree
Showing 8 changed files with 76 additions and 71 deletions.
63 changes: 2 additions & 61 deletions plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -369,7 +369,8 @@ def create_analyzer(
end = time.perf_counter()
log.LOGGER.debug(f"Took {end-start:.3f} to load the parsed data")

all_images_outputs_and_title = ufm_top_analyzer.full_analysis()
all_images_outputs_and_title, dataframes_for_pdf, lists_to_add = \
ufm_top_analyzer.full_analysis_all_analyzers()
png_images = []
images_and_title_to_present = []
for image_title in all_images_outputs_and_title:
Expand All @@ -388,66 +389,6 @@ def create_analyzer(
)

pdf = PDFCreator(pdf_path, pdf_header, png_images, text_to_show_in_pdf)
dataframes_for_pdf = []
fabric_info = (
ibdiagnet_analyzer.get_fabric_size()
if ibdiagnet_analyzer
else "No Fabric Info found"
)
dataframes_for_pdf.append(("Fabric info", fabric_info))
if links_flapping_analyzer:
dataframes_for_pdf.append(
(
"Link Flapping past week",
links_flapping_analyzer.get_link_flapping_last_week(),
)
)
lists_to_add = []
critical_events_headers = ["timestamp", "event_type", "event", "count"]
lists_to_add.append(
(
event_log_analyzer.get_critical_event_bursts(),
"More than 5 events burst over a minute",
critical_events_headers,
)
)

existing_telemetry_analyzers = []
for telemetry_analyzer in [
ibdianget_2_ports_primary_analyzer,
ibdianget_2_ports_secondary_analyzer,
]:
if telemetry_analyzer:
existing_telemetry_analyzers.append(telemetry_analyzer)

for cur_telemetry in existing_telemetry_analyzers:
dataframes_for_pdf.append(
(
f"{cur_telemetry.telemetry_type} Telemetry iteration time",
cur_telemetry.get_last_iterations_time_stats(),
)
)
dataframes_for_pdf.append(
(
f"{cur_telemetry.telemetry_type} "
"Telemetry iteration first and last timestamps",
cur_telemetry.get_first_last_iteration_timestamp(),
)
)
dataframes_for_pdf.append(
(
f"{cur_telemetry.telemetry_type} Telemetry fabric size",
cur_telemetry.get_number_of_switches_and_ports(),
)
)
lists_to_add.append(
(
[cur_telemetry.get_number_of_core_dumps()],
f"{cur_telemetry.telemetry_type} "
"number of core dumps found in the logs",
["Amount"],
)
)

# PDF creator gets all the images and to add to the report
pdf.create_pdf(dataframes_for_pdf, lists_to_add)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ def full_analysis(self):
except: # pylint: disable=bare-except
pass

return self._images_created if len(self._images_created) > 0 else []
return self._images_created if len(self._images_created) > 0 else [], [], []


class BaseAnalyzer(BaseImageCreator):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -152,3 +152,15 @@ def plot_link_up_down_count_per_aggregation_time(self):
self._save_pivot_data_in_bars(
pivot_links_data, "Time", "Number of Events", "Link up/down events", "Event"
)

def full_analysis(self):
    """Run the base analysis and collect critical-event burst data.

    Returns:
        tuple: (images, dataframes, lists) — the images produced by the
        base class, an always-empty dataframe collection, and a single
        list entry describing event bursts (more than 5 events within a
        minute), with its display title and column headers.
    """
    # Only the images from the base analysis are used here; the
    # dataframe/list slots are filled by this subclass.
    base_images, _unused_dfs, _unused_lists = super().full_analysis()
    burst_entry = (
        self.get_critical_event_bursts(),
        "More than 5 events burst over a minute",
        ["timestamp", "event_type", "event", "count"],
    )
    return base_images, [], [burst_entry]
Original file line number Diff line number Diff line change
Expand Up @@ -193,3 +193,38 @@ def get_number_of_core_dumps(self):
self._log_data_sorted["type"] == "timeout_dump_core"
]
return {"Amount": len(core_dumps)}

def full_analysis(self):
    """Run the base analysis and gather telemetry summary tables.

    Returns:
        tuple: (images, dataframes_for_pdf, lists_to_add) — the images
        produced by the base class, per-iteration telemetry statistics
        tables (iteration time, first/last timestamps, fabric size), and
        one list entry reporting the core-dump count found in the logs.
    """
    images, _unused_dfs, _unused_lists = super().full_analysis()
    # Each entry pairs a human-readable title with the data to render.
    dataframes_for_pdf = [
        (
            f"{self.telemetry_type} Telemetry iteration time",
            self.get_last_iterations_time_stats(),
        ),
        (
            f"{self.telemetry_type} "
            "Telemetry iteration first and last timestamps",
            self.get_first_last_iteration_timestamp(),
        ),
        (
            f"{self.telemetry_type} Telemetry fabric size",
            self.get_number_of_switches_and_ports(),
        ),
    ]
    lists_to_add = [
        (
            [self.get_number_of_core_dumps()],
            f"{self.telemetry_type} "
            "number of core dumps found in the logs",
            ["Amount"],
        )
    ]
    return images, dataframes_for_pdf, lists_to_add
Original file line number Diff line number Diff line change
Expand Up @@ -33,4 +33,6 @@ def full_analysis(self):
Returns a list of all the graphs created and their title
"""
self.print_fabric_size()
return []
fabric_info = (self.get_fabric_size())
tmp_df = [("Fabric info", fabric_info)]
return [], tmp_df, []
Original file line number Diff line number Diff line change
Expand Up @@ -142,5 +142,7 @@ def plot_link_flapping_last_week(self):
)

def full_analysis(self):
    """Run the base analysis and add last week's link-flapping data.

    Returns:
        tuple: (images, dataframes, lists) — the images produced by the
        base class, one dataframe entry with the past week's link-flapping
        data, and an always-empty lists collection.
    """
    # NOTE: the span previously contained a superseded implementation that
    # returned before this code ran (diff artifact); only the final version
    # is kept here.
    link_flapping_last_week = self.get_link_flapping_last_week()
    images, _unused_dfs, _unused_lists = super().full_analysis()
    return images, [("Link Flapping past week", link_flapping_last_week)], []
Original file line number Diff line number Diff line change
Expand Up @@ -13,20 +13,31 @@
# pylint: disable=missing-module-docstring


from plugins.ufm_log_analyzer_plugin.src.loganalyze.log_analyzers.ibdiagnet_log_analyzer import IBDIAGNETLogAnalyzer


class UFMTopAnalyzer:
    """Aggregates the analysis output of all registered log analyzers."""

    def __init__(self):
        # Analyzers run in registration order during
        # full_analysis_all_analyzers().
        self._analyzers = []

    def add_analyzer(self, analyzer):
        """Register an analyzer whose full_analysis() result is aggregated."""
        self._analyzers.append(analyzer)

    def full_analysis_all_analyzers(self):
        """Run full_analysis() on every analyzer and merge the results.

        Returns:
            tuple: (graphs_and_titles, dataframes, lists_to_add) — each is
            the concatenation of the corresponding element of every
            analyzer's full_analysis() 3-tuple.
        """
        graphs_and_titles = []
        dataframes = []
        lists_to_add = []
        for analyzer in self._analyzers:
            images, analyzer_dataframes, analyzer_lists = analyzer.full_analysis()
            # `x or []` guards against analyzers returning None for a slot;
            # extend() on an empty sequence is a harmless no-op.
            graphs_and_titles.extend(images or [])
            dataframes.extend(analyzer_dataframes or [])
            lists_to_add.extend(analyzer_lists or [])

        # The fabric-size table normally comes from IBDIAGNETLogAnalyzer;
        # add a placeholder entry when that analyzer was never registered.
        has_ibdiagnet_analyzer = any(
            isinstance(analyzer, IBDIAGNETLogAnalyzer)
            for analyzer in self._analyzers
        )
        if not has_ibdiagnet_analyzer:
            dataframes.append(("Fabric info", "No Fabric Info found"))

        return graphs_and_titles, dataframes, lists_to_add
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
import os
from io import StringIO
from fpdf import FPDF
import pandas as pd
from tabulate import tabulate


Expand Down Expand Up @@ -85,7 +86,8 @@ def add_list_of_dicts_as_text(self, data_list, title=None, headers=None):

def add_dataframe_as_text(self, data_frame, title=None):
"""Adds a DataFrame to the PDF as aligned text without row numbers."""
if data_frame is None or data_frame.empty:
print(data_frame)
if data_frame is None or not isinstance(data_frame, pd.DataFrame) or data_frame.empty:
return

if title:
Expand Down

0 comments on commit 532cf34

Please sign in to comment.