issue: 4176956: move the text handling to the analyzers classes #284

Merged: 26 commits, Dec 4, 2024
Changes from 5 commits
63 changes: 2 additions & 61 deletions plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzer.py
@@ -369,7 +369,8 @@ def create_analyzer(
end = time.perf_counter()
log.LOGGER.debug(f"Took {end-start:.3f} to load the parsed data")

all_images_outputs_and_title = ufm_top_analyzer.full_analysis()
all_images_outputs_and_title, dataframes_for_pdf, lists_to_add = \
ufm_top_analyzer.full_analysis_all_analyzers()
png_images = []
images_and_title_to_present = []
for image_title in all_images_outputs_and_title:
@@ -388,66 +389,6 @@ def create_analyzer(
)

pdf = PDFCreator(pdf_path, pdf_header, png_images, text_to_show_in_pdf)
dataframes_for_pdf = []
fabric_info = (
ibdiagnet_analyzer.get_fabric_size()
if ibdiagnet_analyzer
else "No Fabric Info found"
)
dataframes_for_pdf.append(("Fabric info", fabric_info))
if links_flapping_analyzer:
dataframes_for_pdf.append(
(
"Link Flapping past week",
links_flapping_analyzer.get_link_flapping_last_week(),
)
)
lists_to_add = []
critical_events_headers = ["timestamp", "event_type", "event", "count"]
lists_to_add.append(
(
event_log_analyzer.get_critical_event_bursts(),
"More than 5 events burst over a minute",
critical_events_headers,
)
)

existing_telemetry_analyzers = []
for telemetry_analyzer in [
ibdianget_2_ports_primary_analyzer,
ibdianget_2_ports_secondary_analyzer,
]:
if telemetry_analyzer:
existing_telemetry_analyzers.append(telemetry_analyzer)

for cur_telemetry in existing_telemetry_analyzers:
dataframes_for_pdf.append(
(
f"{cur_telemetry.telemetry_type} Telemetry iteration time",
cur_telemetry.get_last_iterations_time_stats(),
)
)
dataframes_for_pdf.append(
(
f"{cur_telemetry.telemetry_type} "
"Telemetry iteration first and last timestamps",
cur_telemetry.get_first_last_iteration_timestamp(),
)
)
dataframes_for_pdf.append(
(
f"{cur_telemetry.telemetry_type} Telemetry fabric size",
cur_telemetry.get_number_of_switches_and_ports(),
)
)
lists_to_add.append(
(
[cur_telemetry.get_number_of_core_dumps()],
f"{cur_telemetry.telemetry_type} "
"number of core dumps found in the logs",
["Amount"],
)
)

# The PDF creator gets all the images and text to add to the report
pdf.create_pdf(dataframes_for_pdf, lists_to_add)
(next file in the diff)
@@ -57,6 +57,8 @@ def __init__(self, dest_image_path):
def _save_data_based_on_timestamp(
self, data_to_plot, x_label, y_label, title, large_sample=False
):
if len(data_to_plot) == 0:
return
with plt.ion():
log.LOGGER.debug(f"saving {title}")
plt.figure(figsize=(12, 6))
@@ -156,7 +158,7 @@ def full_analysis(self):
except: # pylint: disable=bare-except
pass

return self._images_created if len(self._images_created) > 0 else []
return self._images_created if len(self._images_created) > 0 else [], [], []


class BaseAnalyzer(BaseImageCreator):
(next file in the diff)
@@ -152,3 +152,15 @@ def plot_link_up_down_count_per_aggregation_time(self):
self._save_pivot_data_in_bars(
pivot_links_data, "Time", "Number of Events", "Link up/down events", "Event"
)

def full_analysis(self):
images, _, _ = super().full_analysis()
critical_events_headers = ["timestamp", "event_type", "event", "count"]
lists = [
(
self.get_critical_event_bursts(),
"More than 5 events burst over a minute",
critical_events_headers,
)
]
return images, [], lists
(next file in the diff)
@@ -193,3 +193,38 @@ def get_number_of_core_dumps(self):
self._log_data_sorted["type"] == "timeout_dump_core"
]
return {"Amount": len(core_dumps)}

def full_analysis(self):
Collaborator:

Miryam,
You misunderstood the purpose of the assignment. The base class already provides the infrastructure to run the analysis. You should add methods that run as part of the base class full_analysis, NOT implement your own full_analysis method.
If it's not clear, let's talk.

Contributor Author:

OK. I changed it.

images, _, _ = super().full_analysis()
dataframes_for_pdf = []
lists_to_add = []
dataframes_for_pdf.append(
(
f"{self.telemetry_type} Telemetry iteration time",
self.get_last_iterations_time_stats(),
)
)

dataframes_for_pdf.append(
(
f"{self.telemetry_type} "
"Telemetry iteration first and last timestamps",
self.get_first_last_iteration_timestamp(),
)
)
dataframes_for_pdf.append(
(
f"{self.telemetry_type} Telemetry fabric size",
self.get_number_of_switches_and_ports(),
)
)

lists_to_add.append(
(
[self.get_number_of_core_dumps()],
f"{self.telemetry_type} "
"number of core dumps found in the logs",
["Amount"],
)
)
return images, dataframes_for_pdf, lists_to_add
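As a side note for readers outside the repository: the pattern the reviewer asks for above, registering per-analyzer steps so the shared base-class full_analysis drives them, could look roughly like the sketch below. The attribute name _analysis_steps and the example subclass are illustrative assumptions, not the project's actual identifiers.

# Sketch only: `_analysis_steps` and the example subclass are illustrative
# assumptions; the repository's real names may differ.
class BaseImageCreator:
    def __init__(self, dest_image_path):
        self._dest_image_path = dest_image_path
        self._images_created = []
        # Subclasses register the methods they want the shared
        # full_analysis() loop to run, instead of overriding it.
        self._analysis_steps = []

    def full_analysis(self):
        """Run every registered step; each step may save images."""
        for step in self._analysis_steps:
            try:
                step()
            except:  # pylint: disable=bare-except
                pass
        return self._images_created


class SomeLogAnalyzer(BaseImageCreator):
    def __init__(self, dest_image_path):
        super().__init__(dest_image_path)
        # Registration is all a subclass needs to do here.
        self._analysis_steps.append(self._plot_something)

    def _plot_something(self):
        # Hypothetical plotting step, for illustration only.
        pass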
(next file in the diff)
@@ -33,4 +33,6 @@ def full_analysis(self):
Returns a list of all the graphs created and their title
"""
self.print_fabric_size()
return []
fabric_info = self.get_fabric_size()
Collaborator:

Same thing. Use the infrastructure in the base class to run this.

Contributor Author:

Fixed.

tmp_df = [("Fabric info", fabric_info)]
return [], tmp_df, []
(next file in the diff)
@@ -142,5 +142,7 @@ def plot_link_flapping_last_week(self):
)

def full_analysis(self):
self.get_link_flapping_last_week()
return super().full_analysis()
link_flapping_last_week = self.get_link_flapping_last_week()
images, _, _ = super().full_analysis()
df = [("Link Flapping last week", link_flapping_last_week,)]
return images, df, []
(next file in the diff)
@@ -13,20 +13,33 @@
# pylint: disable=missing-module-docstring


from plugins.ufm_log_analyzer_plugin.src.loganalyze.log_analyzers.ibdiagnet_log_analyzer\
import IBDIAGNETLogAnalyzer


class UFMTopAnalyzer:
def __init__(self):
self._analyzers = []

def add_analyzer(self, analyzer):
self._analyzers.append(analyzer)

def full_analysis(self):
def full_analysis_all_analyzers(self):
"""
Returns a list of all the graphs created and their title
"""
graphs_and_titles = []
dataframes = []
lists_to_add = []
for analyzer in self._analyzers:
tmp_images_list = analyzer.full_analysis()
if len(tmp_images_list) > 0:
graphs_and_titles.extend(tmp_images_list)
return graphs_and_titles
tmp_images_list, tmp_dataframes, tmp_lists = analyzer.full_analysis()
graphs_and_titles.extend(tmp_images_list)
dataframes.extend(tmp_dataframes)
lists_to_add.extend(tmp_lists)

has_ibdiagnet_analyzer = any(isinstance(instance, IBDIAGNETLogAnalyzer) \
for instance in self._analyzers)
if not has_ibdiagnet_analyzer:
dataframes.append(("Fabric info", ("No Fabric Info found")))

return graphs_and_titles, dataframes, lists_to_add
(next file in the diff)
@@ -17,6 +17,7 @@
import os
from io import StringIO
from fpdf import FPDF
import pandas as pd
from tabulate import tabulate


Expand Down Expand Up @@ -85,7 +86,7 @@ def add_list_of_dicts_as_text(self, data_list, title=None, headers=None):

def add_dataframe_as_text(self, data_frame, title=None):
"""Adds a DataFrame to the PDF as aligned text without row numbers."""
if data_frame is None or data_frame.empty:
if not isinstance(data_frame, pd.DataFrame) or data_frame.empty:
Collaborator:

What about None?

Contributor Author:

If data_frame is None, 'isinstance(data_frame, pd.DataFrame)' will return False.
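A quick standalone check of the contributor's point (this assumes only that pandas is installed; it is not part of the PR diff):

import pandas as pd

# isinstance(None, pd.DataFrame) is False, so the single isinstance check
# also rejects None without a separate "is None" test.
print(isinstance(None, pd.DataFrame))            # False
print(isinstance(pd.DataFrame(), pd.DataFrame))  # True
print(pd.DataFrame().empty)                      # True, so empty frames are skipped too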

return

if title: