-
Notifications
You must be signed in to change notification settings - Fork 23
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge branch 'main' into vg-pdr-3950870
- Loading branch information
Showing
7 changed files
with
209 additions
and
28 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
89 changes: 89 additions & 0 deletions
89
plugins/pdr_deterministic_plugin/ufm_sim_web_service/data_store.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,89 @@ | ||
from os.path import join,exists | ||
from os import remove,makedirs | ||
from pathlib import Path | ||
from datetime import datetime | ||
import pandas as pd | ||
|
||
class DataStore:
    """
    Arrange the data store of the telemetries that we save, abs or delta.
    Removes old files when asked to, keeping only the newest AMOUNT_FILES_TO_KEEP
    per folder.
    """
    # Timestamp-based filenames: lexicographic order == chronological order,
    # which _get_files_to_remove relies on when sorting by name.
    OUTPUT_FILE_FORMAT = "%Y_%m_%d_%H_%M_%S.csv"
    AMOUNT_FILES_TO_KEEP = 10
    BASE_PATH = "/opt/ufm/ufm_plugin_pdr_deterministic/datastore"
    ABS_PATH = "abs"
    DELTA_PATH = "delta"
    TAR_SUFFIX = "*.csv"

    def __init__(self, logger) -> None:
        """
        :param logger: logging-like object used for info/error reporting.
        """
        self.logger = logger
        # exist_ok=True fixes the original behavior where the sub-folders were
        # created only when BASE_PATH itself was missing: a pre-existing
        # BASE_PATH with absent abs/delta sub-folders made later saves fail.
        makedirs(self._folder_abs(), exist_ok=True)
        makedirs(self._folder_delta(), exist_ok=True)

    def _folder_abs(self) -> str:
        """Full path of the folder holding absolute-value telemetry CSVs."""
        return join(self.BASE_PATH, self.ABS_PATH)

    def _folder_delta(self) -> str:
        """Full path of the folder holding delta telemetry CSVs."""
        return join(self.BASE_PATH, self.DELTA_PATH)

    def _get_filename(self) -> str:
        """Return a timestamp-based CSV file name for the current time."""
        return datetime.now().strftime(self.OUTPUT_FILE_FORMAT)

    def get_filename_abs(self) -> str:
        """
        return a filename for abs data
        """
        return join(self._folder_abs(), self._get_filename())

    def get_filename_delta(self) -> str:
        """
        return a filename for delta data
        """
        return join(self._folder_delta(), self._get_filename())

    def _get_files_to_remove(self, data_path: str, suffix: str, to_keep: int) -> list:
        """
        find the file names of the oldest which is after the amount of to_keep
        search for in the data_path with the suffix.

        :param data_path: directory to scan.
        :param suffix: glob pattern (e.g. "*.csv").
        :param to_keep: number of newest files to preserve.
        :return: list of full paths (oldest first) that exceed the keep count.
        """
        files_to_remove = []
        input_path_dir = Path(data_path)
        files = list(input_path_dir.glob(suffix))
        # Names are timestamps (OUTPUT_FILE_FORMAT), so sorting by name sorts
        # oldest-to-newest.
        files.sort(key=lambda p: p.name)
        files = [str(p) for p in files]
        if len(files) > to_keep:
            files_to_remove = files[:len(files) - to_keep]
        return files_to_remove

    def clean_old_files(self) -> None:
        """
        search for the both locations to clean the old files.
        """
        for data_path in [self._folder_abs(), self._folder_delta()]:
            files = self._get_files_to_remove(data_path, self.TAR_SUFFIX, self.AMOUNT_FILES_TO_KEEP)
            if len(files) > 0:
                self._remove_files(files)

    def _remove_files(self, files: list) -> None:
        """
        Delete a list of files
        :param files: (List) List of files to be removed
        :return: None
        """
        self.logger.info(f"removing {len(files)} old files")
        for file in files:
            try:
                # EAFP: the original exists() pre-check was redundant with the
                # FileNotFoundError handler and race-prone (file could vanish
                # between check and remove).
                remove(file)
            except FileNotFoundError:
                pass  # already gone — nothing to do
            except OSError as exc:
                self.logger.error("failed to remove file %s [%s]", file, exc)

    def save(self, dataframe: pd.DataFrame, file_name: str) -> None:
        """
        save dataframe to the file name
        """
        self.logger.info(f"saving data to {file_name}")
        dataframe.to_csv(file_name)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
56 changes: 56 additions & 0 deletions
56
plugins/pdr_deterministic_plugin/ufm_sim_web_service/telemetry_collector.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,56 @@ | ||
import urllib | ||
import pandas as pd | ||
from data_store import DataStore | ||
|
||
class TelemetryCollector:
    """
    Represent Telemetry collector which send DataFrame once telemetry is called.
    Calls data store class for save the abs or delta data.
    """
    # Columns that identify a single port across consecutive samples; used as
    # the merge key when computing deltas.
    BASED_COLUMNS = ["Node_GUID", "port_guid", "Port_Number"]
    # Columns never differenced: identity columns plus timestamp/tag metadata.
    KEY = BASED_COLUMNS + ["timestamp","tag"]
    SECONDARY_TELEMETRY_PORT = 9002
    SECONDARY_INSTANCE = "low_freq_debug"

    def __init__(self,test_mode:bool,logger,data_store:DataStore) -> None:
        """
        :param test_mode: when True, get_telemetry reads from the local
            simulation endpoint instead of the secondary telemetry instance.
        :param logger: logging-like object for info/error reporting.
        :param data_store: DataStore used to persist abs/delta CSV snapshots.
        """
        self.test_mode = test_mode
        self.logger = logger
        # Last successfully fetched full (abs) sample; None until the first
        # successful fetch, after which deltas are computed against it.
        self.previous_telemetry_data = None
        self.data_store = data_store

    def get_telemetry(self):
        """
        get the telemetry from secondary telemetry, if it in test mode it get from the simulation
        return DataFrame of the telemetry

        Side effects: saves a delta CSV when a previous sample exists, or an
        abs CSV (and caches the sample) on the first successful fetch.
        Returns None-or-DataFrame; callers must handle the None (fetch failed).
        """
        if self.test_mode:
            url = "http://127.0.0.1:9090/csv/xcset/simulated_telemetry"
        else:
            url = f"http://127.0.0.1:{self.SECONDARY_TELEMETRY_PORT}/csv/xcset/{self.SECONDARY_INSTANCE}"
        try:
            self.logger.info(f"collecting telemetry from {url}.")
            # pandas fetches the URL itself; parser/URL errors are treated as
            # "no data this cycle" rather than fatal.
            telemetry_data = pd.read_csv(url)
        except (pd.errors.ParserError, pd.errors.EmptyDataError, urllib.error.URLError) as connection_error:
            self.logger.error("failed to get telemetry data from UFM, fetched url=%s. Error: %s",url,connection_error)
            telemetry_data = None
        if self.previous_telemetry_data is not None and telemetry_data is not None:
            delta = self._get_delta(self.previous_telemetry_data,telemetry_data)
            # when we want to keep only delta
            # NOTE(review): previous_telemetry_data is NOT updated here, so all
            # deltas are computed against the first abs sample — confirm this
            # is intentional (cumulative delta) and not a missing update.
            if len(delta) > 0:
                self.data_store.save(delta,self.data_store.get_filename_delta())
        elif telemetry_data is not None:
            # when we want to keep the abs
            self.previous_telemetry_data = telemetry_data
            self.data_store.save(telemetry_data,self.data_store.get_filename_abs())
        return telemetry_data

    def _get_delta(self, first_df: pd.DataFrame, second_df:pd.DataFrame):
        """
        Compute second_df minus first_df per port for numeric counter columns.

        Rows are matched on BASED_COLUMNS via an inner merge; KEY columns and
        string-valued columns are copied from second_df unchanged, all other
        columns become (second - first) differences.

        NOTE(review): `merged_df.iat[0, index]` assumes the merge result keeps
        second_df's columns first and in order, and raises IndexError when the
        merge result is empty — confirm callers guarantee key overlap.
        NOTE(review): delta columns come from merged_df rows while passthrough
        columns come from second_df directly; if the inner merge drops or
        reorders rows the two sources misalign — verify row alignment.
        """
        merged_df = pd.merge(second_df, first_df, on=self.BASED_COLUMNS, how='inner', suffixes=('', '_x'))
        delta_dataframe = pd.DataFrame()
        for index,col in enumerate(second_df.columns):
            # Probe the first merged row to decide numeric vs string column.
            if col not in self.KEY and not isinstance(merged_df.iat[0,index],str):
                col_x = col + "_x"
                delta_dataframe[col] = merged_df[col] - merged_df[col_x]
            else:
                delta_dataframe[col] = second_df[col]
        return delta_dataframe
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters