Skip to content

Commit

Permalink
Added a test to ensure that we have the maximum number of logs.
Browse files Browse the repository at this point in the history
Signed-off-by: Elad Gershon <egershon@nvidia.com>
  • Loading branch information
egershonNvidia committed Sep 17, 2024
1 parent 99dbffd commit 4769aca
Showing 1 changed file with 39 additions and 10 deletions.
49 changes: 39 additions & 10 deletions plugins/pdr_deterministic_plugin/tests/simulation_telemetry.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,9 @@
from collections import OrderedDict
import requests
from utils.utils import Utils
import pandas as pd
from pathlib import Path
from datetime import datetime
from datetime import datetime,timedelta

lock = Lock()

Expand All @@ -34,6 +35,8 @@
RCV_REMOTE_PHY_ERROR_COUNTER = "PortRcvRemotePhysicalErrors"
TEMP_COUNTER = "Module_Temperature"
FEC_MODE = "fec_mode_active"
TELEMETRY_DATASTORE_LOCATION = "/opt/ufm/ufm_plugin_pdr_deterministic/datastore"
MAX_LOG_FILES = 10
ENDPOINT_CONFIG = {}

EXCLUDE_PORT_LONG_TIME = "ExcludePortForLongTime"
Expand Down Expand Up @@ -72,6 +75,8 @@ def do_GET(self): # pylint: disable=invalid-name
data = endpoint['data']
self.wfile.write(data.encode())

OUTPUT_FILE_FORMAT = "%Y_%m_%d_%H_%M_%S.csv"

DIFFERENT_DEFAULT_VALUES = {
# because the plugin reads the meta data to know the first temperature and we cannot stream the metadata.
TEMP_COUNTER:"5",
Expand Down Expand Up @@ -196,6 +201,7 @@ def start_server(port:str,changes_intervals:int, run_forever:bool):
counters_names = list(counters.keys())
header = ['timestamp', 'source_id,tag,Node_GUID,port_guid,Port_Number'] + counters_names
endpoint['data'] = ""
setup_log_test = False
while True:
# lock.acquire()
data = []
Expand All @@ -219,8 +225,29 @@ def start_server(port:str,changes_intervals:int, run_forever:bool):
if not run_forever and ENDPOINT_CONFIG["ITERATION_TIME"] > MAX_ITERATIONS:
# after all the tests are done, we need to stop the simulator and check the logs
return
if not setup_log_test:
setup_log_test = create_telemetries_logs()
time.sleep(changes_intervals)

def create_telemetries_logs():
    """
    Seed the telemetry datastore with up to double the number of logs the plugin is
    allowed to keep (MAX_LOG_FILES). This is a setup step for the log-rotation test:
    the copies are back-dated so the files the server itself writes remain the newest.

    :return: True when seed files were written into both the 'abs' and 'delta'
             folders, False when either folder had no CSV to copy from.
    """
    successful = True
    for option_location in ['abs','delta']:
        input_path_dir = Path(join(TELEMETRY_DATASTORE_LOCATION,option_location))
        files = list(input_path_dir.glob("*.csv"))
        if len(files)==0:
            print("didnt create files because there are no files in the folder")
            successful = False
            continue
        df = pd.read_csv(files[0])
        for day in range(1,MAX_LOG_FILES*2):
            # Back-date each copy by whole days so the logs the server creates stay
            # the most recent ones; a separate test checks for recent logs.
            back_dated_name = (datetime.now() - timedelta(days=day)).strftime(OUTPUT_FILE_FORMAT)
            # Bug fix: write into the datastore sub-folder (the original wrote to the
            # process CWD, so the folder under test was never actually seeded), and
            # drop the synthetic index column a read_csv/to_csv round-trip would add.
            df.to_csv(join(str(input_path_dir), back_dated_name), index=False)
    return successful

def excluded_ports_simulation(endpoint):
"""
Perform operations on exclusion port for current iteration
Expand All @@ -231,7 +258,6 @@ def excluded_ports_simulation(endpoint):
for port_index in range(len(rows)):
port_name = endpoint["Ports_names"][port_index]
iteration = ENDPOINT_CONFIG["ITERATION_TIME"]

# Process remove operation
if find_value(port_index, INCLUDE_PORT, iteration, None) is not None:
# Remove from exclusion list
Expand Down Expand Up @@ -344,22 +370,25 @@ def check_logs(config):
return 1

saved_files_tests = True
locations_telemetry_csv = "/opt/ufm/ufm_plugin_pdr_deterministic/datastore"
OUTPUT_FILE_FORMAT = "%Y_%m_%d_%H_%M_%S.csv"


for option_location in ['abs','delta']:
input_path_dir = Path(join(locations_telemetry_csv,option_location))
input_path_dir = Path(join(TELEMETRY_DATASTORE_LOCATION,option_location))
files = list(input_path_dir.glob("*.csv"))

if len(files) == 0: # testing we have files there
print(f"There is no files in the datastore location:{join(locations_telemetry_csv,option_location)}")
print(f"There is no files in the datastore location:{join(TELEMETRY_DATASTORE_LOCATION,option_location)}")
saved_files_tests = False
continue
print_test_result("there are only 10 items in the set, The test copy in each iterations more logs.",
len(files), MAX_LOG_FILES)
if len(files) != MAX_LOG_FILES:
saved_files_tests = False
files.sort(key=lambda p: p.name)
latest_file = files[0].name
saved_time = datetime.strptime(latest_file,OUTPUT_FILE_FORMAT)
different_time = datetime.now() - saved_time
print_test_result(f"the latest file saved at {join(locations_telemetry_csv,option_location)} before test running, more than 5 minutes {different_time.total_seconds() > 300}",
print_test_result(f"the latest file saved at {join(TELEMETRY_DATASTORE_LOCATION,option_location)}\
before test running, more than 5 minutes {different_time.total_seconds() > 300}",
different_time.total_seconds() > 300, False)
if different_time.total_seconds() > 300:
saved_files_tests = False
Expand Down Expand Up @@ -441,7 +470,7 @@ def main():

if not validate_simulation_data():
return 1

port = args.endpoint_port
url = f'http://0.0.0.0:{port}{args.url_suffix}'
print(f'---Starting endpoint {url}')
Expand Down

0 comments on commit 4769aca

Please sign in to comment.