From 239d3217b2f69c1f1590a953b34878bb6f13f5db Mon Sep 17 00:00:00 2001
From: mohamed
Date: Thu, 5 Jan 2023 17:26:32 +0200
Subject: [PATCH 1/3] add measured and sim funcs

---
 .../bjt_iv/device_netlists/npn.spice          |   6 +-
 .../bjt_iv/device_netlists/pnp.spice          |   6 +-
 .../regression/bjt_iv/models_regression.py    | 388 ++++++++----------
 3 files changed, 180 insertions(+), 220 deletions(-)

diff --git a/models/xyce/testing/regression/bjt_iv/device_netlists/npn.spice b/models/xyce/testing/regression/bjt_iv/device_netlists/npn.spice
index 7bac45c3..4e6ae228 100644
--- a/models/xyce/testing/regression/bjt_iv/device_netlists/npn.spice
+++ b/models/xyce/testing/regression/bjt_iv/device_netlists/npn.spice
@@ -18,9 +18,9 @@ xq1 c b 0 0 {{device}}
 *****************
 .DC Vcp 0 6 0.1 Ib 1u 9u 2u
 .STEP TEMP {{temp}} -60 200
-.print DC FORMAT=CSV file=npn/simulated_IcVc/{{i}}_simulated_{{device}}.csv {-I(Vcp)}
+.print DC FORMAT=CSV file=mos_iv_reg/npn/simulated/t{{temp}}_simulated_{{device}}.csv {-I(Vcp)}

-.include "../../../../../design.xyce"
-.lib "../../../../../sm141064.xyce" bjt_typical
+.include "../../../../../../design.xyce"
+.lib "../../../../../../sm141064.xyce" bjt_typical

 .end
diff --git a/models/xyce/testing/regression/bjt_iv/device_netlists/pnp.spice b/models/xyce/testing/regression/bjt_iv/device_netlists/pnp.spice
index 76d695f8..9ba61bfb 100644
--- a/models/xyce/testing/regression/bjt_iv/device_netlists/pnp.spice
+++ b/models/xyce/testing/regression/bjt_iv/device_netlists/pnp.spice
@@ -18,9 +18,9 @@ xq1 c b 0 {{device}}
 *****************
 .DC Vcp 0 -3 -0.1 Ib -1u -9u -2u
 .STEP TEMP {{temp}} -60 200
-.print DC FORMAT=CSV file=pnp/simulated_IcVc/{{i}}_simulated_{{device}}.csv {I(Vcp)}
+.print DC FORMAT=CSV file=mos_iv_reg/pnp/simulated/t{{temp}}_simulated_{{device}}.csv {I(Vcp)}

-.include "../../../../../design.xyce"
-.lib "../../../../../sm141064.xyce" bjt_typical
+.include "../../../../../../design.xyce"
+.lib "../../../../../../sm141064.xyce" bjt_typical

 .end
diff --git a/models/xyce/testing/regression/bjt_iv/models_regression.py b/models/xyce/testing/regression/bjt_iv/models_regression.py
index 6abd245c..d9564b17 100644
--- a/models/xyce/testing/regression/bjt_iv/models_regression.py
+++ b/models/xyce/testing/regression/bjt_iv/models_regression.py
@@ -16,10 +16,12 @@
 import concurrent.futures
 import shutil
 import warnings
+import multiprocessing as mp
+import glob

 warnings.simplefilter(action="ignore", category=FutureWarning)
-
-
+pd.options.mode.chained_assignment = None  # default='warn'
+MOS=["ibp=1.000E-06", "ibp=3.000E-06", "ibp=5.000E-06", "ibp=7.000E-06", "ibp=9.000E-06"]
 def call_simulator(file_name):
     """Call simulation commands to perform simulation.
     Args:
@@ -28,11 +30,12 @@ def call_simulator(file_name):
     os.system(f"Xyce -hspice-ext all {file_name} -l {file_name}.log")


-def ext_measured(device, vc, step, Id_sim, list_devices, ib):
+def ext_measured(dirpath,device, vc, step, list_devices, ib):

     # Get dimensions used for each device
-    dimensions = pd.read_csv(f"{device}/{device}.csv", usecols=["corners"])
+    dimensions = pd.read_csv(f"{dirpath}/{device}.csv", usecols=["corners"])
     loops = dimensions["corners"].count()
+    all_dfs = []

     # Extracting measured values for each Device
     for i in range(loops):
@@ -55,7 +58,7 @@ def ext_measured(device, vc, step, Id_sim, list_devices, ib):
                 f"{ib}{step[3]}",
                 f"{ib}{step[4]}",
             ]
-            df_measured = pd.read_csv(f"{device}/{device}.csv", usecols=col_list)
+            df_measured = pd.read_csv(f"{dirpath}/{device}.csv", usecols=col_list)
             df_measured.columns = [
                 f"{vc}",
                 f"{ib}{step[0]}",
@@ -64,10 +67,6 @@ def ext_measured(device, vc, step, Id_sim, list_devices, ib):
                 f"{ib}{step[3]}",
                 f"{ib}{step[4]}",
             ]
-            df_measured.to_csv(
-                f"{device}/measured_{Id_sim}/{i}_measured_{list_devices[k]}.csv",
-                index=False,
-            )
         else:
             if device == "pnp":
                 vc = temp_vc
@@ -80,7 +79,7 @@ def ext_measured(device, vc, step, Id_sim, list_devices, ib):
                 f"{ib}{step[3]}.{i}",
                 f"{ib}{step[4]}.{i}",
             ]
-            df_measured = pd.read_csv(f"{device}/{device}.csv", usecols=col_list)
+            df_measured = pd.read_csv(f"{dirpath}/{device}.csv", usecols=col_list)
             df_measured.columns = [
                 f"{vc}",
                 f"{ib}{step[0]}",
@@ -89,217 +88,177 @@ def ext_measured(device, vc, step, Id_sim, list_devices, ib):
                 f"{ib}{step[3]}",
                 f"{ib}{step[4]}",
             ]
-            df_measured.to_csv(
-                f"{device}/measured_{Id_sim}/{i}_measured_{list_devices[k]}.csv",
-                index=False,
-            )
+            all_dfs.append(df_measured)
+    dfs = pd.concat(all_dfs, axis=1)
+    dfs.drop_duplicates(inplace=True)
+    return dfs


-def ext_simulated(device, vc, step, sweep, Id_sim, list_devices, ib):
+def run_sim(dirpath, device, list_devices, temp=25):
+    """Run a single Ic-Vc simulation for one device instance at one temperature.
+    Args:
+        dirpath(str): path to the directory where the generated netlists
+            and simulation results are written
+        device(str): bjt family to be simulated (npn or pnp)
+        list_devices(str): name of the device instance to be simulated
+        temp(float): a specific temperature for the simulation,
+            defaults to 25
+
+    Returns:
+        info(dict): the device name, temperature and the path of the
+        simulated csv file, collected by run_sims into one dataframe
+    """
+    netlist_tmp = f"device_netlists/{device}.spice"
+
+    info = {}
+    info["device"] = device
+    info["temp"] = temp
+    info["dev"] = list_devices
+
+
+
+    temp_str = temp
+    list_devices_str = list_devices
+
+    s = f"{list_devices_str}netlist_t{temp_str}.spice"
+    netlist_path = f"{dirpath}/{device}_netlists/{s}"
+    s = f"t{temp}_simulated_{list_devices_str}.csv"
+    result_path = f"{dirpath}/simulated/{s}"
+    os.makedirs(f"{dirpath}/simulated", exist_ok=True)
+
+    with open(netlist_tmp) as f:
+        tmpl = Template(f.read())
+        os.makedirs(f"{dirpath}/{device}_netlists", exist_ok=True)
+        with open(netlist_path, "w") as netlist:
+            netlist.write(
+                tmpl.render(
+                    device=list_devices_str,
+                    temp=temp_str
+
+                )
+            )

-    # Get dimensions used for each device
-    dimensions = pd.read_csv(f"{device}/{device}.csv", usecols=["corners"])
-    loops = dimensions["corners"].count()
-    temp_range = int(loops / 4)
-    netlist_tmp = f"./device_netlists/{device}.spice"
-    for i in range(loops):
-        if i in range(0, temp_range):
-            temp = 25
-        elif i in range(temp_range, 2 * temp_range):
-            temp = -40
-        elif i in range(2 * temp_range, 3 * temp_range):
-            temp = 125
+    # Running ngspice for each netlist
+
try: + call_simulator(netlist_path) + + if os.path.exists(result_path): + bjt_iv = result_path else: - temp = 175 + bjt_iv = "None" - k = i - if i >= len(list_devices): - while k >= len(list_devices): - k = k - len(list_devices) + except Exception: + mos_iv = "None" - with open(netlist_tmp) as f: - tmpl = Template(f.read()) - os.makedirs(f"{device}/{device}_netlists_{Id_sim}", exist_ok=True) - with open( - f"{device}/{device}_netlists_{Id_sim}/{i}_{device}_netlist_{list_devices[k]}.spice", - "w", - ) as netlist: - netlist.write(tmpl.render(device=list_devices[k], i=i, temp=temp)) - netlist_path = f"{device}/{device}_netlists_{Id_sim}/{i}_{device}_netlist_{list_devices[k]}.spice" - - # Running Xyce for each netlist - with concurrent.futures.ProcessPoolExecutor( - max_workers=workers_count - ) as executor: - executor.submit(call_simulator, netlist_path) - - # Writing simulated data - df_simulated = pd.read_csv( - f"{device}/simulated_{Id_sim}/{i}_simulated_{list_devices[k]}.csv", - header=0, - ) + info["bjt_iv_simulated"] = bjt_iv - # empty array to append in it shaped (sweep, number of trials + 1) - new_array = np.empty((sweep, 1 + int(df_simulated.shape[0] / sweep))) - new_array[:, 0] = df_simulated.iloc[:sweep, 0] - times = int(df_simulated.shape[0] / sweep) - - for j in range(times): - new_array[:, (j + 1)] = df_simulated.iloc[ - j * sweep : (j + 1) * sweep, 0 - ] - - # Writing final simulated data - df_simulated = pd.DataFrame(new_array) - df_simulated.to_csv( - f"{device}/simulated_{Id_sim}/{i}_simulated_{list_devices[k]}.csv", - index=False, - ) - df_simulated.columns = [ - f"{vc}", - f"{ib}{step[0]}", - f"{ib}{step[1]}", - f"{ib}{step[2]}", - f"{ib}{step[3]}", - f"{ib}{step[4]}", - ] - df_simulated.to_csv( - f"{device}/simulated_{Id_sim}/{i}_simulated_{list_devices[k]}.csv", - index=False, - ) + return info -def error_cal(device, vc, step, Id_sim, list_devices, ib): +def run_sims( dirpath, list_devices,device, num_workers=mp.cpu_count()): + """passing netlists to run_sim function + and storing the results csv files into dataframes - df_final = pd.DataFrame() - # Get dimensions used for each device - dimensions = pd.read_csv(f"{device}/{device}.csv", usecols=["corners"]) - loops = dimensions["corners"].count() + Args: + df(pd.DataFrame): dataframe passed from the ext_measured function + dirpath(str): the path to the file where we write data + id_rds(str): select id or rds + num_workers=mp.cpu_count() (int): num of cpu used + device(str): name of the device + Returns: + df(pd.DataFrame): dataframe contains simulated results + """ + df1 = pd.read_csv(f"{dirpath}/{device}.csv", usecols=["corners"]) + loops = (df1["corners"]).count() temp_range = int(loops / 4) - for i in range(loops): - if i in range(0, temp_range): - temp = 25 - elif i in range(temp_range, 2 * temp_range): - temp = -40 - elif i in range(2 * temp_range, 3 * temp_range): - temp = 125 - else: - temp = 175 - - k = i - if i >= len(list_devices): - while k >= len(list_devices): - k = k - len(list_devices) + df=pd.DataFrame() + df["dev"]=df1["corners"].dropna() + df["dev"][0:temp_range]=list_devices + df["dev"][temp_range:2*temp_range]=list_devices + df["dev"][2*temp_range:3*temp_range]=list_devices + df["dev"][3*temp_range:4*temp_range]=list_devices + df["temp"]=25 + df["temp"][temp_range :2 * temp_range]=-40 + df["temp"][2*temp_range :3 * temp_range]=125 + df["temp"][3*temp_range :]=-175 + + + results = [] + with concurrent.futures.ThreadPoolExecutor( + max_workers=num_workers + ) as executor: + futures_list = [] + for j, 
row in df.iterrows(): + futures_list.append( + executor.submit( + run_sim, + dirpath, + device, + row["dev"], + row["temp"], + ) + ) - measured = pd.read_csv( - f"{device}/measured_{Id_sim}/{i}_measured_{list_devices[k]}.csv" + for future in concurrent.futures.as_completed(futures_list): + try: + data = future.result() + results.append(data) + except Exception as exc: + print("Test case generated an exception: %s" % (exc)) + sf = glob.glob(f"{dirpath}/simulated/*.csv") + # sweeping on all generated cvs files + for i in range(len(sf)): + sdf = pd.read_csv( + sf[i], + header=None, + delimiter=r"\s+", ) - simulated = pd.read_csv( - f"{device}/simulated_{Id_sim}/{i}_simulated_{list_devices[k]}.csv" + sweep = int(sdf[0].count() / len(MOS)) + new_array = np.empty((sweep, 1 + int(sdf.shape[0] / sweep))) + + new_array[:, 0] = sdf.iloc[1:sweep+1, 0] + times = int(sdf.shape[0] / sweep) + + for j in range(times): + new_array[:, (j + 1)] = sdf.iloc[(j * sweep)+1: ((j + 1) * sweep)+1 , 0] + + # Writing final simulated data 1 + sdf = pd.DataFrame(new_array) + sdf.rename( + columns={ + 0: "ibp1", + 1: "ibp2", + 2: "ibp3", + 3: "ibp4", + 4: "ibp5" + }, + inplace=True, ) + sdf.to_csv(sf[i], index=False) - error_1 = round( - 100 - * abs( - (abs(measured.iloc[0:, 1]) - abs(simulated.iloc[0:, 1])) - / abs(measured.iloc[:, 1]) - ), - 6, - ) - error_2 = round( - 100 - * abs( - (abs(measured.iloc[0:, 2]) - abs(simulated.iloc[0:, 2])) - / abs(measured.iloc[:, 2]) - ), - 6, - ) - error_3 = round( - 100 - * abs( - (abs(measured.iloc[0:, 3]) - abs(simulated.iloc[0:, 3])) - / abs(measured.iloc[:, 3]) - ), - 6, - ) - error_4 = round( - 100 - * abs( - (abs(measured.iloc[0:, 4]) - abs(simulated.iloc[0:, 4])) - / abs(measured.iloc[:, 4]) - ), - 6, - ) - error_5 = round( - 100 - * abs( - (abs(measured.iloc[0:, 5]) - abs(simulated.iloc[0:, 5])) - / abs(measured.iloc[:, 5]) - ), - 6, - ) + df = pd.DataFrame(results) + return df - df_error = pd.DataFrame( - data=[measured.iloc[:, 0], error_1, error_2, error_3, error_4, error_5] - ).transpose() - df_error.to_csv( - f"{device}/error_{Id_sim}/{i}_{device}_error_{list_devices[k]}.csv", - index=False, - ) - # Mean error - mean_error = ( - df_error[f"{ib}{step[0]}"].mean() - + df_error[f"{ib}{step[1]}"].mean() - + df_error[f"{ib}{step[2]}"].mean() - + df_error[f"{ib}{step[3]}"].mean() - + df_error[f"{ib}{step[4]}"].mean() - ) / 6 - # Max error - max_error = ( - df_error[ - [ - f"{ib}{step[0]}", - f"{ib}{step[1]}", - f"{ib}{step[2]}", - f"{ib}{step[3]}", - f"{ib}{step[4]}", - ] - ] - .max() - .max() - ) - # Max error location - max_index = max((df_error == max_error).idxmax()) - max_location_ib = (df_error == max_error).idxmax(axis=1)[max_index] - if i == 0: - if device == "pnp": - temp_vc = vc - vc = "-vc " - else: - if device == "pnp": - vc = temp_vc - max_location_vc = df_error[f"{vc}"][max_index] - - df_final_ = { - "Run no.": f"{i}", - "Temp": f"{temp}", - "Device name": f"{device}", - "device": f"{list_devices[k]}", - "Simulated_Val": f"{Id_sim}", - "Mean error%": f'{"{:.2f}".format(mean_error)}', - "Max error%": f'{"{:.2f}".format(max_error)} @ {max_location_ib} & Vc (V) = {max_location_vc}', - } - df_final = df_final.append(df_final_, ignore_index=True) - - # Max mean error - print(df_final) - df_final.to_csv(f"{device}/Final_report_{Id_sim}.csv", index=False) - out_report = pd.read_csv(f"{device}/Final_report_{Id_sim}.csv") - print("\n", f"Max. 
mean error = {out_report['Mean error%'].max()}%") - print( - "=====================================================================================================================================================" - ) + +def error_cal( + df: pd.DataFrame, + sim_df: pd.DataFrame, + meas_df: pd.DataFrame, + dev_path: str, + device: str, +) -> None: + """error function calculates the error between measured, simulated data + + Args: + df(pd.DataFrame): Dataframe contains devices and csv files + which represent measured, simulated data + sim_df(pd.DataFrame): Dataframe contains devices and csv files simulated + meas_df(pd.DataFrame): Dataframe contains devices and csv files measured + dev_path(str): The path in which we write data + + """ + def main(): @@ -330,28 +289,29 @@ def main(): for i, device in enumerate(devices): # Folder structure of measured values - dirpath = f"{device}" + dirpath = f"mos_iv_reg/{device}" if os.path.exists(dirpath) and os.path.isdir(dirpath): shutil.rmtree(dirpath) - os.makedirs(f"{device}/measured_{Id_sim}", exist_ok=False) + os.makedirs(f"{dirpath}", exist_ok=False) # From xlsx to csv read_file = pd.read_excel( f"../../180MCU_SPICE_DATA/BJT/bjt_{device}_icvc_f.nl_out.xlsx" ) - read_file.to_csv(f"{device}/{device}.csv", index=False, header=True) + read_file.to_csv(f"{dirpath}/{device}.csv", index=False, header=True) # Folder structure of simulated values - os.makedirs(f"{device}/simulated_{Id_sim}", exist_ok=False) - os.makedirs(f"{device}/error_{Id_sim}", exist_ok=False) + os.makedirs(f"{dirpath}/simulated", exist_ok=False) + os.makedirs(f"{dirpath}/error_{Id_sim}", exist_ok=False) # =========== Simulate ============== - ext_measured(device, vc[i], step, Id_sim, list_devices[i], ib[i]) - - ext_simulated(device, vc[i], step, sweep[i], Id_sim, list_devices[i], ib[i]) - + df=ext_measured(dirpath,device, vc[i], step, list_devices[i], ib[i]) + + run_sims( dirpath,list_devices[i], device, num_workers=mp.cpu_count()) + # ext_simulated(dirpath,device, vc[i], step, sweep[i], Id_sim, list_devices[i], ib[i]) + # ============ Results ============= - error_cal(device, vc[i], step, Id_sim, list_devices[i], ib[i]) + # error_cal(dirpath,device,df, vc[i], step, Id_sim, list_devices[i], ib[i]) # ================================================================ From 6223d667f61cfa09eb35a124070c7ca9f4e63e2a Mon Sep 17 00:00:00 2001 From: mohamed Date: Mon, 9 Jan 2023 12:38:40 +0200 Subject: [PATCH 2/3] complete the code --- .../regression/bjt_iv/models_regression.py | 187 ++++++++++++++++-- 1 file changed, 168 insertions(+), 19 deletions(-) diff --git a/models/xyce/testing/regression/bjt_iv/models_regression.py b/models/xyce/testing/regression/bjt_iv/models_regression.py index d9564b17..220a6222 100644 --- a/models/xyce/testing/regression/bjt_iv/models_regression.py +++ b/models/xyce/testing/regression/bjt_iv/models_regression.py @@ -18,7 +18,7 @@ import warnings import multiprocessing as mp import glob - +PASS_THRESH = 2.0 warnings.simplefilter(action="ignore", category=FutureWarning) pd.options.mode.chained_assignment = None # default='warn' MOS=["ibp=1.000E-06", "ibp=3.000E-06", "ibp=5.000E-06", "ibp=7.000E-06", "ibp=9.000E-06"] @@ -27,7 +27,7 @@ def call_simulator(file_name): Args: file_name (str): Netlist file name. 
""" - os.system(f"Xyce -hspice-ext all {file_name} -l {file_name}.log") + os.system(f"Xyce -hspice-ext all {file_name} -l {file_name}.log 2>/dev/null") def ext_measured(dirpath,device, vc, step, list_devices, ib): @@ -148,7 +148,7 @@ def run_sim(dirpath, device, list_devices, temp=25): bjt_iv = "None" except Exception: - mos_iv = "None" + bjt_iv = "None" info["bjt_iv_simulated"] = bjt_iv @@ -206,6 +206,7 @@ def run_sims( dirpath, list_devices,device, num_workers=mp.cpu_count()): except Exception as exc: print("Test case generated an exception: %s" % (exc)) sf = glob.glob(f"{dirpath}/simulated/*.csv") + # sweeping on all generated cvs files for i in range(len(sf)): sdf = pd.read_csv( @@ -226,27 +227,29 @@ def run_sims( dirpath, list_devices,device, num_workers=mp.cpu_count()): sdf = pd.DataFrame(new_array) sdf.rename( columns={ - 0: "ibp1", - 1: "ibp2", - 2: "ibp3", - 3: "ibp4", - 4: "ibp5" + 1: "ibp1", + 2: "ibp2", + 3: "ibp3", + 4: "ibp4", + 5: "ibp5" }, inplace=True, ) sdf.to_csv(sf[i], index=False) - df = pd.DataFrame(results) + df1 = pd.DataFrame(results) + return df def error_cal( - df: pd.DataFrame, sim_df: pd.DataFrame, meas_df: pd.DataFrame, - dev_path: str, device: str, + step, + ib, + vc ) -> None: """error function calculates the error between measured, simulated data @@ -258,7 +261,108 @@ def error_cal( dev_path(str): The path in which we write data """ - + merged_dfs = list() + meas_df.to_csv(f"mos_iv_reg/{device}/{device}_measured.csv", index=False, header=True) + meas_df=pd.read_csv(f"mos_iv_reg/{device}/{device}_measured.csv") + for i in range (len(sim_df)): + t=sim_df["temp"].iloc[i] + dev=sim_df["dev"].iloc[i] + sim_path= f"mos_iv_reg/{device}/simulated/t{t}_simulated_{dev}.csv" + + simulated_data = pd.read_csv(sim_path) + if i==0: + measured_data = meas_df[ + [ + f"{ib}{step[0]}", + f"{ib}{step[1]}", + f"{ib}{step[2]}", + f"{ib}{step[3]}", + f"{ib}{step[4]}", + + ] + ].copy() + + measured_data.rename( + columns={ + f"{ib}{step[0]}":"m_ibp1", + f"{ib}{step[1]}":"m_ibp2", + f"{ib}{step[2]}":"m_ibp3", + f"{ib}{step[3]}":"m_ibp4", + f"{ib}{step[4]}":"m_ibp5"}, + inplace=True + ) + else: + measured_data = meas_df[ + [ + f"{ib}{step[0]}.{i}", + f"{ib}{step[1]}.{i}", + f"{ib}{step[2]}.{i}", + f"{ib}{step[3]}.{i}", + f"{ib}{step[4]}.{i}", + + ] + ].copy() + + measured_data.rename( + columns={ + f"{ib}{step[0]}.{i}":"m_ibp1", + f"{ib}{step[1]}.{i}":"m_ibp2", + f"{ib}{step[2]}.{i}":"m_ibp3", + f"{ib}{step[3]}.{i}":"m_ibp4", + f"{ib}{step[4]}.{i}":"m_ibp5"} + ,inplace=True + ) + measured_data["vcp"]=meas_df[f"{vc}"] + simulated_data["vcp"]=meas_df[f"{vc}"] + simulated_data["device"]=sim_df["dev"].iloc[i] + measured_data["device"]=sim_df["dev"].iloc[i] + simulated_data["temp"]=sim_df["temp"].iloc[i] + measured_data["temp"]=sim_df["temp"].iloc[i] + result_data = simulated_data.merge(measured_data, how="left") + + result_data["step1_error"] = ( + np.abs(result_data["ibp1"] - result_data["m_ibp1"]) + * 100.0 + / (result_data["m_ibp1"]) + ) + result_data["step2_error"] = ( + np.abs(result_data["ibp2"] - result_data["m_ibp2"]) + * 100.0 + / (result_data["m_ibp2"]) + ) + result_data["step3_error"] = ( + np.abs(result_data["ibp3"] - result_data["m_ibp3"]) + * 100.0 + / (result_data["m_ibp3"]) + ) + result_data["step4_error"] = ( + np.abs(result_data["ibp4"] - result_data["m_ibp4"]) + * 100.0 + / (result_data["m_ibp4"]) + ) + result_data["step5_error"] = ( + np.abs(result_data["ibp5"] - result_data["m_ibp5"]) + * 100.0 + / (result_data["m_ibp5"]) + ) + result_data["error"] = ( + np.abs( + 
result_data["step1_error"] + + result_data["step2_error"] + + result_data["step3_error"] + + result_data["step4_error"] + + result_data["step5_error"] + ) + / 5 + ) + + merged_dfs.append(result_data) + merged_out = pd.concat(merged_dfs) + merged_out.fillna(0, inplace=True) + merged_out.to_csv( + f"mos_iv_reg/{device}/error_analysis.csv", index=False + ) + return merged_out def main(): @@ -284,9 +388,7 @@ def main(): vc = ["vcp ", "-vc (A)"] ib = ["ibp =", "ib =-"] Id_sim = "IcVc" - sweep = [61, 31] step = ["1.000E-06", "3.000E-06", "5.000E-06", "7.000E-06", "9.000E-06"] - for i, device in enumerate(devices): # Folder structure of measured values dirpath = f"mos_iv_reg/{device}" @@ -302,17 +404,64 @@ def main(): # Folder structure of simulated values os.makedirs(f"{dirpath}/simulated", exist_ok=False) - os.makedirs(f"{dirpath}/error_{Id_sim}", exist_ok=False) + # =========== Simulate ============== df=ext_measured(dirpath,device, vc[i], step, list_devices[i], ib[i]) - run_sims( dirpath,list_devices[i], device, num_workers=mp.cpu_count()) - # ext_simulated(dirpath,device, vc[i], step, sweep[i], Id_sim, list_devices[i], ib[i]) - + sims=run_sims( dirpath,list_devices[i], device, num_workers=mp.cpu_count()) # ============ Results ============= - # error_cal(dirpath,device,df, vc[i], step, Id_sim, list_devices[i], ib[i]) + merged_all=error_cal(sims,df,device,step,ib[i],vc[i]) + + for dev in list_devices[i]: + min_error_total = float() + max_error_total = float() + error_total = float() + number_of_existance = int() + + # number of rows in the final excel sheet + num_rows = merged_all["device"].count() + + for n in range(num_rows): + if dev == merged_all["device"].iloc[n]: + number_of_existance += 1 + error_total += merged_all["error"].iloc[n] + if merged_all["error"].iloc[n] > max_error_total: + max_error_total = merged_all["error"].iloc[n] + elif merged_all["error"].iloc[n] < min_error_total: + min_error_total = merged_all["error"].iloc[n] + + mean_error_total = error_total / number_of_existance + + # Making sure that min, max, mean errors are not > 100% + if min_error_total > 100: + min_error_total = 100 + + if max_error_total > 100: + max_error_total = 100 + + if mean_error_total > 100: + mean_error_total = 100 + + # printing min, max, mean errors to the consol + print( + "# Device {} min error: {:.2f}".format(dev, min_error_total), + ", max error: {:.2f}, mean error {:.2f}".format( + max_error_total, mean_error_total + ), + ) + + if max_error_total < PASS_THRESH: + print("# Device {} has passed regression.".format(dev)) + else: + print( + "# Device {} has failed regression. 
Needs more analysis.".format( + dev + ) + ) + print("\n\n") + print("\n\n") # ================================================================ # -------------------------- MAIN -------------------------------- From 398874a4cc16a9ec6daccea41e79f8b402f66d4e Mon Sep 17 00:00:00 2001 From: mohamed Date: Mon, 9 Jan 2023 18:19:51 +0200 Subject: [PATCH 3/3] apply black and flake8 --- .../regression/bjt_iv/models_regression.py | 154 ++++++++---------- 1 file changed, 70 insertions(+), 84 deletions(-) diff --git a/models/xyce/testing/regression/bjt_iv/models_regression.py b/models/xyce/testing/regression/bjt_iv/models_regression.py index 220a6222..ee8b9a04 100644 --- a/models/xyce/testing/regression/bjt_iv/models_regression.py +++ b/models/xyce/testing/regression/bjt_iv/models_regression.py @@ -18,10 +18,19 @@ import warnings import multiprocessing as mp import glob + PASS_THRESH = 2.0 warnings.simplefilter(action="ignore", category=FutureWarning) pd.options.mode.chained_assignment = None # default='warn' -MOS=["ibp=1.000E-06", "ibp=3.000E-06", "ibp=5.000E-06", "ibp=7.000E-06", "ibp=9.000E-06"] +MOS = [ + "ibp=1.000E-06", + "ibp=3.000E-06", + "ibp=5.000E-06", + "ibp=7.000E-06", + "ibp=9.000E-06", +] + + def call_simulator(file_name): """Call simulation commands to perform simulation. Args: @@ -30,7 +39,7 @@ def call_simulator(file_name): os.system(f"Xyce -hspice-ext all {file_name} -l {file_name}.log 2>/dev/null") -def ext_measured(dirpath,device, vc, step, list_devices, ib): +def ext_measured(dirpath, device, vc, step, list_devices, ib): # Get dimensions used for each device dimensions = pd.read_csv(f"{dirpath}/{device}.csv", usecols=["corners"]) @@ -90,7 +99,7 @@ def ext_measured(dirpath,device, vc, step, list_devices, ib): ] all_dfs.append(df_measured) dfs = pd.concat(all_dfs, axis=1) - dfs.drop_duplicates(inplace=True) + dfs.drop_duplicates(inplace=True) return dfs @@ -115,8 +124,6 @@ def run_sim(dirpath, device, list_devices, temp=25): info["temp"] = temp info["dev"] = list_devices - - temp_str = temp list_devices_str = list_devices @@ -130,13 +137,7 @@ def run_sim(dirpath, device, list_devices, temp=25): tmpl = Template(f.read()) os.makedirs(f"{dirpath}/{device}_netlists", exist_ok=True) with open(netlist_path, "w") as netlist: - netlist.write( - tmpl.render( - device=list_devices_str, - temp=temp_str - - ) - ) + netlist.write(tmpl.render(device=list_devices_str, temp=temp_str)) # Running ngspice for each netlist try: @@ -155,7 +156,7 @@ def run_sim(dirpath, device, list_devices, temp=25): return info -def run_sims( dirpath, list_devices,device, num_workers=mp.cpu_count()): +def run_sims(dirpath, list_devices, device, num_workers=mp.cpu_count()): """passing netlists to run_sim function and storing the results csv files into dataframes @@ -171,22 +172,19 @@ def run_sims( dirpath, list_devices,device, num_workers=mp.cpu_count()): df1 = pd.read_csv(f"{dirpath}/{device}.csv", usecols=["corners"]) loops = (df1["corners"]).count() temp_range = int(loops / 4) - df=pd.DataFrame() - df["dev"]=df1["corners"].dropna() - df["dev"][0:temp_range]=list_devices - df["dev"][temp_range:2*temp_range]=list_devices - df["dev"][2*temp_range:3*temp_range]=list_devices - df["dev"][3*temp_range:4*temp_range]=list_devices - df["temp"]=25 - df["temp"][temp_range :2 * temp_range]=-40 - df["temp"][2*temp_range :3 * temp_range]=125 - df["temp"][3*temp_range :]=-175 - + df = pd.DataFrame() + df["dev"] = df1["corners"].dropna() + df["dev"][0:temp_range] = list_devices + df["dev"][temp_range : 2 * temp_range] = list_devices 
+ df["dev"][2 * temp_range : 3 * temp_range] = list_devices + df["dev"][3 * temp_range : 4 * temp_range] = list_devices + df["temp"] = 25 + df["temp"][temp_range : 2 * temp_range] = -40 + df["temp"][2 * temp_range : 3 * temp_range] = 125 + df["temp"][3 * temp_range :] = -175 results = [] - with concurrent.futures.ThreadPoolExecutor( - max_workers=num_workers - ) as executor: + with concurrent.futures.ThreadPoolExecutor(max_workers=num_workers) as executor: futures_list = [] for j, row in df.iterrows(): futures_list.append( @@ -206,7 +204,7 @@ def run_sims( dirpath, list_devices,device, num_workers=mp.cpu_count()): except Exception as exc: print("Test case generated an exception: %s" % (exc)) sf = glob.glob(f"{dirpath}/simulated/*.csv") - + # sweeping on all generated cvs files for i in range(len(sf)): sdf = pd.read_csv( @@ -217,22 +215,16 @@ def run_sims( dirpath, list_devices,device, num_workers=mp.cpu_count()): sweep = int(sdf[0].count() / len(MOS)) new_array = np.empty((sweep, 1 + int(sdf.shape[0] / sweep))) - new_array[:, 0] = sdf.iloc[1:sweep+1, 0] + new_array[:, 0] = sdf.iloc[1 : sweep + 1, 0] times = int(sdf.shape[0] / sweep) for j in range(times): - new_array[:, (j + 1)] = sdf.iloc[(j * sweep)+1: ((j + 1) * sweep)+1 , 0] + new_array[:, (j + 1)] = sdf.iloc[(j * sweep) + 1 : ((j + 1) * sweep) + 1, 0] # Writing final simulated data 1 sdf = pd.DataFrame(new_array) sdf.rename( - columns={ - 1: "ibp1", - 2: "ibp2", - 3: "ibp3", - 4: "ibp4", - 5: "ibp5" - }, + columns={1: "ibp1", 2: "ibp2", 3: "ibp3", 4: "ibp4", 5: "ibp5"}, inplace=True, ) sdf.to_csv(sf[i], index=False) @@ -242,14 +234,8 @@ def run_sims( dirpath, list_devices,device, num_workers=mp.cpu_count()): return df - def error_cal( - sim_df: pd.DataFrame, - meas_df: pd.DataFrame, - device: str, - step, - ib, - vc + sim_df: pd.DataFrame, meas_df: pd.DataFrame, device: str, step, ib, vc ) -> None: """error function calculates the error between measured, simulated data @@ -262,15 +248,17 @@ def error_cal( """ merged_dfs = list() - meas_df.to_csv(f"mos_iv_reg/{device}/{device}_measured.csv", index=False, header=True) - meas_df=pd.read_csv(f"mos_iv_reg/{device}/{device}_measured.csv") - for i in range (len(sim_df)): - t=sim_df["temp"].iloc[i] - dev=sim_df["dev"].iloc[i] - sim_path= f"mos_iv_reg/{device}/simulated/t{t}_simulated_{dev}.csv" + meas_df.to_csv( + f"mos_iv_reg/{device}/{device}_measured.csv", index=False, header=True + ) + meas_df = pd.read_csv(f"mos_iv_reg/{device}/{device}_measured.csv") + for i in range(len(sim_df)): + t = sim_df["temp"].iloc[i] + dev = sim_df["dev"].iloc[i] + sim_path = f"mos_iv_reg/{device}/simulated/t{t}_simulated_{dev}.csv" simulated_data = pd.read_csv(sim_path) - if i==0: + if i == 0: measured_data = meas_df[ [ f"{ib}{step[0]}", @@ -278,19 +266,19 @@ def error_cal( f"{ib}{step[2]}", f"{ib}{step[3]}", f"{ib}{step[4]}", - ] ].copy() measured_data.rename( - columns={ - f"{ib}{step[0]}":"m_ibp1", - f"{ib}{step[1]}":"m_ibp2", - f"{ib}{step[2]}":"m_ibp3", - f"{ib}{step[3]}":"m_ibp4", - f"{ib}{step[4]}":"m_ibp5"}, - inplace=True - ) + columns={ + f"{ib}{step[0]}": "m_ibp1", + f"{ib}{step[1]}": "m_ibp2", + f"{ib}{step[2]}": "m_ibp3", + f"{ib}{step[3]}": "m_ibp4", + f"{ib}{step[4]}": "m_ibp5", + }, + inplace=True, + ) else: measured_data = meas_df[ [ @@ -299,27 +287,27 @@ def error_cal( f"{ib}{step[2]}.{i}", f"{ib}{step[3]}.{i}", f"{ib}{step[4]}.{i}", - ] ].copy() measured_data.rename( - columns={ - f"{ib}{step[0]}.{i}":"m_ibp1", - f"{ib}{step[1]}.{i}":"m_ibp2", - f"{ib}{step[2]}.{i}":"m_ibp3", - 
f"{ib}{step[3]}.{i}":"m_ibp4", - f"{ib}{step[4]}.{i}":"m_ibp5"} - ,inplace=True - ) - measured_data["vcp"]=meas_df[f"{vc}"] - simulated_data["vcp"]=meas_df[f"{vc}"] - simulated_data["device"]=sim_df["dev"].iloc[i] - measured_data["device"]=sim_df["dev"].iloc[i] - simulated_data["temp"]=sim_df["temp"].iloc[i] - measured_data["temp"]=sim_df["temp"].iloc[i] + columns={ + f"{ib}{step[0]}.{i}": "m_ibp1", + f"{ib}{step[1]}.{i}": "m_ibp2", + f"{ib}{step[2]}.{i}": "m_ibp3", + f"{ib}{step[3]}.{i}": "m_ibp4", + f"{ib}{step[4]}.{i}": "m_ibp5", + }, + inplace=True, + ) + measured_data["vcp"] = meas_df[f"{vc}"] + simulated_data["vcp"] = meas_df[f"{vc}"] + simulated_data["device"] = sim_df["dev"].iloc[i] + measured_data["device"] = sim_df["dev"].iloc[i] + simulated_data["temp"] = sim_df["temp"].iloc[i] + measured_data["temp"] = sim_df["temp"].iloc[i] result_data = simulated_data.merge(measured_data, how="left") - + result_data["step1_error"] = ( np.abs(result_data["ibp1"] - result_data["m_ibp1"]) * 100.0 @@ -359,9 +347,7 @@ def error_cal( merged_dfs.append(result_data) merged_out = pd.concat(merged_dfs) merged_out.fillna(0, inplace=True) - merged_out.to_csv( - f"mos_iv_reg/{device}/error_analysis.csv", index=False - ) + merged_out.to_csv(f"mos_iv_reg/{device}/error_analysis.csv", index=False) return merged_out @@ -405,13 +391,12 @@ def main(): # Folder structure of simulated values os.makedirs(f"{dirpath}/simulated", exist_ok=False) - # =========== Simulate ============== - df=ext_measured(dirpath,device, vc[i], step, list_devices[i], ib[i]) - - sims=run_sims( dirpath,list_devices[i], device, num_workers=mp.cpu_count()) + df = ext_measured(dirpath, device, vc[i], step, list_devices[i], ib[i]) + + sims = run_sims(dirpath, list_devices[i], device, num_workers=mp.cpu_count()) # ============ Results ============= - merged_all=error_cal(sims,df,device,step,ib[i],vc[i]) + merged_all = error_cal(sims, df, device, step, ib[i], vc[i]) for dev in list_devices[i]: min_error_total = float() @@ -463,6 +448,7 @@ def main(): print("\n\n") + # ================================================================ # -------------------------- MAIN -------------------------------- # ================================================================