Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Dataset learning #9

Open
wants to merge 31 commits into
base: dev
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 26 commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
de5c010
data loading alpha version
skourta Jan 23, 2023
50658fc
removed breakpoints and fixed code style to match previous code
skourta Jan 24, 2023
6985d62
added support for the legality check of skewing
skourta Jan 24, 2023
8bdd2c2
removed breakpoint
skourta Jan 24, 2023
c84c891
working on resuming learning
skourta Jan 25, 2023
1643fc2
refactored code from later commit
skourta Jan 25, 2023
6c8b39b
fixed loading exec time from dataset and cleaned some code
skourta Jan 26, 2023
617b692
refactored checking the hostname in function for future changes
skourta Jan 26, 2023
00ff9b6
fixed invalidating exec time on init time
skourta Jan 26, 2023
3422a62
load new version of dataset and save it
skourta Jan 27, 2023
0300f24
dataset learning with one function at a time
skourta Feb 3, 2023
0455369
removed import bug
skourta Feb 3, 2023
d5efeca
data saving in multiple formats
skourta Feb 3, 2023
5e1f766
data saving in multiple formats
skourta Feb 3, 2023
c4c162f
added solver results to the dataset, fixed circular imports and remov…
skourta Feb 6, 2023
6b78258
changed saving frequency to increase performance
skourta Feb 6, 2023
c7a51a2
Fixing conflicts
skourta Feb 6, 2023
09d2c6e
Added comments explaining dataset config part
skourta Feb 6, 2023
28debb3
fixed conflicts
skourta Feb 7, 2023
a443a0a
Merge pull request #10 from Tiramisu-Compiler/load_model
skourta Feb 7, 2023
2f245d6
changed back the commands to no lz to avoid conflicts
skourta Feb 7, 2023
8968bb3
fixed call of clean cpp
skourta Feb 7, 2023
28774d3
added saving the dataset to the disk when not using dataset
skourta Feb 13, 2023
6d7f7b9
reverted ray init to multiple workers
skourta Feb 14, 2023
acfc53b
added 2 checkpoints for dataset and model.eval
skourta Feb 22, 2023
1f45759
fixed INFO to info bug in calling logging
skourta Feb 23, 2023
c6ac8cc
format with black
skourta Mar 15, 2023
7ce563a
added sequential data
skourta Mar 16, 2023
e5e4b8a
Merge pull request #12 from Tiramisu-Compiler/dataset_learning_sequen…
skourta Mar 22, 2023
b0ca8df
added seed, comments and types
skourta Mar 28, 2023
cfc4024
added seed, comments and types and reverted tiramisu config
skourta Mar 28, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -16,3 +16,5 @@ scripts/env.sh
Dataset*
.vscode
.idea
dataset
cpps
9 changes: 9 additions & 0 deletions config.yaml.template
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,15 @@ environment:
dataset_path: "./Dataset_multi/"
programs_file: "./multicomp.json"
clean_files: True
json_dataset:
# Path to the dataset file
path: "./Dataset_pickle/full_legality_annotations_solver.pkl"
# Path to the dataset cpp files
cpps_path: "./Dataset_multi"
# Path where to save the updated dataset (without the extension, it will be inferred by the dataset format)
path_to_save_dataset: "./Dataset_pickle/full_legality_annotations_solver_updated"
# Supported formats are available in the dataset_utilities module
dataset_format: "PICKLE"


tiramisu:
Expand Down
5 changes: 3 additions & 2 deletions evaluate.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,8 @@ def get_arguments():

# @hydra.main(config_path="config", config_name="config")
def main(config: RLAutoSchedulerConfig, checkpoint=None):
if checkpoint is None: return
if checkpoint is None:
return
configure_env_variables(config)
best_checkpoint = os.path.join(config.ray.base_path, checkpoint)
with ray.init(num_cpus=config.ray.ray_num_cpus):
Expand Down Expand Up @@ -87,7 +88,7 @@ def main(config: RLAutoSchedulerConfig, checkpoint=None):
except:
print("error", action, observation, reward, done)
continue
result["schedule_str"] = env.schedule_object.schedule_str
result["sched_str"] = env.schedule_object.sched_str
result["speedup"] = env.schedule_controller.speedup
results.append(result)
with open("results.json", "w+") as file:
Expand Down
7 changes: 5 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
PyYAML == 6.0
gym == 0.21.0
gym==0.21.0
gymnasium==0.27.1
numpy == 1.23.1
ray[rllib] == 1.13.0
ray[rllib] == 2.2.0
sympy == 1.10.1
torch == 1.12.0
tqdm == 4.64.0
pandas == 1.5.3
tensorflow_probability == 0.19.0
5 changes: 0 additions & 5 deletions rl_interface/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +0,0 @@
from .action import *
from .environment import *
from .model import *
from .reward import *
from .utils import *
72 changes: 53 additions & 19 deletions rl_interface/action.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
import random
from tiramisu_programs.optimization import OptimizationCommand
from tiramisu_programs.schedule_utils import ScheduleUtils
from tiramisu_programs.tiramisu_program import TiramisuProgram
import ray


class Action:
Expand Down Expand Up @@ -104,7 +107,7 @@ def __init__(self, id_, it_dict, common_it):
self.it_dict = it_dict
self.common_it = common_it

def parameter(self, comp=None, prog=None):
def parameter(self, comp=None, prog: TiramisuProgram = None, schedule: list[OptimizationCommand] = None):
""""
Property method to return the parameter related to the action selected.
Returns:
Expand Down Expand Up @@ -212,8 +215,8 @@ def parameter(self, comp=None, prog=None):
first_it = 6
second_it = 7

first_fact = 32 #random.choice([32, 64, 128])
second_fact = 32 #random.choice([32, 64, 128])
first_fact = 32 # random.choice([32, 64, 128])
second_fact = 32 # random.choice([32, 64, 128])
# #print("after choosing first and second params and factors")

# calculate the loop extent to see if we should create new iterators or not
Expand All @@ -222,7 +225,7 @@ def parameter(self, comp=None, prog=None):
self.it_dict[first_comp][first_it]["upper_bound"] -
self.it_dict[first_comp][first_it]["lower_bound"])
# #print("\n first loop extent is ", loop_extent_1)
#print("first factor is", first_fact)
# print("first factor is", first_fact)
if loop_extent_1 == first_fact:
tiling_flag_1 = False
print("Tiling flag 1 false, loopextent == factor")
Expand All @@ -235,10 +238,10 @@ def parameter(self, comp=None, prog=None):
self.it_dict[first_comp][second_it]["upper_bound"] -
self.it_dict[first_comp][second_it]["lower_bound"])
# print("\n second loop extent is ", loop_extent_2)
#print("second factor is", second_fact)
# print("second factor is", second_fact)
if loop_extent_2 == second_fact:
tiling_flag_2 = False
#print("tiling flag 2 false, loopextent == factor")
# print("tiling flag 2 false, loopextent == factor")
elif loop_extent_2 < second_fact:
print("exceeeption, loop extent 2 smaller than factor")
from tiramisu_programs.schedule import LoopExtentException
Expand Down Expand Up @@ -283,15 +286,15 @@ def parameter(self, comp=None, prog=None):
second_it = 6
third_it = 7

first_fact = 32 #random.choice([32, 64, 128])
second_fact = 32 #random.choice([32, 64, 128])
third_fact = 32 #random.choice([32, 64, 128])
first_fact = 32 # random.choice([32, 64, 128])
second_fact = 32 # random.choice([32, 64, 128])
third_fact = 32 # random.choice([32, 64, 128])
# calculate the loop extent to see if we should create new iterators or not
loop_extent_1 = abs(
self.it_dict[first_comp][first_it]["upper_bound"] -
self.it_dict[first_comp][first_it]["lower_bound"])
# #print("\n first loop extent is ", loop_extent_1)
#print("first factor is", first_fact)
# print("first factor is", first_fact)
if loop_extent_1 == first_fact:
tiling_flag_1 = False
print("tiling flag 1 false, loopextent == factor")
Expand All @@ -304,7 +307,7 @@ def parameter(self, comp=None, prog=None):
self.it_dict[first_comp][second_it]["upper_bound"] -
self.it_dict[first_comp][second_it]["lower_bound"])
# print("\n second loop extent is ", loop_extent_2)
#print("second factor is", second_fact)
# print("second factor is", second_fact)
if loop_extent_2 == second_fact:
tiling_flag_2 = False
print("tiling flag 2 false, loopextent == factor")
Expand All @@ -317,7 +320,7 @@ def parameter(self, comp=None, prog=None):
self.it_dict[first_comp][third_it]["upper_bound"] -
self.it_dict[first_comp][third_it]["lower_bound"])
# print("\n third loop extent is ", loop_extent_3)
#print("third factor is", third_fact)
# print("third factor is", third_fact)
if loop_extent_3 == third_fact:
tiling_flag_3 = False
print("tiling flag 3 false, loopextent == factor")
Expand Down Expand Up @@ -376,6 +379,7 @@ def parameter(self, comp=None, prog=None):
return params

elif self.id == 44: # SKEWING01
solver_res = None
first_it = 0
second_it = 1

Expand All @@ -384,10 +388,25 @@ def parameter(self, comp=None, prog=None):
"second_dim_index": second_it
}

# print("before calling solver")
# Get schedule id
tmp_sched_str = ScheduleUtils.optimlist_to_str(schedule)

solver_res = prog.call_solver(comp, skew_params)
# print("afetr calling solver")
# Load saved results if they exist
if prog.config.environment.use_dataset:
# Check if schedule is saved
if tmp_sched_str in prog.function_dict[
'schedules_solver_results_dict']:
print(
f"Loading solver results from saved schedule: {tmp_sched_str}")
solver_res = prog.function_dict[
'schedules_solver_results_dict'][tmp_sched_str]

if solver_res is None:
solver_res = prog.call_solver(comp, skew_params)

# Save the new solver results
prog.function_dict[
'schedules_solver_results_dict'][tmp_sched_str] = solver_res

if solver_res == None or solver_res == "-1":
return {
Expand All @@ -406,6 +425,7 @@ def parameter(self, comp=None, prog=None):
}

elif self.id == 45: # SKEWING12
solver_res = None
first_it = 1
second_it = 2

Expand All @@ -414,10 +434,24 @@ def parameter(self, comp=None, prog=None):
"second_dim_index": second_it
}

# print("before calling solver")
# Load saved results if they exist
if prog.config.environment.use_dataset:
tmp_sched_str = ScheduleUtils.optimlist_to_str(schedule)

# Check if schedule is saved
if tmp_sched_str in prog.function_dict[
'schedules_solver_results_dict']:
print(
f"Loading solver results from saved schedule: {tmp_sched_str}")
solver_res = prog.function_dict[
'schedules_solver_results_dict'][tmp_sched_str]

if solver_res is None:
solver_res = prog.call_solver(comp, skew_params)

solver_res = prog.call_solver(comp, skew_params)
# print("afetr calling solver")
# Save the new solver results
if prog.config.environment.use_dataset:
prog.function_dict['schedules_solver_results_dict'][tmp_sched_str] = solver_res

if solver_res == None or solver_res == "-1":
return {
Expand Down
Loading