Add an exception for the nvml_ parameter check if in simulation mode
fjwillemsen committed Sep 27, 2023
1 parent 25c5452 · commit a39d5ae
Showing 2 changed files with 4 additions and 3 deletions.
kernel_tuner/interface.py (1 addition, 1 deletion)
@@ -590,7 +590,7 @@ def tune_kernel(
     objective, objective_higher_is_better = get_objective_defaults(objective, objective_higher_is_better)

     # check for forbidden names in tune parameters
-    util.check_tune_params_list(tune_params, observers)
+    util.check_tune_params_list(tune_params, observers, simulation_mode=simulation_mode)

     # check whether block_size_names are used as expected
     util.check_block_size_params_names_list(block_size_names, tune_params)
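For context, a minimal usage sketch of what this change enables (all names and values below are illustrative, and a cache file from an earlier, non-simulated tuning run of the same kernel and tune_params is assumed): with simulation_mode=True, nvml_ tunable parameters no longer require attaching an NVMLObserver, since every configuration is replayed from the cache.

import numpy as np
from kernel_tuner import tune_kernel

# Illustrative kernel; any kernel whose results are already in the cache works.
kernel_string = """
__global__ void vector_add(float *c, float *a, float *b, int n) {
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < n) {
        c[i] = a[i] + b[i];
    }
}
"""

size = 10_000_000
a = np.random.randn(size).astype(np.float32)
b = np.random.randn(size).astype(np.float32)
c = np.zeros_like(a)
args = [c, a, b, np.int32(size)]

tune_params = {
    "block_size_x": [128, 256, 512],
    "nvml_gr_clock": [1350, 1500],  # illustrative graphics clock values (MHz)
}

# With simulation_mode=True the search is replayed from the cache file, so the
# nvml_ parameters no longer trigger the "require an NVMLObserver" error.
results, env = tune_kernel(
    "vector_add", kernel_string, size, args, tune_params,
    cache="vector_add_cache.json",  # assumed to exist from a real tuning run
    simulation_mode=True,
)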
kernel_tuner/util.py (3 additions, 2 deletions)
@@ -180,14 +180,14 @@ def check_stop_criterion(to):
         raise StopCriterionReached("time limit exceeded")


-def check_tune_params_list(tune_params, observers):
+def check_tune_params_list(tune_params, observers, simulation_mode=False):
     """Raise an exception if a tune parameter has a forbidden name."""
     forbidden_names = ("grid_size_x", "grid_size_y", "grid_size_z", "time")
     for name, param in tune_params.items():
         if name in forbidden_names:
             raise ValueError("Tune parameter " + name + " with value " + str(param) + " has a forbidden name!")
     if any("nvml_" in param for param in tune_params):
-        if not observers or not any(isinstance(obs, NVMLObserver) for obs in observers):
+        if not simulation_mode and (not observers or not any(isinstance(obs, NVMLObserver) for obs in observers)):
             raise ValueError("Tune parameters starting with nvml_ require an NVMLObserver!")


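The effect on check_tune_params_list itself, as a small sketch (the calls below are hypothetical test input, not part of the commit):

from kernel_tuner.util import check_tune_params_list

tune_params = {"block_size_x": [128, 256], "nvml_gr_clock": [1350, 1500]}

# Without an NVMLObserver and outside simulation mode, the nvml_ parameter is
# rejected, because nothing would apply the clock setting during benchmarking.
try:
    check_tune_params_list(tune_params, observers=None)
except ValueError as err:
    print(err)  # Tune parameters starting with nvml_ require an NVMLObserver!

# In simulation mode the results come from a cache file and no observer is
# needed, so the same parameters now pass the check.
check_tune_params_list(tune_params, observers=None, simulation_mode=True)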
@@ -1065,6 +1065,7 @@ def process_cache(cache, kernel_options, tuning_options, runner):
         if all(key in tuning_options.tune_params for key in cached_data["tune_params_keys"]):
             raise ValueError(
                 f"All tunable parameters are present, but the order is wrong. \
+                This is not possible because the order must be preserved to lookup the correct configuration in the cache. \
                 Cache has order: {cached_data['tune_params_keys']}, tuning_options has: {list(tuning_options.tune_params.keys())}"
             )
         raise ValueError(
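The extended error message concerns cache lookups. A sketch of why parameter order matters, assuming (as the message implies) that cached configurations are looked up by their parameter values taken in tune_params order; the cache_key helper below is purely illustrative and not a kernel_tuner function:

# Hypothetical illustration of order-sensitive cache lookups.
cache_order = ["block_size_x", "nvml_gr_clock"]  # order stored in the cache file
user_order = ["nvml_gr_clock", "block_size_x"]   # order passed in tune_params

def cache_key(config, param_order):
    # Illustrative only: join the values in the given parameter order.
    return ",".join(str(config[name]) for name in param_order)

config = {"block_size_x": 128, "nvml_gr_clock": 1350}
print(cache_key(config, cache_order))  # "128,1350" -> matches the cached entry
print(cache_key(config, user_order))   # "1350,128" -> lookup would miss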