fixed numerical instabilities in stateless
ErikOrm committed Dec 18, 2023
1 parent f480695 commit f103d01
Showing 2 changed files with 17 additions and 6 deletions.
6 changes: 3 additions & 3 deletions hypermapper/bo/bo.py
@@ -114,7 +114,7 @@ def main(settings, black_box_function=None):
        )
        if default_doe_parameter_array.shape[0] > 0:
            if settings["hypermapper_mode"]["mode"] == "stateless":
-                return default_doe_parameter_array.numpy(), param_space.parameter_names
+                return param_space.convert(default_doe_parameter_array, "internal", "original"), param_space.parameter_names
            else:
                default_doe_data_array = param_space.run_configurations(
                    default_doe_parameter_array,
@@ -157,7 +157,7 @@ def main(settings, black_box_function=None):
                allow_repetitions=False,
            )
            if settings["hypermapper_mode"]["mode"] == "stateless":
-                return tmp_parameter_array.numpy(), param_space.parameter_names
+                return param_space.convert(tmp_parameter_array, "internal", "original"), param_space.parameter_names
            else:
                tmp_data_array = param_space.run_configurations(
                    tmp_parameter_array, beginning_of_time, settings, black_box_function
@@ -319,7 +319,7 @@ def main(settings, black_box_function=None):
    ##################
    black_box_function_t0 = time.time()
    if settings["hypermapper_mode"]["mode"] == "stateless":
-        return best_configurations.numpy(), param_space.parameter_names
+        return param_space.convert(best_configurations, "internal", "original"), param_space.parameter_names
    else:
        new_data_array = param_space.run_configurations(
            best_configurations,
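In stateless mode the suggested configurations are handed back to the caller instead of being evaluated through run_configurations, so each of the three return sites above now converts the internal tensor representation back to original parameter values rather than dumping it with .numpy(). The sketch below is a toy illustration of that distinction; ToySpace and its value list are hypothetical stand-ins, not HyperMapper's real parameter space class.

# Toy illustration only: ToySpace and its choices are hypothetical stand-ins
# for HyperMapper's parameter space, showing internal vs. original values.
import torch

class ToySpace:
    parameter_names = ["x", "activation"]
    _choices = ["relu", "tanh", "sigmoid"]

    def convert(self, internal_row, from_type, to_type):
        # "internal": every parameter is a float inside a torch tensor
        # "original": native Python scalars and category values
        assert from_type == "internal" and to_type == "original"
        x = internal_row[0].item()                                      # tensor -> float
        activation = self._choices[int(round(internal_row[1].item()))]  # snap drifted index
        return [x, activation]

space = ToySpace()
internal = torch.tensor([0.25, 1.9999999], dtype=torch.float64)  # tiny float drift

print(internal.numpy())                                 # old stateless return: raw internal floats
print(space.convert(internal, "internal", "original"))  # new stateless return: [0.25, 'sigmoid']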
17 changes: 14 additions & 3 deletions hypermapper/param/parameters.py
@@ -238,6 +238,8 @@ def convert(
            return (intermediate_value - self.get_min()) / (
                self.get_max() - self.get_min()
            )
+        elif to_type == "original" and isinstance(intermediate_value, torch.Tensor):
+            return intermediate_value.item()
        else:
            return intermediate_value

@@ -376,6 +378,8 @@ def convert(
            return (intermediate_value - self.get_min()) / (
                self.get_max() - self.get_min()
            )
+        elif to_type == "original" and isinstance(intermediate_value, torch.Tensor):
+            return intermediate_value.item()
        else:
            return intermediate_value

@@ -524,12 +528,13 @@ def convert(
        else:
            intermediate_value = input_value

-        if to_type == "string":
+        if to_type in ["string", "original"]:
            # this is a correction for numerical errors
            if (
                self.int_ordinal
                and abs(intermediate_value - np.round(intermediate_value)) < 1e-6
            ):
-                return f"{int(np.round(intermediate_value))}"
+                corrected_output = int(np.round(intermediate_value))
            else:
                closest_value = min(
                    self._val_indices.keys(), key=lambda x: abs(x - intermediate_value)
@@ -539,7 +544,13 @@
                    self._val_indices.keys(),
                    key=lambda x: abs(x - intermediate_value),
                )
-                return f"{intermediate_value}"
+                corrected_output = intermediate_value
+            if to_type == "string":
+                return f"{corrected_output}"
+            else:
+                if isinstance(corrected_output, torch.Tensor):
+                    corrected_output = corrected_output.item()
+                return corrected_output
        elif to_type == "01":
            return self.values.index(intermediate_value) / (self.get_size() - 1)
        else:
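The parameters.py change extends the correction path that already existed for "string" conversions to the new "original" target: near-integer ordinals are rounded away from floating-point drift, other values are snapped to the closest value the parameter actually allows, and a torch.Tensor result is unwrapped with .item() before being returned. Below is a standalone sketch of that correction logic under those assumptions; correct_ordinal is a hypothetical helper, not the parameter class's actual convert() implementation.

# Hypothetical standalone sketch of the numerical correction applied during
# conversion; not the actual OrdinalParameter.convert() implementation.
import numpy as np
import torch

def correct_ordinal(intermediate_value, valid_values, int_ordinal, to_type):
    # Unwrap 0-d tensors produced by the optimizer into plain Python numbers.
    if isinstance(intermediate_value, torch.Tensor):
        intermediate_value = intermediate_value.item()
    # Near-integer drift on integer ordinals is rounded away ...
    if int_ordinal and abs(intermediate_value - np.round(intermediate_value)) < 1e-6:
        corrected = int(np.round(intermediate_value))
    # ... otherwise snap to the closest value the parameter actually allows.
    else:
        corrected = min(valid_values, key=lambda v: abs(v - intermediate_value))
    return f"{corrected}" if to_type == "string" else corrected

print(correct_ordinal(torch.tensor(4.0000003), [1, 2, 4, 8], True, "original"))  # 4
print(correct_ordinal(0.2499999, [0.25, 0.5, 1.0], False, "original"))           # 0.25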
