diff --git a/src/neps/api.py b/src/neps/api.py
index 6eb97bd9..4423d323 100644
--- a/src/neps/api.py
+++ b/src/neps/api.py
@@ -4,11 +4,13 @@
 from __future__ import annotations

 import logging
+import os
 import warnings
 from pathlib import Path
 from typing import Callable

 import ConfigSpace as CS
+import yaml
 from typing_extensions import Literal

 import metahyper
@@ -18,6 +20,7 @@ import metahyper
 from .plot.tensorboard_eval import tblogger
 from .search_spaces.parameter import Parameter
 from .search_spaces.search_space import SearchSpace, pipeline_space_from_configspace
+from .utils.common import get_searcher_data
 from .utils.result_utils import get_loss

@@ -114,10 +117,11 @@ def run(
         "bayesian_optimization",
         "random_search",
         "hyperband",
-        "hyperband_custom_default",
+        "priorband",
         "mobster",
     ]
     | BaseOptimizer = "default",
+    searcher_path: str | None = None,
     **searcher_kwargs,
 ) -> None:
     """Run a neural pipeline search.
@@ -155,6 +159,8 @@ def run(
         cost_value_on_error: Setting this and loss_value_on_error to any float
             will suppress any error and will use given cost value instead. default: None
         searcher: Which optimizer to use. This is usually only needed by neps developers.
+        searcher_path: The path to the folder containing a user-created searcher
+            YAML file. Leave as None when using the searchers provided by NePS.
         **searcher_kwargs: Will be passed to the searcher. This is usually only
             needed by neps developers.

@@ -162,6 +168,7 @@ def run(
     Raises:
         ValueError: If deprecated argument working_directory is used.
         ValueError: If root_directory is None.
        TypeError: If pipeline_space has invalid type.
+        FileNotFoundError: If the specified searcher is not found.

     Example:
@@ -220,31 +227,88 @@ def run(
         message = f"The pipeline_space has invalid type: {type(pipeline_space)}"
         raise TypeError(message) from e

-    if searcher == "default" or searcher is None:
-        if pipeline_space.has_fidelity:
-            searcher = "hyperband"
-            if hasattr(pipeline_space, "has_prior") and pipeline_space.has_prior:
-                searcher = "hyperband_custom_default"
+    user_defined_searcher = False
+
+    if searcher_path is not None:
+        # The user has their own custom searcher.
+        user_yaml_path = os.path.join(searcher_path, f"{searcher}.yaml")
+
+        if not os.path.exists(user_yaml_path):
+            raise FileNotFoundError(
+                f"File '{searcher}.yaml' does not exist at {user_yaml_path}."
+            )
+
+        with open(user_yaml_path) as config_file:
+            config = yaml.safe_load(config_file)
+        user_defined_searcher = True
+
+        logger.info("Preparing to run user-created searcher")
+    else:
+        if searcher in ["default", None]:
+            # NePS decides the searcher.
+            if pipeline_space.has_prior:
+                searcher = "priorband" if pipeline_space.has_fidelity else "pibo"
+            elif pipeline_space.has_fidelity:
+                searcher = "hyperband"
+            else:
+                searcher = "bayesian_optimization"
         else:
-            searcher = "bayesian_optimization"
-        logger.info(f"Running {searcher} as the searcher")
+            # The user specified one of the NePS searchers.
+            user_defined_searcher = True
+
+        config = get_searcher_data(searcher)
+
+    searcher_alg = config["searcher_init"]["algorithm"]
+    searcher_config = config["searcher_kwargs"]
+
+    logger.info(f"Running {searcher} as the searcher")
+    logger.info(f"Algorithm: {searcher_alg}")
+
+    # Updating searcher arguments from searcher_kwargs
+    if user_defined_searcher:
+        for key, value in searcher_kwargs.items():
+            if key not in searcher_config:
+                searcher_config[key] = value
+                logger.info(
+                    f"Updating the current searcher argument '{key}'"
+                    f" with the value '{value}'"
+                )
+            elif searcher_config[key] != value:
+                searcher_config[key] = value
+                logger.info(
+                    f"Updating the current searcher argument '{key}'"
+                    f" with the value '{value}'"
+                )
+            else:
+                logger.info(
+                    f"The searcher argument '{key}' has the same"
+                    f" value '{value}' as the default. No update"
+                )
+    elif searcher_kwargs:
+        # No searcher argument updates when NePS decides the searcher.
+        logger.info(
+            "No updates were made to the searcher arguments because"
+            " NePS decided the searcher."
+        )

-    searcher_kwargs.update(
+    searcher_config.update(
         {
             "loss_value_on_error": loss_value_on_error,
             "cost_value_on_error": cost_value_on_error,
             "ignore_errors": ignore_errors,
         }
     )

-    searcher = instance_from_map(SearcherMapping, searcher, "searcher", as_class=True)(
+    searcher_instance = instance_from_map(
+        SearcherMapping, searcher_alg, "searcher", as_class=True
+    )(
         pipeline_space=pipeline_space,
         budget=max_cost_total,  # TODO: use max_cost_total everywhere
-        **searcher_kwargs,
+        **searcher_config,
     )

     metahyper.run(
         run_pipeline,
-        searcher,
+        searcher_instance,
         root_directory,
         development_stage_id=development_stage_id,
         task_id=task_id,
diff --git a/src/neps/optimizers/README.md b/src/neps/optimizers/README.md
new file mode 100644
index 00000000..95395829
--- /dev/null
+++ b/src/neps/optimizers/README.md
@@ -0,0 +1,103 @@
+## Optimizer Configuration Options
+
+Before running the optimizer for your AutoML task, you have several configuration options that let you tailor the optimization process to your specific needs.
+
+### 1. Automatic Optimizer Selection
+
+If you prefer not to specify a particular optimizer for your AutoML task, you can simply pass `"default"` or `None` as the `searcher` argument. NePS will automatically choose the best optimizer based on the characteristics of your search space. This provides a hassle-free way to get started quickly.
+
+The optimizer selection is based on the following characteristics of your search space:
+
+- If it has fidelity: `hyperband`
+- If it has a prior: `pibo`
+- If it has both fidelity and a prior: `priorband`
+- If it has neither: `bayesian_optimization`
+
+### 2. Choosing One of the NePS Optimizers
+
+We have also prepared some optimizers with specific hyperparameters that we believe can generalize well to most AutoML tasks and use cases. For more details on the available default optimizers and the algorithms that can be called, please refer to the section on `SearcherConfigs` below.
+
+### 3. Custom Optimizer Configuration via YAML
+
+For users who want more control over the optimizer's hyperparameters, you can create your own YAML configuration file. In this file, you specify the hyperparameters for your preferred optimizer. To use this custom configuration, pass the name of your YAML file (without the extension) as the `searcher` argument and the path to its folder via the `searcher_path` parameter when running the optimizer.
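+
+For instance, assuming your file is named `my_searcher.yaml` and lives in a folder `custom_searchers/` (a minimal sketch: the file name, folder, objective function, and search space below are all illustrative placeholders), a run could look like this:
+
+```python
+import neps
+
+
+def run_pipeline(learning_rate: float) -> float:
+    # Placeholder objective: evaluate one configuration and return its loss.
+    return learning_rate
+
+
+pipeline_space = dict(
+    learning_rate=neps.FloatParameter(lower=1e-5, upper=1e-1, log=True),
+)
+
+neps.run(
+    run_pipeline=run_pipeline,
+    pipeline_space=pipeline_space,
+    root_directory="results",
+    max_evaluations_total=25,
+    searcher="my_searcher",            # name of the YAML file, without extension
+    searcher_path="custom_searchers",  # folder containing my_searcher.yaml
+)
+```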
+The library will then load your custom settings and use them for optimization.
+
+Here's the format of the YAML configuration, using `bayesian_optimization` as an example:
+
+```yaml
+searcher_init:
+  algorithm: bayesian_optimization
+searcher_kwargs: # Specific arguments depending on the searcher
+  initial_design_size: 5
+  surrogate_model: gp_hierarchy # or {"gp", "deep_gp"}
+  acquisition: EI # or {"LogEI", "AEI", "MFEI"}
+  log_prior_weighted: false
+  acquisition_sampler: random # or {"mutation", "evolution", "freeze-thaw"}
+  random_interleave_prob: 0.0
+  disable_priors: false
+  prior_confidence: high
+  sample_default_first: true
+```
+
+### 4. Hyperparameter Overrides
+
+If you want to make on-the-fly adjustments to the optimizer's hyperparameters without modifying the YAML configuration file, you can do so by passing keyword arguments (kwargs) to the `neps.run` function itself. Any hyperparameter values provided as kwargs take precedence over those specified in the YAML configuration. Note that these overrides only apply when you have explicitly selected a searcher; when NePS decides the searcher for you, keyword arguments are ignored.
+
+### Note for Contributors
+
+When designing a new optimizer, it's essential to create a YAML configuration file in the `default_searchers` folder under `src/neps/optimizers`. This YAML file should contain the default configuration settings that you believe should be used when the user chooses the searcher.
+
+Even when many hyperparameters are simply set to their default values as specified in the code, it is still good practice to include them in the YAML file, because the `SearcherConfigs` class relies on the arguments from the YAML file to display the optimizer's configuration to the user.
+
+## Searcher Configurations
+
+The `SearcherConfigs` class provides a set of useful functions to manage and retrieve default configuration details for NePS optimizers. These functions help you understand and interact with the available searchers and their associated algorithms and configurations.
+
+### Importing `SearcherConfigs`
+
+Before you can use the `SearcherConfigs` class, make sure to import it into your Python script:
+
+```python
+from neps.optimizers.info import SearcherConfigs
+```
+
+Once you have imported the class, you can use its functions to explore the available searchers, algorithms, and configuration details.
+
+### List Available Searchers
+
+To list all the available searchers that can be used in NePS runs, use the `get_searchers` function. It provides you with a list of searcher names:
+
+```python
+searchers = SearcherConfigs.get_searchers()
+print("Available searchers:", searchers)
+```
+
+### List Available Searching Algorithms
+
+The `get_available_algorithms` function helps you discover the searching algorithms available within the NePS searchers:
+
+```python
+algorithms = SearcherConfigs.get_available_algorithms()
+print("Available searching algorithms:", algorithms)
+```
+
+### Find Searchers Using a Specific Algorithm
+
+If you want to identify which NePS searchers use a specific searching algorithm (e.g., Bayesian Optimization, Hyperband, PriorBand), you can use the `get_searcher_from_alg` function.
+It returns a list of searchers that use the specified algorithm:
+
+```python
+algorithm = "bayesian_optimization" # Replace with the desired algorithm
+searchers = SearcherConfigs.get_searcher_from_alg(algorithm)
+print(f"Searchers using {algorithm}:", searchers)
+```
+
+### Retrieve Searcher Configuration Details
+
+To access the configuration details of a specific searcher, use the `get_searcher_kwargs` function. Provide the name of the searcher you are interested in, and it returns the searcher's raw configuration (the content of its YAML file):
+
+```python
+searcher_name = "pibo" # Replace with the desired NePS searcher name
+searcher_kwargs = SearcherConfigs.get_searcher_kwargs(searcher_name)
+print(f"Configuration of {searcher_name}:", searcher_kwargs)
+```
+
+These functions let you explore and manage the available NePS searchers and their configurations effectively.
diff --git a/src/neps/optimizers/default_searchers/asha.yaml b/src/neps/optimizers/default_searchers/asha.yaml
new file mode 100644
index 00000000..2b683199
--- /dev/null
+++ b/src/neps/optimizers/default_searchers/asha.yaml
@@ -0,0 +1,18 @@
+searcher_init:
+  algorithm: asha
+searcher_kwargs:
+  # Arguments that can be modified by the user
+  eta: 3
+  early_stopping_rate: 0
+  initial_design_type: max_budget # or {"unique_configs"}
+  use_priors: false
+  random_interleave_prob: 0.0
+  sample_default_first: false
+  sample_default_at_target: false
+
+  # Arguments that cannot be modified by the user
+  # sampling_policy: RandomUniformPolicy
+  # promotion_policy: AsyncPromotionPolicy
+
+  # Other arguments
+  # ignore_errors: false
diff --git a/src/neps/optimizers/default_searchers/asha_prior.yaml b/src/neps/optimizers/default_searchers/asha_prior.yaml
new file mode 100644
index 00000000..9907fede
--- /dev/null
+++ b/src/neps/optimizers/default_searchers/asha_prior.yaml
@@ -0,0 +1,18 @@
+searcher_init:
+  algorithm: asha_prior
+searcher_kwargs:
+  # Arguments that can be modified by the user
+  eta: 3
+  early_stopping_rate: 0
+  initial_design_type: max_budget # or {"unique_configs"}
+  prior_confidence: medium # or {"low", "high"}
+  random_interleave_prob: 0.0
+  sample_default_first: true
+  sample_default_at_target: false
+
+  # Arguments that cannot be modified by the user
+  # sampling_policy: FixedPriorPolicy
+  # promotion_policy: AsyncPromotionPolicy
+
+  # Other arguments
+  # ignore_errors: false
diff --git a/src/neps/optimizers/default_searchers/bayesian_optimization.yaml b/src/neps/optimizers/default_searchers/bayesian_optimization.yaml
new file mode 100644
index 00000000..08db03ba
--- /dev/null
+++ b/src/neps/optimizers/default_searchers/bayesian_optimization.yaml
@@ -0,0 +1,19 @@
+searcher_init:
+  algorithm: bayesian_optimization
+searcher_kwargs:
+  # Arguments that can be modified by the user
+  initial_design_size: 10
+  surrogate_model: gp # or {"gp_hierarchy", "deep_gp"}
+  acquisition: EI # or {"LogEI", "AEI", "MFEI"}
+  log_prior_weighted: false
+  acquisition_sampler: mutation # or {"random", "evolution", "freeze-thaw"}
+  random_interleave_prob: 0.0
+  disable_priors: true
+  sample_default_first: false
+
+  # Other arguments:
+  # surrogate_model_args: None # type: dict
+  # optimal_assignment: false # type: bool
+  # domain_se_kernel: None # type: str
+  # graph_kernels: None # type: list
+  # hp_kernels: None # type: list
diff --git a/src/neps/optimizers/default_searchers/hyperband.yaml b/src/neps/optimizers/default_searchers/hyperband.yaml
new file mode 100644
index 00000000..f7f394b7
--- /dev/null
+++ b/src/neps/optimizers/default_searchers/hyperband.yaml
@@ -0,0 +1,14 @@
+searcher_init:
+  algorithm: hyperband
+searcher_kwargs:
+  # Arguments that can be modified by the user
+  eta: 3
+  initial_design_type: max_budget # or {"unique_configs"}
+  use_priors: false
+  random_interleave_prob: 0.0
+  sample_default_first: false
+  sample_default_at_target: false
+
+  # Arguments that cannot be modified by the user
+  # sampling_policy: RandomUniformPolicy
+  # promotion_policy: AsyncPromotionPolicy
diff --git a/src/neps/optimizers/default_searchers/mf_ei_bo.yaml b/src/neps/optimizers/default_searchers/mf_ei_bo.yaml
new file mode 100644
index 00000000..1176df3d
--- /dev/null
+++ b/src/neps/optimizers/default_searchers/mf_ei_bo.yaml
@@ -0,0 +1,26 @@
+searcher_init:
+  algorithm: mf_ei_bo
+searcher_kwargs:
+  # Arguments that can be modified by the user
+  step_size: 1
+  use_priors: false
+  sample_default_first: false
+  sample_default_at_target: false
+
+  # Arguments for the model
+  surrogate_model: deep_gp # or {"gp", "gp_hierarchy"}
+  acquisition: MFEI # or {"LogEI", "AEI", "EI"}
+  acquisition_sampler: freeze-thaw # or {"random", "evolution", "mutation"}
+  initial_design_fraction: 0.75
+  initial_design_size: 10
+
+  # Arguments that cannot be modified by the user
+  # model_policy: MFEIDeepModel
+
+  # Other arguments
+  # surrogate_model_args: None # type: dict
+  # optimal_assignment: false # type: bool
+  # domain_se_kernel: None # type: str
+  # graph_kernels: None # type: list
+  # hp_kernels: None # type: list
+  # initial_design_budget: None # type: int
diff --git a/src/neps/optimizers/default_searchers/mobster.yaml b/src/neps/optimizers/default_searchers/mobster.yaml
new file mode 100644
index 00000000..29c09358
--- /dev/null
+++ b/src/neps/optimizers/default_searchers/mobster.yaml
@@ -0,0 +1,27 @@
+searcher_init:
+  algorithm: mobster
+searcher_kwargs:
+  # Arguments that can be modified by the user
+  eta: 3
+  initial_design_type: max_budget # or {"unique_configs"}
+  use_priors: false
+  random_interleave_prob: 0.0
+  sample_default_first: false
+  sample_default_at_target: false
+
+  # Arguments for the model
+  surrogate_model: gp # or {"gp_hierarchy", "deep_gp"}
+  acquisition: EI # or {"LogEI", "AEI", "MFEI"}
+  log_prior_weighted: false
+  acquisition_sampler: mutation # or {"random", "evolution", "freeze-thaw"}
+
+  # Arguments that cannot be modified by the user
+  # sampling_policy: RandomUniformPolicy
+  # promotion_policy: AsyncPromotionPolicy
+  # model_policy: ModelPolicy
+
+  # Other arguments
+  # surrogate_model_args: None # type: dict
+  # domain_se_kernel: None # type: str
+  # graph_kernels: None # type: list
+  # hp_kernels: None # type: list
diff --git a/src/neps/optimizers/default_searchers/pibo.yaml b/src/neps/optimizers/default_searchers/pibo.yaml
new file mode 100644
index 00000000..5dc10c1f
--- /dev/null
+++ b/src/neps/optimizers/default_searchers/pibo.yaml
@@ -0,0 +1,20 @@
+searcher_init:
+  algorithm: bayesian_optimization
+searcher_kwargs:
+  # Arguments that can be modified by the user
+  initial_design_size: 10
+  surrogate_model: gp # or {"gp_hierarchy", "deep_gp"}
+  acquisition: EI # or {"LogEI", "AEI", "MFEI"}
+  log_prior_weighted: false
+  acquisition_sampler: mutation # or {"random", "evolution", "freeze-thaw"}
+  random_interleave_prob: 0.0
+  disable_priors: false
+  prior_confidence: medium # or {"low", "high"}
+  sample_default_first: false
+
+  # Other arguments:
+  # surrogate_model_args: None # type: dict
+  # optimal_assignment: false # type: bool
+  # domain_se_kernel: None # type: str
+  # graph_kernels: None # type: list
+  # hp_kernels: None # type: list
diff --git a/src/neps/optimizers/default_searchers/priorband.yaml b/src/neps/optimizers/default_searchers/priorband.yaml
new file mode 100644
index 00000000..9cc07f9e
--- /dev/null
+++ b/src/neps/optimizers/default_searchers/priorband.yaml
@@ -0,0 +1,22 @@
+searcher_init:
+  algorithm: priorband
+searcher_kwargs:
+  # Arguments that can be modified by the user
+  eta: 3
+  initial_design_type: max_budget # or {"unique_configs"}
+  prior_confidence: medium # or {"low", "high"}
+  random_interleave_prob: 0.0
+  sample_default_first: true
+  sample_default_at_target: false
+  prior_weight_type: geometric # or {"linear", "50-50"}
+  inc_sample_type: mutation # or {"crossover", "gaussian", "hypersphere"}
+  inc_mutation_rate: 0.5
+  inc_mutation_std: 0.25
+  inc_style: dynamic # or {"decay", "constant"}
+
+  # Arguments for the model
+  model_based: false # crucial argument: set to true to enable model-based search
+
+  # Arguments that cannot be modified by the user
+  # sampling_policy: EnsemblePolicy
+  # promotion_policy: SyncPromotionPolicy
diff --git a/src/neps/optimizers/default_searchers/priorband_bo.yaml b/src/neps/optimizers/default_searchers/priorband_bo.yaml
new file mode 100644
index 00000000..9991b8f8
--- /dev/null
+++ b/src/neps/optimizers/default_searchers/priorband_bo.yaml
@@ -0,0 +1,35 @@
+searcher_init:
+  algorithm: priorband
+searcher_kwargs:
+  # Arguments that can be modified by the user
+  eta: 3
+  initial_design_type: max_budget # or {"unique_configs"}
+  prior_confidence: medium # or {"low", "high"}
+  random_interleave_prob: 0.0
+  sample_default_first: true
+  sample_default_at_target: false
+  prior_weight_type: geometric # or {"linear", "50-50"}
+  inc_sample_type: mutation # or {"crossover", "gaussian", "hypersphere"}
+  inc_mutation_rate: 0.5
+  inc_mutation_std: 0.25
+  inc_style: dynamic # or {"decay", "constant"}
+
+  # Arguments for the model
+  model_based: true # crucial argument: set to true to enable model-based search
+  modelling_type: joint # or {"rung"}
+  initial_design_size: 10
+  surrogate_model: gp # or {"gp_hierarchy", "deep_gp"}
+  acquisition: EI # or {"LogEI", "AEI", "MFEI"}
+  log_prior_weighted: false
+  acquisition_sampler: mutation # or {"random", "evolution", "freeze-thaw"}
+
+  # Arguments that cannot be modified by the user
+  # sampling_policy: EnsemblePolicy
+  # promotion_policy: SyncPromotionPolicy
+  # model_policy: ModelPolicy
+
+  # Other arguments
+  # surrogate_model_args: None # type: dict
+  # domain_se_kernel: None # type: str
+  # graph_kernels: None # type: list
+  # hp_kernels: None # type: list
diff --git a/src/neps/optimizers/default_searchers/random_search.yaml b/src/neps/optimizers/default_searchers/random_search.yaml
new file mode 100644
index 00000000..745aeef5
--- /dev/null
+++ b/src/neps/optimizers/default_searchers/random_search.yaml
@@ -0,0 +1,6 @@
+searcher_init:
+  algorithm: random_search
+searcher_kwargs:
+  # Arguments that can be modified by the user
+  use_priors: false
+  ignore_fidelity: true
diff --git a/src/neps/optimizers/default_searchers/regularized_evolution.yaml b/src/neps/optimizers/default_searchers/regularized_evolution.yaml
new file mode 100644
index 00000000..efb62a6a
--- /dev/null
+++ b/src/neps/optimizers/default_searchers/regularized_evolution.yaml
@@ -0,0 +1,11 @@
+searcher_init:
+  algorithm: regularized_evolution
+searcher_kwargs:
+  # Arguments that can be modified by the user
+  population_size: 30
+  sample_size: 10
+  assisted: false
+
+  # Other arguments
+  # assisted_zero_cost_proxy: None # type: Callable
+  # assisted_init_population_dir: None # type: str | Path
diff --git a/src/neps/optimizers/default_searchers/successive_halving.yaml b/src/neps/optimizers/default_searchers/successive_halving.yaml
new file mode 100644
index 00000000..c352fa84
--- /dev/null
+++ b/src/neps/optimizers/default_searchers/successive_halving.yaml
@@ -0,0 +1,15 @@
+searcher_init:
+  algorithm: successive_halving
+searcher_kwargs:
+  # Arguments that can be modified by the user
+  eta: 3
+  early_stopping_rate: 0
+  initial_design_type: max_budget # or {"unique_configs"}
+  use_priors: false
+  random_interleave_prob: 0.0
+  sample_default_first: false
+  sample_default_at_target: false
+
+  # Arguments that cannot be modified by the user
+  # sampling_policy: RandomUniformPolicy
+  # promotion_policy: SyncPromotionPolicy
diff --git a/src/neps/optimizers/default_searchers/successive_halving_prior.yaml b/src/neps/optimizers/default_searchers/successive_halving_prior.yaml
new file mode 100644
index 00000000..7c468d92
--- /dev/null
+++ b/src/neps/optimizers/default_searchers/successive_halving_prior.yaml
@@ -0,0 +1,15 @@
+searcher_init:
+  algorithm: successive_halving_prior
+searcher_kwargs:
+  # Arguments that can be modified by the user
+  eta: 3
+  early_stopping_rate: 0
+  initial_design_type: max_budget # or {"unique_configs"}
+  prior_confidence: medium # or {"low", "high"}
+  random_interleave_prob: 0.0
+  sample_default_first: false
+  sample_default_at_target: false
+
+  # Arguments that cannot be modified by the user
+  # sampling_policy: FixedPriorPolicy
+  # promotion_policy: SyncPromotionPolicy
diff --git a/src/neps/optimizers/info.py b/src/neps/optimizers/info.py
new file mode 100644
index 00000000..04f3f6b7
--- /dev/null
+++ b/src/neps/optimizers/info.py
@@ -0,0 +1,108 @@
+from __future__ import annotations
+
+import os
+
+import yaml
+
+
+class SearcherConfigs:
+    """
+    This class provides methods to access default configuration details
+    for NePS optimizers.
+    """
+
+    @staticmethod
+    def _get_searchers_folder_path() -> str:
+        """
+        Helper method to get the folder path for the default searchers.
+
+        Returns:
+            str: The absolute path to the default searchers folder.
+        """
+        script_directory = os.path.dirname(os.path.abspath(__file__))
+        return os.path.join(script_directory, "default_searchers")
+
+    @staticmethod
+    def get_searchers() -> list[str]:
+        """
+        List all the searcher names that can be used in a neps run.
+
+        Returns:
+            list[str]: A list of searcher names.
+        """
+        folder_path = SearcherConfigs._get_searchers_folder_path()
+        searchers = []
+
+        for file_name in os.listdir(folder_path):
+            if file_name.endswith(".yaml"):
+                searcher_name = os.path.splitext(file_name)[0]
+                searchers.append(searcher_name)
+
+        return searchers
+
+    @staticmethod
+    def get_available_algorithms() -> list[str]:
+        """
+        List all available algorithms used by NePS searchers.
+
+        Returns:
+            list[str]: A list of algorithm names.
+        """
+        folder_path = SearcherConfigs._get_searchers_folder_path()
+        algorithms = set()
+
+        for filename in os.listdir(folder_path):
+            if filename.endswith(".yaml"):
+                file_path = os.path.join(folder_path, filename)
+                with open(file_path) as file:
+                    searcher_config = yaml.safe_load(file)
+                algorithm = searcher_config["searcher_init"].get("algorithm")
+                if algorithm:
+                    algorithms.add(algorithm)
+
+        return list(algorithms)
+
+    @staticmethod
+    def get_searcher_from_alg(algorithm: str) -> list[str]:
+        """
+        Get all NePS searchers that use a specific searching algorithm.
+
+        Args:
+            algorithm (str): The name of the algorithm needed for the search.
+
+        Returns:
+            list[str]: A list of searcher names using the specified algorithm.
+        """
+        folder_path = SearcherConfigs._get_searchers_folder_path()
+        searchers = []
+
+        for filename in os.listdir(folder_path):
+            if filename.endswith(".yaml"):
+                file_path = os.path.join(folder_path, filename)
+                with open(file_path) as file:
+                    searcher_config = yaml.safe_load(file)
+                if searcher_config["searcher_init"].get("algorithm") == algorithm:
+                    searchers.append(os.path.splitext(filename)[0])
+
+        return searchers
+
+    @staticmethod
+    def get_searcher_kwargs(searcher: str) -> str:
+        """
+        Get the kwargs and algorithm setup for a specific searcher.
+
+        Args:
+            searcher (str): The name of the searcher to check the details of.
+
+        Returns:
+            str: The raw content of the searcher's configuration.
+        """
+        folder_path = SearcherConfigs._get_searchers_folder_path()
+        searcher_config = ""
+
+        for filename in os.listdir(folder_path):
+            # Match the file name exactly so that, e.g., "priorband" does not
+            # also pick up "priorband_bo.yaml".
+            if filename == f"{searcher}.yaml":
+                file_path = os.path.join(folder_path, filename)
+                with open(file_path) as file:
+                    searcher_config = file.read()
+
+        return searcher_config
diff --git a/src/neps/utils/common.py b/src/neps/utils/common.py
index 3ef35095..6fe5fd9e 100644
--- a/src/neps/utils/common.py
+++ b/src/neps/utils/common.py
@@ -1,9 +1,35 @@
 from __future__ import annotations

+import os
 import random

 import numpy as np
 import torch
+import yaml
+
+
+def get_searcher_data(searcher: str) -> dict:
+    """
+    Returns the data from the YAML file associated with the specified searcher.
+
+    Args:
+        searcher (str): The name of the searcher.
+
+    Returns:
+        dict: The parsed content of the YAML file.
+    """
+    folder_path = "optimizers/default_searchers"
+    script_directory = os.path.dirname(os.path.abspath(__file__))
+    parent_directory = os.path.join(script_directory, os.pardir)
+    resource_path = os.path.join(parent_directory, folder_path, f"{searcher}.yaml")
+
+    if not os.path.exists(resource_path):
+        raise FileNotFoundError(f"Searcher '{searcher}' does not exist.")
+
+    with open(resource_path, "rb") as file:
+        data = yaml.safe_load(file)
+
+    return data


 def has_instance(collection, *types):