Merge branch 'develop' into uri/split_initial_point_generation
uri-granta authored Jan 22, 2024
2 parents 8cf13c0 + c0fab52 commit 9a37d83
Showing 38 changed files with 220 additions and 148 deletions.
1 change: 0 additions & 1 deletion .github/workflows/develop-checks.yaml
@@ -18,7 +18,6 @@ on:
  workflow_dispatch:
  push:
    branches: develop
    tags: v[0-9]+.[0-9]+.[0-9]+*

jobs:
  slowtests:
114 changes: 114 additions & 0 deletions .github/workflows/release-checks.yaml
@@ -0,0 +1,114 @@
# Copyright 2024 The Trieste Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: Release checks

on:
  workflow_dispatch:
  push:
    tags: v[0-9]+.[0-9]+.[0-9]+*

jobs:
  types:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: "3.10"
      - run: pip install tox
      - run: tox -e types

  types_old:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: "3.7"
      - run: pip install tox
      - run: tox -e types_old

  format:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: "3.10"
      - run: pip install tox
      - run: tox -e format

  tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        part: [ "1", "2", "3", "4" ]
    name: tests (part${{ matrix.part }})
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: "3.10"
      - run: pip install tox
      - run: tox -e tests_${{ matrix.part }}

  tests_old:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        part: [ "1", "2", "3", "4" ]
    name: tests_old (part${{ matrix.part }})
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: "3.7"
      - run: pip install tox
      - run: tox -e tests_old_${{ matrix.part }}

  slowtests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: "3.10"
      - run: pip install tox
      - run: tox -e tests -- --runslow only

  slowtests_old:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: "3.7"
      - run: pip install tox
      - run: tox -e tests_old -- --runslow only

  fulldocs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: "3.10"
      - run: pip install tox
      - run: |
          TEMP_DEB="$(mktemp)" &&
          wget -O "$TEMP_DEB" 'https://github.com/jgm/pandoc/releases/download/2.10.1/pandoc-2.10.1-1-amd64.deb' &&
          sudo dpkg -i "$TEMP_DEB"
          rm -f "$TEMP_DEB"
      - run: tox -e docs
4 changes: 2 additions & 2 deletions README.md
@@ -2,8 +2,8 @@

[![PyPI](https://img.shields.io/pypi/v/trieste.svg)](https://pypi.org/project/trieste)
[![License](https://img.shields.io/badge/license-Apache-green.svg)](LICENSE)
[![Quality checks](https://github.com/secondmind-labs/trieste/actions/workflows/develop-checks.yaml/badge.svg)](https://github.com/secondmind-labs/trieste/actions?query=workflows%3Adevelop-checks)
[![Docs](https://github.com/secondmind-labs/trieste/actions/workflows/deploy.yaml/badge.svg)](https://github.com/secondmind-labs/trieste/actions/workflows/deploy.yaml)
[![Release](https://img.shields.io/github/actions/workflow/status/secondmind-labs/trieste/release-checks.yaml?logo=github&label=release%20checks)](https://github.com/secondmind-labs/trieste/actions/workflows/release-checks.yaml)
[![Develop](https://img.shields.io/github/actions/workflow/status/secondmind-labs/trieste/develop-checks.yaml?logo=github&label=develop%20checks)](https://github.com/secondmind-labs/trieste/actions/workflows/develop-checks.yaml)
[![Codecov](https://img.shields.io/codecov/c/github/secondmind-labs/trieste/coverage.svg?branch=develop)](https://app.codecov.io/github/secondmind-labs/trieste/tree/develop)
[![Slack Status](https://img.shields.io/badge/slack-trieste-green.svg?logo=Slack)](https://join.slack.com/t/secondmind-labs/shared_invite/zt-ph07nuie-gMlkle__tjvXBay4FNSLkw)

@@ -10,7 +10,6 @@
import trieste
from trieste.acquisition.function import BayesianActiveLearningByDisagreement
from trieste.acquisition.rule import OBJECTIVE
from trieste.models.gpflow.models import VariationalGaussianProcess
from trieste.objectives.utils import mk_observer

np.random.seed(1793)
2 changes: 1 addition & 1 deletion docs/notebooks/asynchronous_greedy_multiprocessing.pct.py
@@ -211,7 +211,7 @@ def terminate_processes(processes):
f"Process {pid}: Main : received data {new_data}",
flush=True,
)
except:
except Exception:
continue

# new_data is a tuple of (point, observation value)
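The change above replaces a bare except clause with except Exception. For context, a minimal standalone sketch (not code from the notebook; the helper name is hypothetical): a bare except also catches BaseException subclasses such as KeyboardInterrupt and SystemExit, so the polling loop could swallow Ctrl-C, whereas except Exception lets those propagate.

import queue


def poll_once(q):
    """Return the next item from q, or None if nothing is available (hypothetical helper)."""
    try:
        return q.get_nowait()  # raises queue.Empty when there is nothing to read
    except Exception:  # catches ordinary errors such as queue.Empty ...
        return None  # ... but not KeyboardInterrupt/SystemExit, so Ctrl-C still interrupts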
4 changes: 0 additions & 4 deletions docs/notebooks/deep_gaussian_processes.pct.py
@@ -18,7 +18,6 @@
# The Michalewicz functions are highly non-stationary and have a global minimum that's hard to find, so DGPs might be more suitable than standard GPs, which may struggle because they typically have stationary kernels that cannot easily model non-stationarities.

# %%
import gpflow
from trieste.objectives import Michalewicz2, Michalewicz5
from trieste.objectives.utils import mk_observer
from trieste.experimental.plotting import plot_function_plotly
@@ -167,9 +166,6 @@ def build_dgp_model(data, search_space):
# We now compare to a GP model with priors over the hyperparameters. We do not expect this to do as well because GP models cannot deal with non-stationary functions well.

# %%
import gpflow
import tensorflow_probability as tfp

from trieste.models.gpflow import GaussianProcessRegression, build_gpr

gpflow_model = build_gpr(initial_data, search_space, likelihood_variance=1e-7)
1 change: 0 additions & 1 deletion docs/notebooks/explicit_constraints.pct.py
@@ -204,7 +204,6 @@ def constraint(input_data):
def plot_bo_results():
dataset = result.try_get_final_dataset()
query_points = dataset.query_points.numpy()
observations = dataset.observations.numpy()

_, ax = plot_function_2d(
ScaledBranin.objective,
8 changes: 4 additions & 4 deletions docs/notebooks/multifidelity_modelling.pct.py
@@ -260,13 +260,13 @@ def __call__(self, x, add_noise=True):

# Plot gpr results
mean, var = gpr_predictions
ax.plot(X, mean, label=f"GPR", color="tab:blue")
ax.plot(X, mean, label="GPR", color="tab:blue")
ax.plot(X, mean + 1.96 * tf.math.sqrt(var), alpha=0.2, color="tab:blue")
ax.plot(X, mean - 1.96 * tf.math.sqrt(var), alpha=0.2, color="tab:blue")

# Plot gpr results
mean, var = multifidelity_predictions
ax.plot(X, mean, label=f"MultifidelityAutoregressive", color="tab:orange")
ax.plot(X, mean, label="MultifidelityAutoregressive", color="tab:orange")
ax.plot(X, mean + 1.96 * tf.math.sqrt(var), alpha=0.2, color="tab:orange")
ax.plot(X, mean - 1.96 * tf.math.sqrt(var), alpha=0.2, color="tab:orange")

@@ -275,13 +275,13 @@ def __call__(self, x, add_noise=True):
ax.plot(
X,
observer(X_for_multifid, add_noise=False).observations,
label=f"True function",
label="True function",
color="tab:green",
)

# Scatter the data
ax.scatter(
hf_data.query_points, hf_data.observations, label=f"Data", color="tab:green"
hf_data.query_points, hf_data.observations, label="Data", color="tab:green"
)
plt.legend()
plt.show()
4 changes: 4 additions & 0 deletions pyproject.toml
@@ -85,10 +85,12 @@ disable = [
"invalid-unary-operand-type",
"line-too-long",
"locally-disabled",
"missing-class-docstring",
"missing-module-docstring",
"no-else-return",
"no-self-use",
"no-value-for-parameter",
"protected-access",
"redefined-builtin",
"redundant-keyword-arg",
"suppressed-message",
@@ -98,10 +100,12 @@ disable = [
"too-many-instance-attributes",
"too-many-public-methods",
"too-many-arguments",
"too-many-branches",
"too-many-locals",
"too-many-statements",
"too-many-boolean-expressions",
"too-many-nested-blocks",
"typevar-name-incorrect-variance",
"unexpected-keyword-arg",
"unused-argument",
"unsubscriptable-object",
@@ -129,7 +129,7 @@ def test_multifidelity_nonlinear_autoregressive_results_better_than_linear() ->
build_multifidelity_autoregressive_models(initial_data, n_fidelities, input_search_space)
)

mses = list()
mses = []
for model in [nonlinear_model, linear_model]:
model.update(initial_data)
model.optimize(initial_data)
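Both this chunk and the next replace list() with a list literal. The two spellings build the same empty list; the literal is simply the idiomatic form and skips a builtin name lookup and call. A tiny standalone sketch:

mses = []          # preferred: literal syntax
mses_alt = list()  # equivalent result, but resolves and calls the list builtin
assert mses == mses_alt == []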
@@ -61,7 +61,7 @@ def _build_nested_multifidelity_dataset(
) -> Dataset:
num_fidelities = problem.num_fidelities
initial_sample_sizes = [10 + 2 * (num_fidelities - i) for i in range(num_fidelities)]
fidelity_samples = list()
fidelity_samples = []
lowest_fidelity_sample = problem.search_space.sample(initial_sample_sizes[0])
lowest_fidelity_sample = add_fidelity_column(lowest_fidelity_sample, 0)
fidelity_samples.append(lowest_fidelity_sample)
2 changes: 1 addition & 1 deletion tests/unit/acquisition/function/test_active_learning.py
@@ -405,7 +405,7 @@ def test_integrated_variance_reduction_builder_updates_without_retracing() -> No
],
)
def test_bayesian_active_learning_by_disagreement_is_correct(at: tf.Tensor) -> None:
""" "
"""
We perform an MC check as in Section 5 of Houlsby 2011 paper. We check only the
2nd, more complicated term.
"""
3 changes: 2 additions & 1 deletion trieste/acquisition/combination.py
@@ -46,7 +46,8 @@ def __init__(self, *builders: AcquisitionFunctionBuilder[ProbabilisticModelType]

def __repr__(self) -> str:
""""""
return "{}({})".format(self.__class__.__name__, ", ".join(map(repr, self._acquisitions)))
builders = ", ".join(map(repr, self._acquisitions))
return f"{self.__class__.__name__}({builders})"

def prepare_acquisition_function(
self,
5 changes: 2 additions & 3 deletions trieste/acquisition/function/active_learning.py
@@ -64,7 +64,7 @@ def prepare_acquisition_function(
if not isinstance(model, SupportsPredictJoint):
raise NotImplementedError(
f"PredictiveVariance only works with models that support "
f"predict_joint; received {model.__repr__()}"
f"predict_joint; received {model!r}"
)

return predictive_variance(model, self._jitter)
@@ -282,8 +282,7 @@ def prepare_acquisition_function(
"""
if not isinstance(model, FastUpdateModel):
raise NotImplementedError(
f"PredictiveVariance only works with FastUpdateModel models; "
f"received {model.__repr__()}"
f"PredictiveVariance only works with FastUpdateModel models; received {model!r}"
)

return integrated_variance_reduction(model, self._integration_points, self._threshold)
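The reworded error messages here (and in the files below) use the !r conversion flag instead of calling model.__repr__() explicitly. The two spellings produce identical text; !r is simply the idiomatic f-string form. A minimal sketch with a stand-in class (hypothetical, not a Trieste model):

class FakeModel:
    def __repr__(self) -> str:
        return "FakeModel()"


model = FakeModel()
# !r applies repr() to the interpolated value, so these messages are identical.
assert f"received {model!r}" == f"received {model.__repr__()}" == "received FakeModel()"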
4 changes: 2 additions & 2 deletions trieste/acquisition/function/continuous_thompson_sampling.py
@@ -70,7 +70,7 @@ def prepare_acquisition_function(
if not isinstance(model, HasTrajectorySampler):
raise ValueError(
f"Thompson sampling from trajectory only supports models with a trajectory_sampler "
f"method; received {model.__repr__()}"
f"method; received {model!r}"
)

self._trajectory_sampler = model.trajectory_sampler()
@@ -147,7 +147,7 @@ def prepare_acquisition_function(
if not isinstance(model, HasTrajectorySampler):
raise ValueError(
f"Thompson sampling from trajectory only supports models with a trajectory_sampler "
f"method; received {model.__repr__()}"
f"method; received {model!r}"
)

self._trajectory_sampler = model.trajectory_sampler()
6 changes: 1 addition & 5 deletions trieste/acquisition/function/entropy.py
@@ -221,8 +221,6 @@ class SupportsCovarianceObservationNoise(
):
"""A model that supports both covariance_between_points and get_observation_noise."""

pass


class SupportsCovarianceObservationNoiseTrajectory(
HasTrajectorySampler, SupportsCovarianceObservationNoise, Protocol
@@ -337,7 +335,7 @@ def prepare_acquisition_function(
if not isinstance(model, SupportsCovarianceObservationNoise):
raise NotImplementedError(
f"GIBBON only works with models that support "
f"covariance_between_points and get_observation_noise; received {model.__repr__()}"
f"covariance_between_points and get_observation_noise; received {model!r}"
)

tf.debugging.Assert(dataset is not None, [tf.constant([])])
@@ -630,8 +628,6 @@ class SupportsCovarianceWithTopFidelityPredictY(
):
"""A model that is both multifidelity and supports predict_y."""

pass


MUMBOModelType = TypeVar(
"MUMBOModelType", bound=SupportsCovarianceWithTopFidelityPredictY, contravariant=True
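The deleted pass statements above were redundant: a class body that already contains a docstring is syntactically complete, so these Protocol intersection classes need nothing else. A minimal sketch with hypothetical protocol names:

from typing import Protocol


class HasMean(Protocol):
    def mean(self) -> float: ...


class HasVariance(Protocol):
    def variance(self) -> float: ...


class HasMeanAndVariance(HasMean, HasVariance, Protocol):
    """The docstring alone is a valid body; no trailing pass is needed."""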