Skip to content

Commit a29abe8

Browse files
authored
Build fixes (rust-ml#333)
* Bump MSRV to 1.70
* Rename examples to avoid name collisions
* Fix clippy errors
* Remove Result alias to avoid glob export collisions
* Revert Criterion import for non-Windows
1 parent 4e40ce6 commit a29abe8

File tree

17 files changed

+22
-22
lines changed

17 files changed

+22
-22
lines changed

.github/workflows/checking.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ jobs:
1010
fail-fast: false
1111
matrix:
1212
toolchain:
13-
- 1.67.0
13+
- 1.70.0
1414
- stable
1515
- nightly
1616
os:

.github/workflows/codequality.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ jobs:
1010
strategy:
1111
matrix:
1212
toolchain:
13-
- 1.67.0
13+
- 1.70.0
1414
- stable
1515

1616
steps:

.github/workflows/testing.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ jobs:
1010
fail-fast: false
1111
matrix:
1212
toolchain:
13-
- 1.67.0
13+
- 1.70.0
1414
- stable
1515
os:
1616
- ubuntu-latest
@@ -35,7 +35,7 @@ jobs:
3535
fail-fast: false
3636
matrix:
3737
toolchain:
38-
- 1.67.0
38+
- 1.70.0
3939
- stable
4040
os:
4141
- ubuntu-latest

algorithms/linfa-bayes/src/gaussian_nb.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ where
8484
let nclass = xclass.nrows();
8585

8686
// We compute the update of the gaussian mean and variance
87-
let mut class_info = model
87+
let class_info = model
8888
.class_info
8989
.entry(class)
9090
.or_insert_with(GaussianClassInfo::default);

algorithms/linfa-bayes/src/multinomial_nb.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ where
7171
let nclass = xclass.nrows();
7272

7373
// We compute the feature log probabilities and feature counts on the slice corresponding to the current class
74-
let mut class_info = model
74+
let class_info = model
7575
.class_info
7676
.entry(class)
7777
.or_insert_with(MultinomialClassInfo::default);

algorithms/linfa-clustering/src/gaussian_mixture/algorithm.rs

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use crate::gaussian_mixture::errors::{GmmError, Result};
1+
use crate::gaussian_mixture::errors::GmmError;
22
use crate::gaussian_mixture::hyperparams::{
33
GmmCovarType, GmmInitMethod, GmmParams, GmmValidParams,
44
};
@@ -126,7 +126,7 @@ impl<F: Float> GaussianMixtureModel<F> {
126126
hyperparameters: &GmmValidParams<F, R>,
127127
dataset: &DatasetBase<ArrayBase<D, Ix2>, T>,
128128
mut rng: R,
129-
) -> Result<GaussianMixtureModel<F>> {
129+
) -> Result<GaussianMixtureModel<F>, GmmError> {
130130
let observations = dataset.records().view();
131131
let n_samples = observations.nrows();
132132

@@ -216,7 +216,7 @@ impl<F: Float> GaussianMixtureModel<F> {
216216
resp: &Array2<F>,
217217
_covar_type: &GmmCovarType,
218218
reg_covar: F,
219-
) -> Result<(Array1<F>, Array2<F>, Array3<F>)> {
219+
) -> Result<(Array1<F>, Array2<F>, Array3<F>), GmmError> {
220220
let nk = resp.sum_axis(Axis(0));
221221
if nk.min()? < &(F::cast(10.) * F::epsilon()) {
222222
return Err(GmmError::EmptyCluster(format!(
@@ -255,7 +255,7 @@ impl<F: Float> GaussianMixtureModel<F> {
255255

256256
fn compute_precisions_cholesky_full<D: Data<Elem = F>>(
257257
covariances: &ArrayBase<D, Ix3>,
258-
) -> Result<Array3<F>> {
258+
) -> Result<Array3<F>, GmmError> {
259259
let n_clusters = covariances.shape()[0];
260260
let n_features = covariances.shape()[1];
261261
let mut precisions_chol = Array::zeros((n_clusters, n_features, n_features));
@@ -290,7 +290,7 @@ impl<F: Float> GaussianMixtureModel<F> {
290290
fn e_step<D: Data<Elem = F>>(
291291
&self,
292292
observations: &ArrayBase<D, Ix2>,
293-
) -> Result<(F, Array2<F>)> {
293+
) -> Result<(F, Array2<F>), GmmError> {
294294
let (log_prob_norm, log_resp) = self.estimate_log_prob_resp(observations);
295295
let log_mean = log_prob_norm.mean().unwrap();
296296
Ok((log_mean, log_resp))
@@ -301,7 +301,7 @@ impl<F: Float> GaussianMixtureModel<F> {
301301
reg_covar: F,
302302
observations: &ArrayBase<D, Ix2>,
303303
log_resp: &Array2<F>,
304-
) -> Result<()> {
304+
) -> Result<(), GmmError> {
305305
let n_samples = observations.nrows();
306306
let (weights, means, covariances) = Self::estimate_gaussian_parameters(
307307
observations,
@@ -407,7 +407,7 @@ impl<F: Float, R: Rng + Clone, D: Data<Elem = F>, T> Fit<ArrayBase<D, Ix2>, T, G
407407
{
408408
type Object = GaussianMixtureModel<F>;
409409

410-
fn fit(&self, dataset: &DatasetBase<ArrayBase<D, Ix2>, T>) -> Result<Self::Object> {
410+
fn fit(&self, dataset: &DatasetBase<ArrayBase<D, Ix2>, T>) -> Result<Self::Object, GmmError> {
411411
let observations = dataset.records().view();
412412
let mut gmm = GaussianMixtureModel::<F>::new(self, dataset, self.rng())?;
413413

algorithms/linfa-clustering/src/gaussian_mixture/errors.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@ use linfa_linalg::LinalgError;
44
#[cfg(feature = "blas")]
55
use ndarray_linalg::error::LinalgError;
66
use thiserror::Error;
7-
pub type Result<T> = std::result::Result<T, GmmError>;
87

98
/// An error when modeling a GMM algorithm
109
#[derive(Error, Debug)]

algorithms/linfa-clustering/src/gaussian_mixture/hyperparams.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use crate::gaussian_mixture::errors::{GmmError, Result};
1+
use crate::gaussian_mixture::errors::GmmError;
22
use ndarray_rand::rand::{Rng, SeedableRng};
33
use rand_xoshiro::Xoshiro256Plus;
44
#[cfg(feature = "serde")]
@@ -170,7 +170,7 @@ impl<F: Float, R: Rng> ParamGuard for GmmParams<F, R> {
170170
type Checked = GmmValidParams<F, R>;
171171
type Error = GmmError;
172172

173-
fn check_ref(&self) -> Result<&Self::Checked> {
173+
fn check_ref(&self) -> Result<&Self::Checked, GmmError> {
174174
if self.0.n_clusters == 0 {
175175
Err(GmmError::InvalidValue(
176176
"`n_clusters` cannot be 0!".to_string(),
@@ -194,7 +194,7 @@ impl<F: Float, R: Rng> ParamGuard for GmmParams<F, R> {
194194
}
195195
}
196196

197-
fn check(self) -> Result<Self::Checked> {
197+
fn check(self) -> Result<Self::Checked, GmmError> {
198198
self.check_ref()?;
199199
Ok(self.0)
200200
}

algorithms/linfa-clustering/src/optics/errors.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
use thiserror::Error;
2-
pub type Result<T> = std::result::Result<T, OpticsError>;
32

43
/// An error when performing OPTICS Analysis
54
#[derive(Error, Debug)]

algorithms/linfa-clustering/src/optics/hyperparams.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use crate::optics::errors::{OpticsError, Result};
1+
use crate::optics::errors::OpticsError;
22
use linfa::{param_guard::TransformGuard, Float, ParamGuard};
33
#[cfg(feature = "serde")]
44
use serde_crate::{Deserialize, Serialize};
@@ -91,7 +91,7 @@ impl<F: Float, D, N> ParamGuard for OpticsParams<F, D, N> {
9191
type Checked = OpticsValidParams<F, D, N>;
9292
type Error = OpticsError;
9393

94-
fn check_ref(&self) -> Result<&Self::Checked> {
94+
fn check_ref(&self) -> Result<&Self::Checked, OpticsError> {
9595
if self.0.tolerance <= F::zero() {
9696
Err(OpticsError::InvalidValue(
9797
"`tolerance` must be greater than 0!".to_string(),
@@ -106,7 +106,7 @@ impl<F: Float, D, N> ParamGuard for OpticsParams<F, D, N> {
106106
}
107107
}
108108

109-
fn check(self) -> Result<Self::Checked> {
109+
fn check(self) -> Result<Self::Checked, OpticsError> {
110110
self.check_ref()?;
111111
Ok(self.0)
112112
}

src/benchmarks/mod.rs

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
#[cfg(feature = "benchmarks")]
22
pub mod config {
3-
use criterion::{measurement::WallTime, BenchmarkGroup, Criterion};
3+
#[cfg(not(target_os = "windows"))]
4+
use criterion::Criterion;
5+
use criterion::{measurement::WallTime, BenchmarkGroup};
46
#[cfg(not(target_os = "windows"))]
57
use pprof::criterion::{Output, PProfProfiler};
68
use std::time::Duration;

0 commit comments

Comments (0)