Skip to content
39 changes: 37 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,9 @@ serde_json = "1"

ctrlc = "3.4"

clap = { version = "4", features = ["derive"] }
bincode = { version = "2", features = ["serde"] }

# dev dependencies
criterion = "0.5"
approx = "0.4"
Expand Down
6 changes: 3 additions & 3 deletions crates/ego/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ keywords = ["machine-learning", "bayesian", "optimization"]
categories = ["algorithms", "mathematics", "science"]

[features]
default = []
default = ["persistent"]

persistent = ["egobox-moe/persistent"]
blas = ["ndarray-linalg", "linfa/ndarray-linalg", "linfa-pls/blas"]
Expand Down Expand Up @@ -43,7 +43,7 @@ nlopt = { version = "0.8", optional = true }

rand_xoshiro = { version = "0.6", features = ["serde1"] }
argmin = { version = "0.10.0", features = ["serde1", "ctrlc"] }
bincode = { version = "1.3.0" }
bincode.workspace = true
web-time = "1.1.0"
libm = "0.2.6"
finitediff.workspace = true
Expand All @@ -53,7 +53,7 @@ log.workspace = true
env_logger.workspace = true
thiserror.workspace = true
anyhow.workspace = true
clap = { version = "4", features = ["derive"] }
clap.workspace = true

serde = { version = "1", features = ["derive", "rc"] }
serde_json.workspace = true
Expand Down
36 changes: 17 additions & 19 deletions crates/ego/examples/g24.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use egobox_doe::{Lhs, SamplingMethod};
use egobox_ego::{Cstr, EgorServiceFactory};
use ndarray::{Array2, ArrayBase, ArrayView2, Axis, Data, Ix1, Zip, array, concatenate};
use egobox_ego::{EgorBuilder, InfillOptimizer};
use ndarray::{Array2, ArrayBase, ArrayView2, Data, Ix1, Zip, array};

// Objective
fn g24(x: &ArrayBase<impl Data<Elem = f64>, Ix1>) -> f64 {
Expand Down Expand Up @@ -30,21 +30,19 @@ fn f_g24(x: &ArrayView2<f64>) -> Array2<f64> {

fn main() {
let xlimits = array![[0., 3.], [0., 4.]];
let mut doe = Lhs::new(&xlimits).sample(3);

// We use Egor optimizer as a service
let egor = EgorServiceFactory::<Cstr>::optimize()
.configure(|config| config.n_cstr(2).seed(42))
.min_within(&xlimits);

let mut y_doe = f_g24(&doe.view());
for _i in 0..10 {
// We tell function values and ask for next x location
let x_suggested = egor.suggest(&doe, &y_doe);

doe = concatenate![Axis(0), doe, x_suggested];
y_doe = f_g24(&doe.view());
}

println!("G24 optim x suggestion history = {doe:?}");
let doe = Lhs::new(&xlimits).sample(3);

let res = EgorBuilder::optimize(f_g24)
.configure(|config| {
config
.n_cstr(2)
.doe(&doe)
.max_iters(100)
.infill_optimizer(InfillOptimizer::Cobyla)
.seed(42)
})
.min_within(&xlimits)
.run()
.expect("Minimize failure");
println!("G24 optim result = {}", res.y_opt);
}
50 changes: 50 additions & 0 deletions crates/ego/examples/g24_suggest.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
use egobox_doe::{Lhs, SamplingMethod};
use egobox_ego::{Cstr, EgorServiceFactory};
use ndarray::{Array2, ArrayBase, ArrayView2, Axis, Data, Ix1, Zip, array, concatenate};

// Objective
fn g24(x: &ArrayBase<impl Data<Elem = f64>, Ix1>) -> f64 {
    // G24 objective: single global optimum y_opt = -5.5080 at x_opt = (2.3295, 3.1785)
    -(x[0] + x[1])
}

// Constraints < 0
/// First G24 constraint, feasible iff the returned value is <= 0.
fn g24_c1(x: &ArrayBase<impl Data<Elem = f64>, Ix1>) -> f64 {
    // powi is the idiomatic (and cheaper) form for integer exponents vs powf(4.0)
    -2.0 * x[0].powi(4) + 8.0 * x[0].powi(3) - 8.0 * x[0].powi(2) + x[1] - 2.0
}

/// Second G24 constraint, feasible iff the returned value is <= 0.
fn g24_c2(x: &ArrayBase<impl Data<Elem = f64>, Ix1>) -> f64 {
    // powi is the idiomatic (and cheaper) form for integer exponents vs powf(4.0)
    -4.0 * x[0].powi(4) + 32.0 * x[0].powi(3) - 88.0 * x[0].powi(2) + 96.0 * x[0] + x[1]
        - 36.0
}

/// Evaluate objective and both constraints for every row of `x`.
/// Column 0 is the objective, columns 1–2 the constraint values.
fn f_g24(x: &ArrayView2<f64>) -> Array2<f64> {
    let mut out = Array2::zeros((x.nrows(), 3));
    for (mut row_out, row_in) in out.rows_mut().into_iter().zip(x.rows()) {
        row_out.assign(&array![g24(&row_in), g24_c1(&row_in), g24_c2(&row_in)]);
    }
    out
}

fn main() {
    // Design-space bounds: x0 in [0, 3], x1 in [0, 4]
    let xlimits = array![[0., 3.], [0., 4.]];

    // Egor in "ask-and-tell" (service) mode: we drive the optimization loop ourselves
    let service = EgorServiceFactory::<Cstr>::optimize()
        .configure(|c| c.n_cstr(2).seed(42))
        .min_within(&xlimits);

    // Initial 3-point LHS design, evaluated up front
    let mut x_hist = Lhs::new(&xlimits).sample(3);
    let mut y_hist = f_g24(&x_hist.view());

    let mut it = 0;
    while it < 10 {
        // Tell the observed values, ask for the next promising location,
        // then append it and re-evaluate the whole history
        let x_next = service.suggest(&x_hist, &y_hist);
        x_hist = concatenate![Axis(0), x_hist, x_next];
        y_hist = f_g24(&x_hist.view());
        it += 1;
    }

    println!("G24 optim x suggestion history = {:?}", x_hist);
}
2 changes: 1 addition & 1 deletion crates/ego/examples/mopta08.rs
Original file line number Diff line number Diff line change
Expand Up @@ -265,7 +265,7 @@ fn main() -> anyhow::Result<()> {
let mut xlimits = Array2::zeros((dim, 2));
xlimits.column_mut(1).assign(&Array1::ones(dim));

let res = if std::env::var(egobox_ego::EGOBOX_USE_GP_VAR_PORTFOLIO).is_ok() {
let res = if std::env::var(egobox_ego::EGOR_USE_GP_VAR_PORTFOLIO).is_ok() {
EgorBuilder::optimize(mopta_func(dim))
.configure(|config| {
config
Expand Down
2 changes: 1 addition & 1 deletion crates/ego/src/criteria/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ use ndarray::{Array1, ArrayView2};
/// determine the next most promising point expected to be the
/// optimum location of the objective function
#[clonable]
#[typetag::serde(tag = "type")]
#[typetag::serde(tag = "type_infill")]
pub trait InfillCriterion: Clone + Sync {
/// Name of the infill criterion
fn name(&self) -> &'static str;
Expand Down
4 changes: 3 additions & 1 deletion crates/ego/src/egor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -907,6 +907,7 @@ mod tests {
let doe = Lhs::new(&xlimits)
.with_rng(Xoshiro256Plus::seed_from_u64(42))
.sample(10);
let q = 2;
let res = EgorBuilder::optimize(f_g24)
.configure(|config| {
config
Expand All @@ -916,7 +917,7 @@ mod tests {
})
.n_cstr(2)
.cstr_tol(array![2e-6, 2e-6])
.q_points(2)
.q_points(q)
.qei_strategy(QEiStrategy::KrigingBeliever)
.doe(&doe)
.target(-5.5030)
Expand All @@ -926,6 +927,7 @@ mod tests {
.min_within(&xlimits)
.run()
.expect("Egor minimization");
assert_eq!(res.x_doe.nrows(), doe.nrows() + q * res.state.iter as usize);
println!("G24 optim result = {res:?}");
let expected = array![2.3295, 3.1785];
assert_abs_diff_eq!(expected, res.x_opt, epsilon = 2e-2);
Expand Down
12 changes: 12 additions & 0 deletions crates/ego/src/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,4 +38,16 @@ pub enum EgoError {
/// When global EGO step cannot add any point
#[error("EGO exit (no more point)")]
NoMorePointToAddError(Box<EgorState<f64>>),
/// When error during saving
#[cfg(feature = "persistent")]
#[error("Save error: {0}")]
SaveBinaryError(#[from] bincode::error::EncodeError),
/// When error during loading
#[cfg(feature = "persistent")]
#[error("Load error: {0}")]
LoadBinaryError(#[from] bincode::error::DecodeError),
/// When error during saving
#[cfg(feature = "persistent")]
#[error("Save error: {0}")]
JsonError(#[from] serde_json::Error),
}
55 changes: 49 additions & 6 deletions crates/ego/src/gpmix/mixint.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,10 @@ use crate::errors::{EgoError, Result};
use crate::types::{SurrogateBuilder, XType};
use egobox_doe::{FullFactorial, Lhs, LhsKind, Random};
use egobox_gp::ThetaTuning;
use egobox_gp::metrics::CrossValScore;
use egobox_moe::{
Clustered, Clustering, CorrelationSpec, FullGpSurrogate, GpMixture, GpMixtureParams,
GpSurrogate, GpSurrogateExt, MixtureGpSurrogate, NbClusters, Recombination, RegressionSpec,
GpQualityAssurance, GpScore, GpSurrogate, GpSurrogateExt, MixtureGpSurrogate, NbClusters,
Recombination, RegressionSpec,
};
use linfa::traits::{Fit, PredictInplace};
use linfa::{DatasetBase, Float, ParamGuard};
Expand Down Expand Up @@ -612,13 +612,32 @@ impl GpSurrogate for MixintGpMixture {
let mut file = fs::File::create(path).unwrap();
let bytes = match format {
GpFileFormat::Json => serde_json::to_vec(self).map_err(MoeError::SaveJsonError)?,
GpFileFormat::Binary => bincode::serialize(self).map_err(MoeError::SaveBinaryError)?,
GpFileFormat::Binary => {
bincode::serde::encode_to_vec(self, bincode::config::standard())
.map_err(MoeError::SaveBinaryError)?
}
};
file.write_all(&bytes)?;
Ok(())
}
}

impl MixintGpMixture {
    /// Load a `MixintGpMixture` previously saved at `path` in the given `format`.
    ///
    /// # Errors
    /// Returns an error when the file cannot be read or when its content
    /// cannot be deserialized with the requested format.
    #[cfg(feature = "persistent")]
    pub fn load(path: &str, format: GpFileFormat) -> Result<Box<MixintGpMixture>> {
        let data = fs::read(path)?;
        let moe = match format {
            // Propagate deserialization failures instead of panicking:
            // `EgoError` provides `#[from]` conversions for both the
            // serde_json and bincode error types.
            GpFileFormat::Json => serde_json::from_slice(&data)?,
            GpFileFormat::Binary => {
                bincode::serde::decode_from_slice(&data, bincode::config::standard())
                    .map(|(surrogate, _)| surrogate)?
            }
        };
        Ok(Box::new(moe))
    }
}

#[typetag::serde]
impl GpSurrogateExt for MixintGpMixture {
fn predict_gradients(&self, x: &ArrayView2<f64>) -> egobox_moe::Result<Array2<f64>> {
Expand Down Expand Up @@ -652,16 +671,40 @@ impl GpSurrogateExt for MixintGpMixture {
}
}

impl CrossValScore<f64, EgoError, MixintGpMixtureParams, Self> for MixintGpMixture {
impl GpScore<EgoError, MixintGpMixtureParams, Self> for MixintGpMixture {
fn params(&self) -> MixintGpMixtureParams {
self.params.clone().into()
}

fn training_data(&self) -> &(Array2<f64>, Array1<f64>) {
&self.training_data
}
}

fn params(&self) -> MixintGpMixtureParams {
MixintGpMixtureParams::from(self.params.clone())
#[typetag::serde]
// Object-safe facade: forwards each quality metric to the generic `GpScore`
// implementation above so the metrics remain callable through trait objects
// (and serializable via typetag). NOTE(review): the `&dyn GpScore<_, _, _>`
// casts rely on type inference resolving the trait parameters — confirm
// against the `GpScore` trait definition.
impl GpQualityAssurance for MixintGpMixture {
    // Borrow the (X, y) training set held by the surrogate.
    fn training_data(&self) -> &(Array2<f64>, Array1<f64>) {
        (self as &dyn GpScore<_, _, _>).training_data()
    }

    // Q2 predictivity coefficient using k-fold cross-validation.
    fn q2(&self, kfold: usize) -> f64 {
        (self as &dyn GpScore<_, _, _>).q2_score(kfold)
    }

    // Q2 predictivity coefficient using leave-one-out cross-validation.
    fn looq2(&self) -> f64 {
        (self as &dyn GpScore<_, _, _>).looq2_score()
    }

    // Predictive variance adequacy using k-fold cross-validation.
    fn pva(&self, kfold: usize) -> f64 {
        (self as &dyn GpScore<_, _, _>).pva_score(kfold)
    }

    // Predictive variance adequacy using leave-one-out cross-validation.
    fn loopva(&self) -> f64 {
        (self as &dyn GpScore<_, _, _>).loopva_score()
    }
}

#[typetag::serde]
impl MixtureGpSurrogate for MixintGpMixture {
fn experts(&self) -> &Vec<Box<dyn FullGpSurrogate>> {
self.moe.experts()
Expand Down
5 changes: 3 additions & 2 deletions crates/ego/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -300,8 +300,9 @@ pub use crate::gpmix::spec::{CorrelationSpec, RegressionSpec};
pub use crate::solver::*;
pub use crate::types::*;
pub use crate::utils::{
CHECKPOINT_FILE, Checkpoint, CheckpointingFrequency, EGOBOX_LOG, EGOBOX_USE_GP_VAR_PORTFOLIO,
EGOBOX_USE_MAX_PROBA_OF_FEASIBILITY, HotStartCheckpoint, HotStartMode, find_best_result_index,
CHECKPOINT_FILE, Checkpoint, CheckpointingFrequency, EGOBOX_LOG, EGOR_GP_FILENAME,
EGOR_INITIAL_GP_FILENAME, EGOR_USE_GP_RECORDER, EGOR_USE_GP_VAR_PORTFOLIO,
EGOR_USE_MAX_PROBA_OF_FEASIBILITY, HotStartCheckpoint, HotStartMode, find_best_result_index,
};

mod optimizers;
Expand Down
Loading
Loading