Skip to content

Commit

Permalink
Merge pull request #32 from LAPKB/config
Browse files Browse the repository at this point in the history
Reworked settings and run configuration
  • Loading branch information
Siel authored Jan 23, 2024
2 parents b6524f5 + 04b6530 commit cd37328
Show file tree
Hide file tree
Showing 21 changed files with 340 additions and 410 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -22,3 +22,4 @@ meta*.csv
stop
.vscode
*.f90
settings.json
2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ csv = "1.2.1"
ndarray = { version = "0.15.6", features = ["rayon"] }
serde = "1.0.188"
serde_derive = "1.0.188"
serde_json = "1.0.66"
sobol_burley = "0.5.0"
toml = { version = "0.8.1", features = ["preserve_order"] }
ode_solvers = "0.3.7"
Expand All @@ -46,6 +47,7 @@ faer = { version = "0.15.0", features = ["nalgebra", "ndarray"] }
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.17", features = ["env-filter", "fmt", "time"] }
chrono = "0.4"
config = "0.13"

[dev-dependencies]
criterion = "0.5"
Expand Down
11 changes: 6 additions & 5 deletions examples/bimodal_ke/config.toml
Original file line number Diff line number Diff line change
@@ -1,18 +1,19 @@
[paths]
data = "examples/data/bimodal_ke.csv"
log_out = "log/bimodal_ke.log"
#prior_dist = "theta_bimodal_ke.csv"
log = "log/bimodal_ke.log"
#prior = "theta_bimodal_ke.csv"

[config]
cycles = 1024
engine = "NPAG"
init_points = 10000
init_points = 2129
seed = 347
tui = true
pmetrics_outputs = true
output = true
cache = true
idelta = 0.1
log_level = "debug"
log_level = "info"


[random]
Ke = [0.001, 3.0]
Expand Down
18 changes: 6 additions & 12 deletions src/algorithms.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use crate::prelude::{self, settings::run::Settings};
use crate::prelude::{self, settings::Settings};

use output::NPResult;
use prelude::{datafile::Scenario, *};
Expand All @@ -9,12 +9,6 @@ mod npag;
mod npod;
mod postprob;

// pub enum Type {
// NPAG,
// NPOD,
// POSTPROB,
// }

pub trait Algorithm {
fn fit(&mut self) -> NPResult;
fn to_npresult(&self) -> NPResult;
Expand All @@ -35,17 +29,17 @@ where
Err(err) => panic!("Unable to remove previous stop file: {}", err),
}
}
let ranges = settings.computed.random.ranges.clone();
let ranges = settings.random.ranges();
let theta = initialization::sample_space(&settings, &ranges);

//This should be a macro, so it can automatically expand as soon as we add a new option to the Type enum
match settings.parsed.config.engine.as_str() {
match settings.config.engine.as_str() {
"NPAG" => Box::new(npag::NPAG::new(
engine,
ranges,
theta,
scenarios,
settings.parsed.error.poly,
settings.error.poly,
tx,
settings,
)),
Expand All @@ -54,15 +48,15 @@ where
ranges,
theta,
scenarios,
settings.parsed.error.poly,
settings.error.poly,
tx,
settings,
)),
"POSTPROB" => Box::new(postprob::POSTPROB::new(
engine,
theta,
scenarios,
settings.parsed.error.poly,
settings.error.poly,
tx,
settings,
)),
Expand Down
17 changes: 9 additions & 8 deletions src/algorithms/npag.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use crate::{
output::NPResult,
output::{CycleLog, NPCycle},
prob, qr,
settings::run::Settings,
settings::Settings,
simulation::predict::Engine,
simulation::predict::{sim_obs, Predict},
},
Expand Down Expand Up @@ -118,15 +118,15 @@ where
f1: f64::default(),
cycle: 1,
gamma_delta: 0.1,
gamma: settings.parsed.error.value,
error_type: match settings.parsed.error.class.to_lowercase().as_str() {
gamma: settings.error.value,
error_type: match settings.error.class.to_lowercase().as_str() {
"additive" => ErrorType::Add,
"proportional" => ErrorType::Prop,
_ => panic!("Error type not supported"),
},
converged: false,
cycle_log: CycleLog::new(&settings.computed.random.names),
cache: settings.parsed.config.cache.unwrap_or(false),
cycle_log: CycleLog::new(&settings.random.names()),
cache: settings.config.cache,
tx,
settings,
scenarios,
Expand Down Expand Up @@ -277,8 +277,6 @@ where
gamlam: self.gamma,
};
self.tx.send(Comm::NPCycle(state.clone())).unwrap();
self.cycle_log
.push_and_write(state, self.settings.parsed.config.pmetrics_outputs.unwrap());

// Increasing objf signals instability or model misspecification.
if self.last_objf > self.objf {
Expand All @@ -292,6 +290,9 @@ where
self.w = self.lambda.clone();
let pyl = self.psi.dot(&self.w);

self.cycle_log
.push_and_write(state, self.settings.config.output);

// Stop if we have reached convergence criteria
if (self.last_objf - self.objf).abs() <= THETA_G && self.eps > THETA_E {
self.eps /= 2.;
Expand All @@ -309,7 +310,7 @@ where
}

// Stop if we have reached maximum number of cycles
if self.cycle >= self.settings.parsed.config.cycles {
if self.cycle >= self.settings.config.cycles {
tracing::warn!("Maximum number of cycles reached");
break;
}
Expand Down
14 changes: 7 additions & 7 deletions src/algorithms/npod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ use crate::{
output::NPResult,
output::{CycleLog, NPCycle},
prob, qr,
settings::run::Settings,
settings::Settings,
simulation::predict::Engine,
simulation::predict::{sim_obs, Predict},
},
Expand Down Expand Up @@ -111,15 +111,15 @@ where
objf: f64::INFINITY,
cycle: 1,
gamma_delta: 0.1,
gamma: settings.parsed.error.value,
error_type: match settings.parsed.error.class.as_str() {
gamma: settings.error.value,
error_type: match settings.error.class.as_str() {
"additive" => ErrorType::Add,
"proportional" => ErrorType::Prop,
_ => panic!("Error type not supported"),
},
converged: false,
cycle_log: CycleLog::new(&settings.computed.random.names),
cache: settings.parsed.config.cache.unwrap_or(false),
cycle_log: CycleLog::new(&settings.random.names()),
cache: settings.config.cache,
tx,
settings,
scenarios,
Expand Down Expand Up @@ -296,7 +296,7 @@ where
}

// Stop if we have reached maximum number of cycles
if self.cycle >= self.settings.parsed.config.cycles {
if self.cycle >= self.settings.config.cycles {
tracing::warn!("Maximum number of cycles reached");
break;
}
Expand All @@ -308,7 +308,7 @@ where
}
//TODO: the cycle might break before reaching this point
self.cycle_log
.push_and_write(state, self.settings.parsed.config.pmetrics_outputs.unwrap());
.push_and_write(state, self.settings.config.output);

self.cycle += 1;

Expand Down
6 changes: 3 additions & 3 deletions src/algorithms/postprob.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use crate::{
ipm,
output::NPResult,
prob,
settings::run::Settings,
settings::Settings,
simulation::predict::Engine,
simulation::predict::{sim_obs, Predict},
},
Expand Down Expand Up @@ -82,8 +82,8 @@ where
objf: f64::INFINITY,
cycle: 0,
converged: false,
gamma: settings.parsed.error.value,
error_type: match settings.parsed.error.class.as_str() {
gamma: settings.error.value,
error_type: match settings.error.class.as_str() {
"additive" => ErrorType::Add,
"proportional" => ErrorType::Prop,
_ => panic!("Error type not supported"),
Expand Down
34 changes: 20 additions & 14 deletions src/entrypoints.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use crate::prelude::{
*,
};
use crate::routines::datafile::Scenario;
use crate::routines::settings::run::Settings;
use crate::routines::settings::*;

use csv::{ReaderBuilder, WriterBuilder};
use eyre::Result;
Expand Down Expand Up @@ -35,16 +35,16 @@ pub fn simulate<S>(engine: Engine<S>, settings_path: String) -> Result<()>
where
S: Predict<'static> + std::marker::Sync + std::marker::Send + 'static + Clone,
{
let settings = settings::simulator::read(settings_path);
let theta_file = File::open(settings.paths.theta).unwrap();
let settings: Settings = read_settings(settings_path).unwrap();
let theta_file = File::open(settings.paths.prior.unwrap()).unwrap();
let mut reader = ReaderBuilder::new()
.has_headers(true)
.from_reader(theta_file);
let theta: Array2<f64> = reader.deserialize_array2_dynamic().unwrap();

// Expand data
let idelta = settings.config.idelta.unwrap_or(0.0);
let tad = settings.config.tad.unwrap_or(0.0);
let idelta = settings.config.idelta;
let tad = settings.config.tad;
let mut scenarios = datafile::parse(&settings.paths.data).unwrap();
scenarios.iter_mut().for_each(|scenario| {
*scenario = scenario.add_event_interval(idelta, tad);
Expand Down Expand Up @@ -88,17 +88,23 @@ where
S: Predict<'static> + std::marker::Sync + std::marker::Send + 'static + Clone,
{
let now = Instant::now();
let settings = settings::run::read(settings_path);
let settings = match read_settings(settings_path) {
Ok(s) => s,
Err(e) => {
eprintln!("Error reading settings: {:?}", e);
std::process::exit(-1);
}
};
let (tx, rx) = mpsc::unbounded_channel::<Comm>();
let maintx = tx.clone();
logger::setup_log(&settings, tx.clone());
tracing::info!("Starting NPcore");

// Read input data and remove excluded scenarios (if any)
let mut scenarios = datafile::parse(&settings.parsed.paths.data).unwrap();
if let Some(exclude) = &settings.parsed.config.exclude {
let mut scenarios = datafile::parse(&settings.paths.data).unwrap();
if let Some(exclude) = &settings.config.exclude {
for val in exclude {
scenarios.remove(val.as_integer().unwrap() as usize);
scenarios.remove(val.as_ptr() as usize);
}
}

Expand All @@ -111,7 +117,7 @@ where

// Spawn new thread for TUI
let settings_tui = settings.clone();
let handle = if settings.parsed.config.tui {
let handle = if settings.config.tui {
spawn(move || {
start_ui(rx, settings_tui).expect("Failed to start TUI");
})
Expand All @@ -128,10 +134,10 @@ where
tracing::info!("Total time: {:.2?}", now.elapsed());

// Write output files (if configured)
if let Some(write) = &settings.parsed.config.pmetrics_outputs {
let idelta = settings.parsed.config.idelta.unwrap_or(0.0);
let tad = settings.parsed.config.tad.unwrap_or(0.0);
result.write_outputs(*write, &engine, idelta, tad);
if settings.config.output {
let idelta = settings.config.idelta;
let tad = settings.config.tad;
result.write_outputs(true, &engine, idelta, tad);
}

tracing::info!("Program complete");
Expand Down
5 changes: 1 addition & 4 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,7 @@ pub mod routines {
pub mod adaptative_grid;
}

pub mod settings {
pub mod run;
pub mod simulator;
}
pub mod settings;
pub mod evaluation {

pub mod ipm;
Expand Down
20 changes: 8 additions & 12 deletions src/logger.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use crate::routines::settings::run::Settings;
use crate::routines::settings::Settings;
use crate::tui::ui::Comm;
use std::io::{self, Write};
use tokio::sync::mpsc::UnboundedSender;
Expand All @@ -22,14 +22,10 @@ use tracing_subscriber::EnvFilter;
/// If not, the log messages are written to stdout.
pub fn setup_log(settings: &Settings, ui_tx: UnboundedSender<Comm>) {
// Use the log level defined in configuration file, or default to info
let log_level = settings
.parsed
.config
.log_level
.as_ref()
.map(|level| level.as_str())
.unwrap_or("info")
.to_lowercase();
let log_level = settings.config.log_level.as_str();

// Use the log file defined in configuration file, or default to npcore.log
let log_path = settings.paths.log.as_ref().unwrap();

let env_filter = EnvFilter::new(&log_level);

Expand All @@ -41,7 +37,7 @@ pub fn setup_log(settings: &Settings, ui_tx: UnboundedSender<Comm>) {
.create(true)
.write(true)
.truncate(true)
.open(&settings.parsed.paths.log_out)
.open(log_path)
.expect("Failed to open log file - does the directory exist?");

let file_layer = fmt::layer()
Expand All @@ -50,7 +46,7 @@ pub fn setup_log(settings: &Settings, ui_tx: UnboundedSender<Comm>) {
.with_timer(CompactTimestamp);

// Define layer for stdout
let stdout_layer = if !settings.parsed.config.tui {
let stdout_layer = if !settings.config.tui {
let layer = fmt::layer()
.with_writer(std::io::stdout)
.with_ansi(true)
Expand All @@ -66,7 +62,7 @@ pub fn setup_log(settings: &Settings, ui_tx: UnboundedSender<Comm>) {
ui_tx: ui_tx.clone(),
};

let tui_layer = if settings.parsed.config.tui {
let tui_layer = if settings.config.tui {
let layer = fmt::layer()
.with_writer(tui_writer_closure)
.with_ansi(false)
Expand Down
Loading

0 comments on commit cd37328

Please sign in to comment.