--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/experiments.rs Thu Dec 01 23:07:35 2022 +0200
@@ -0,0 +1,299 @@
+/*!
+Experimental setups.
+*/
+
+//use numeric_literals::replace_float_literals;
+use serde::{Serialize, Deserialize};
+use clap::ValueEnum;
+use std::collections::HashMap;
+use std::hash::{Hash, Hasher};
+use std::collections::hash_map::DefaultHasher;
+
+use alg_tools::bisection_tree::*;
+use alg_tools::error::DynResult;
+use alg_tools::norms::Linfinity;
+
+use crate::ExperimentOverrides;
+use crate::kernels::*;
+use crate::kernels::{SupportProductFirst as Prod};
+use crate::pdps::PDPSConfig;
+use crate::types::*;
+use crate::run::{
+    RunnableExperiment,
+    Experiment,
+    Named,
+    DefaultAlgorithm,
+    AlgorithmConfig
+};
+//use crate::fb::FBGenericConfig;
+use crate::rand_distr::{SerializableNormal, SaltAndPepper};
+
+/// Experiment shorthands, to be used with the command line parser
+#[derive(ValueEnum, Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
+#[allow(non_camel_case_types)]
+pub enum DefaultExperiment {
+    /// One dimension, cut Gaussian spread, 2-norm-squared data fidelity
+    #[clap(name = "1d")]
+    Experiment1D,
+    /// One dimension, “fast” spread, 2-norm-squared data fidelity
+    #[clap(name = "1d_fast")]
+    Experiment1DFast,
+    /// Two dimensions, cut Gaussian spread, 2-norm-squared data fidelity
+    #[clap(name = "2d")]
+    Experiment2D,
+    /// Two dimensions, “fast” spread, 2-norm-squared data fidelity
+    #[clap(name = "2d_fast")]
+    Experiment2DFast,
+    /// One dimension, cut Gaussian spread, 1-norm data fidelity
+    #[clap(name = "1d_l1")]
+    Experiment1D_L1,
+    /// One dimension, “fast” spread, 1-norm data fidelity
+    #[clap(name = "1d_l1_fast")]
+    Experiment1D_L1_Fast,
+    /// Two dimensions, cut Gaussian spread, 1-norm data fidelity
+    #[clap(name = "2d_l1")]
+    Experiment2D_L1,
+    /// Two dimensions, “fast” spread, 1-norm data fidelity
+    #[clap(name = "2d_l1_fast")]
+    Experiment2D_L1_Fast,
+}
+
+macro_rules! make_float_constant {
+    ($name:ident = $value:expr) => {
+        #[derive(Debug, Copy, Eq, PartialEq, Clone, Serialize, Deserialize)]
+        #[serde(into = "float")]
+        struct $name;
+        impl Into<float> for $name {
+            #[inline]
+            fn into(self) -> float { $value }
+        }
+        impl Constant for $name {
+            type Type = float;
+            fn value(&self) -> float { $value }
+        }
+    }
+}
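+
+// For instance, `make_float_constant!(CutOff1 = 0.15)` below defines a unit
+// struct `CutOff1` whose `Into<float>` and `Constant` implementations both
+// return `0.15`, so the constant travels in the type (as with the
+// `BallIndicator` radii in the experiment definitions) while still
+// serialising as a plain float.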
+
+/// Ground-truth measure spike locations and magnitudes for 1D experiments
+static MU_TRUE_1D_BASIC : [(float, float); 4] = [
+    (0.10, 10.0),
+    (0.30, 2.0),
+    (0.70, 3.0),
+    (0.80, 5.0)
+];
+
+/// Ground-truth measure spike locations and magnitudes for 2D experiments
+static MU_TRUE_2D_BASIC : [([float; 2], float); 4] = [
+    ([0.15, 0.15], 10.0),
+    ([0.75, 0.45], 2.0),
+    ([0.80, 0.50], 4.0),
+    ([0.30, 0.70], 5.0)
+];
+
+//#[replace_float_literals(F::cast_from(literal))]
+impl DefaultExperiment {
+    /// Convert the experiment shorthand into a runnable experiment configuration.
+    pub fn get_experiment(&self, cli : &ExperimentOverrides<float>) -> DynResult<Box<dyn RunnableExperiment<float>>> {
+        let name = "pointsource".to_string()
+                    + self.to_possible_value().unwrap().get_name();
+
+        let kernel_plot_width = 0.2;
+
+        const BASE_SEED : u64 = 915373234;
+
+        const N_SENSORS_1D : usize = 100;
+        make_float_constant!(SensorWidth1D = 0.4/(N_SENSORS_1D as float));
+
+        const N_SENSORS_2D : usize = 16;
+        make_float_constant!(SensorWidth2D = 0.4/(N_SENSORS_2D as float));
+
+        const N_SENSORS_2D_MORE : usize = 32;
+        make_float_constant!(SensorWidth2DMore = 0.4/(N_SENSORS_2D_MORE as float));
+
+        make_float_constant!(Variance1 = 0.05.powi(2));
+        make_float_constant!(CutOff1 = 0.15);
+        make_float_constant!(Hat1 = 0.16);
+
+        // We use different step lengths for PDPS in the 2D experiments.
+        let pdps_2d = || {
+            let τ0 = 3.0;
+            PDPSConfig {
+                τ0,
+                σ0 : 0.99 / τ0,
+                .. Default::default()
+            }
+        };
+
+        // We add a hash of the experiment name to the configured noise seed,
+        // so that different experiments do not reuse the same noise.
+        let mut h = DefaultHasher::new();
+        name.hash(&mut h);
+        let noise_seed = cli.noise_seed.unwrap_or(BASE_SEED) + h.finish();
+
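+        // Each arm below fills in the same `Experiment` record: the domain and
+        // sensor grid, the weight α (overridable through `cli.alpha`), the
+        // noise model and data fidelity term, the ground truth `μ_hat`, the
+        // `sensor`, `spread`, and `kernel` functions, and per-algorithm default
+        // overrides (currently only PDPS step lengths for the 2D experiments).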
+        use DefaultExperiment::*;
+        Ok(match self {
+            Experiment1D => {
+                let base_spread = Gaussian { variance : Variance1 };
+                let spread_cutoff = BallIndicator { r : CutOff1, exponent : Linfinity };
+                Box::new(Named { name, data : Experiment {
+                    domain : [[0.0, 1.0]].into(),
+                    sensor_count : [N_SENSORS_1D],
+                    α : cli.alpha.unwrap_or(0.09),
+                    noise_distr : SerializableNormal::new(0.0, cli.variance.unwrap_or(0.2))?,
+                    dataterm : DataTerm::L2Squared,
+                    μ_hat : MU_TRUE_1D_BASIC.into(),
+                    sensor : BallIndicator { r : SensorWidth1D, exponent : Linfinity },
+                    spread : Prod(spread_cutoff, base_spread),
+                    kernel : Prod(AutoConvolution(spread_cutoff), base_spread),
+                    kernel_plot_width,
+                    noise_seed,
+                    algorithm_defaults: HashMap::new(),
+                }})
+            },
+            Experiment1DFast => {
+                let base_spread = HatConv { radius : Hat1 };
+                Box::new(Named { name, data : Experiment {
+                    domain : [[0.0, 1.0]].into(),
+                    sensor_count : [N_SENSORS_1D],
+                    α : cli.alpha.unwrap_or(0.06),
+                    noise_distr : SerializableNormal::new(0.0, cli.variance.unwrap_or(0.2))?,
+                    dataterm : DataTerm::L2Squared,
+                    μ_hat : MU_TRUE_1D_BASIC.into(),
+                    sensor : BallIndicator { r : SensorWidth1D, exponent : Linfinity },
+                    spread : base_spread,
+                    kernel : base_spread,
+                    kernel_plot_width,
+                    noise_seed,
+                    algorithm_defaults: HashMap::new(),
+                }})
+            },
+            Experiment2D => {
+                let base_spread = Gaussian { variance : Variance1 };
+                let spread_cutoff = BallIndicator { r : CutOff1, exponent : Linfinity };
+                Box::new(Named { name, data : Experiment {
+                    domain : [[0.0, 1.0]; 2].into(),
+                    sensor_count : [N_SENSORS_2D; 2],
+                    α : cli.alpha.unwrap_or(0.19), // 0.18, //0.17, //0.16,
+                    noise_distr : SerializableNormal::new(0.0, cli.variance.unwrap_or(0.25))?,
+                    dataterm : DataTerm::L2Squared,
+                    μ_hat : MU_TRUE_2D_BASIC.into(),
+                    sensor : BallIndicator { r : SensorWidth2D, exponent : Linfinity },
+                    spread : Prod(spread_cutoff, base_spread),
+                    kernel : Prod(AutoConvolution(spread_cutoff), base_spread),
+                    kernel_plot_width,
+                    noise_seed,
+                    algorithm_defaults: HashMap::from([
+                        (DefaultAlgorithm::PDPS, AlgorithmConfig::PDPS(pdps_2d()))
+                    ]),
+                }})
+            },
+            Experiment2DFast => {
+                let base_spread = HatConv { radius : Hat1 };
+                Box::new(Named { name, data : Experiment {
+                    domain : [[0.0, 1.0]; 2].into(),
+                    sensor_count : [N_SENSORS_2D; 2],
+                    α : cli.alpha.unwrap_or(0.12), //0.10, //0.14,
+                    noise_distr : SerializableNormal::new(0.0, cli.variance.unwrap_or(0.15))?, //0.25
+                    dataterm : DataTerm::L2Squared,
+                    μ_hat : MU_TRUE_2D_BASIC.into(),
+                    sensor : BallIndicator { r : SensorWidth2D, exponent : Linfinity },
+                    spread : base_spread,
+                    kernel : base_spread,
+                    kernel_plot_width,
+                    noise_seed,
+                    algorithm_defaults: HashMap::from([
+                        (DefaultAlgorithm::PDPS, AlgorithmConfig::PDPS(pdps_2d()))
+                    ]),
+                }})
+            },
+            Experiment1D_L1 => {
+                let base_spread = Gaussian { variance : Variance1 };
+                let spread_cutoff = BallIndicator { r : CutOff1, exponent : Linfinity };
+                Box::new(Named { name, data : Experiment {
+                    domain : [[0.0, 1.0]].into(),
+                    sensor_count : [N_SENSORS_1D],
+                    α : cli.alpha.unwrap_or(0.1),
+                    noise_distr : SaltAndPepper::new(
+                        cli.salt_and_pepper.as_ref().map_or(0.6, |v| v[0]),
+                        cli.salt_and_pepper.as_ref().map_or(0.4, |v| v[1])
+                    )?,
+                    dataterm : DataTerm::L1,
+                    μ_hat : MU_TRUE_1D_BASIC.into(),
+                    sensor : BallIndicator { r : SensorWidth1D, exponent : Linfinity },
+                    spread : Prod(spread_cutoff, base_spread),
+                    kernel : Prod(AutoConvolution(spread_cutoff), base_spread),
+                    kernel_plot_width,
+                    noise_seed,
+                    algorithm_defaults: HashMap::new(),
+                }})
+            },
+            Experiment1D_L1_Fast => {
+                let base_spread = HatConv { radius : Hat1 };
+                Box::new(Named { name, data : Experiment {
+                    domain : [[0.0, 1.0]].into(),
+                    sensor_count : [N_SENSORS_1D],
+                    α : cli.alpha.unwrap_or(0.12),
+                    noise_distr : SaltAndPepper::new(
+                        cli.salt_and_pepper.as_ref().map_or(0.6, |v| v[0]),
+                        cli.salt_and_pepper.as_ref().map_or(0.4, |v| v[1])
+                    )?,
+                    dataterm : DataTerm::L1,
+                    μ_hat : MU_TRUE_1D_BASIC.into(),
+                    sensor : BallIndicator { r : SensorWidth1D, exponent : Linfinity },
+                    spread : base_spread,
+                    kernel : base_spread,
+                    kernel_plot_width,
+                    noise_seed,
+                    algorithm_defaults: HashMap::new(),
+                }})
+            },
+            Experiment2D_L1 => {
+                let base_spread = Gaussian { variance : Variance1 };
+                let spread_cutoff = BallIndicator { r : CutOff1, exponent : Linfinity };
+                Box::new(Named { name, data : Experiment {
+                    domain : [[0.0, 1.0]; 2].into(),
+                    sensor_count : [N_SENSORS_2D; 2],
+                    α : cli.alpha.unwrap_or(0.35),
+                    noise_distr : SaltAndPepper::new(
+                        cli.salt_and_pepper.as_ref().map_or(0.8, |v| v[0]),
+                        cli.salt_and_pepper.as_ref().map_or(0.2, |v| v[1])
+                    )?,
+                    dataterm : DataTerm::L1,
+                    μ_hat : MU_TRUE_2D_BASIC.into(),
+                    sensor : BallIndicator { r : SensorWidth2D, exponent : Linfinity },
+                    spread : Prod(spread_cutoff, base_spread),
+                    kernel : Prod(AutoConvolution(spread_cutoff), base_spread),
+                    kernel_plot_width,
+                    noise_seed,
+                    algorithm_defaults: HashMap::from([
+                        (DefaultAlgorithm::PDPS, AlgorithmConfig::PDPS(pdps_2d()))
+                    ]),
+                }})
+            },
+            Experiment2D_L1_Fast => {
+                let base_spread = HatConv { radius : Hat1 };
+                Box::new(Named { name, data : Experiment {
+                    domain : [[0.0, 1.0]; 2].into(),
+                    sensor_count : [N_SENSORS_2D; 2],
+                    α : cli.alpha.unwrap_or(0.40),
+                    noise_distr : SaltAndPepper::new(
+                        cli.salt_and_pepper.as_ref().map_or(0.8, |v| v[0]),
+                        cli.salt_and_pepper.as_ref().map_or(0.2, |v| v[1])
+                    )?,
+                    dataterm : DataTerm::L1,
+                    μ_hat : MU_TRUE_2D_BASIC.into(),
+                    sensor : BallIndicator { r : SensorWidth2D, exponent : Linfinity },
+                    spread : base_spread,
+                    kernel : base_spread,
+                    kernel_plot_width,
+                    noise_seed,
+                    algorithm_defaults: HashMap::from([
+                        (DefaultAlgorithm::PDPS, AlgorithmConfig::PDPS(pdps_2d()))
+                    ]),
+                }})
+            },
+        })
+    }
+}
+
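+// Sketch of intended use (an assumption about the surrounding crate, not part
+// of the configuration above): the command line front end parses one of the
+// shorthands above via clap together with an `ExperimentOverrides<float>`, and
+// then obtains the runnable configuration roughly as
+//
+//     let experiment = shorthand.get_experiment(&overrides)?;
+//
+// after which the returned `Box<dyn RunnableExperiment<float>>` is handed to
+// the experiment runner in `crate::run`. The names `shorthand` and `overrides`
+// are placeholders for values produced by the CLI parser.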