Thu, 23 Jan 2025 23:34:05 +0100
Merging adjustments, parameter tuning, etc.
/*! Experimental setups. */

//use numeric_literals::replace_float_literals;
use serde::{Serialize, Deserialize};
use clap::ValueEnum;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
use alg_tools::bisection_tree::*;
use alg_tools::error::DynResult;
use alg_tools::norms::Linfinity;
use crate::{ExperimentOverrides, AlgorithmOverrides};
use crate::kernels::*;
use crate::kernels::SupportProductFirst as Prod;
use crate::types::*;
use crate::run::{
    RunnableExperiment,
    ExperimentV2,
    ExperimentBiased,
    Named,
    DefaultAlgorithm,
};
//use crate::fb::FBGenericConfig;
use crate::rand_distr::{SerializableNormal, SaltAndPepper};
use crate::regularisation::Regularisation;
use alg_tools::euclidean::Euclidean;
use alg_tools::instance::Instance;
use alg_tools::mapping::Mapping;
use alg_tools::operator_arithmetic::{MappingSum, Weighted};

/// Experiment shorthands, to be used with the command-line parser
#[derive(ValueEnum, Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
#[allow(non_camel_case_types)]
pub enum DefaultExperiment {
    /// One dimension, cut Gaussian spread, 2-norm-squared data fidelity
    #[clap(name = "1d")]
    Experiment1D,
    /// One dimension, “fast” spread, 2-norm-squared data fidelity
    #[clap(name = "1d_fast")]
    Experiment1DFast,
    /// Two dimensions, cut Gaussian spread, 2-norm-squared data fidelity
    #[clap(name = "2d")]
    Experiment2D,
    /// Two dimensions, “fast” spread, 2-norm-squared data fidelity
    #[clap(name = "2d_fast")]
    Experiment2DFast,
    /// One dimension, cut Gaussian spread, 1-norm data fidelity
    #[clap(name = "1d_l1")]
    Experiment1D_L1,
    /// One dimension, “fast” spread, 1-norm data fidelity
    #[clap(name = "1d_l1_fast")]
    Experiment1D_L1_Fast,
    /// Two dimensions, cut Gaussian spread, 1-norm data fidelity
    #[clap(name = "2d_l1")]
    Experiment2D_L1,
    /// Two dimensions, “fast” spread, 1-norm data fidelity
    #[clap(name = "2d_l1_fast")]
    Experiment2D_L1_Fast,
    /// One dimension, “fast” spread, 2-norm-squared data fidelity with extra TV-regularised bias
    #[clap(name = "1d_tv_fast")]
    Experiment1D_TV_Fast,
    /// Two dimensions, “fast” spread, 2-norm-squared data fidelity with extra TV-regularised bias
    #[clap(name = "2d_tv_fast")]
    Experiment2D_TV_Fast,
}

macro_rules! make_float_constant {
    ($name:ident = $value:expr) => {
        #[derive(Debug, Copy, Eq, PartialEq, Clone, Serialize, Deserialize)]
        #[serde(into = "float")]
        struct $name;

        impl Into<float> for $name {
            #[inline]
            fn into(self) -> float { $value }
        }

        impl Constant for $name {
            type Type = float;
            fn value(&self) -> float { $value }
        }
    }
}
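// Illustration only (not part of the original module): `make_float_constant!` is used
// below inside `get_experiment` to bake tuning parameters into kernel types, e.g.
// `Gaussian { variance : Variance1 }`, while keeping them serialisable as plain numbers.
// Roughly, `make_float_constant!(Variance1 = 0.05.powi(2))` expands to
//
//     #[derive(Debug, Copy, Eq, PartialEq, Clone, Serialize, Deserialize)]
//     #[serde(into = "float")]
//     struct Variance1;
//     impl Into<float> for Variance1 {
//         #[inline]
//         fn into(self) -> float { 0.05.powi(2) }
//     }
//     impl Constant for Variance1 {
//         type Type = float;
//         fn value(&self) -> float { 0.05.powi(2) }
//     }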
/// Ground-truth measure spike locations and magnitudes for 1D experiments
static MU_TRUE_1D_BASIC : [(float, float); 4] = [
    (0.10, 10.0),
    (0.30, 2.0),
    (0.70, 3.0),
    (0.80, 5.0)
];

/// Ground-truth measure spike locations and magnitudes for 2D experiments
static MU_TRUE_2D_BASIC : [([float; 2], float); 4] = [
    ([0.15, 0.15], 10.0),
    ([0.75, 0.45], 2.0),
    ([0.80, 0.50], 4.0),
    ([0.30, 0.70], 5.0)
];

/// The $\{0,1\}$-valued characteristic function of a ball as a [`Mapping`].
#[derive(Debug, Copy, Clone, Serialize, PartialEq)]
struct BallCharacteristic<F : Float, const N : usize> {
    pub center : Loc<F, N>,
    pub radius : F,
}

impl<F : Float, const N : usize> Mapping<Loc<F, N>> for BallCharacteristic<F, N> {
    type Codomain = F;

    fn apply<I : Instance<Loc<F, N>>>(&self, i : I) -> F {
        if self.center.dist2(i) <= self.radius {
            F::ONE
        } else {
            F::ZERO
        }
    }
}

//#[replace_float_literals(F::cast_from(literal))]
impl DefaultExperiment {
    /// Convert the experiment shorthand into a runnable experiment configuration.
    pub fn get_experiment(&self, cli : &ExperimentOverrides<float>)
        -> DynResult<Box<dyn RunnableExperiment<float>>>
    {
        let name = "pointsource".to_string()
                   + self.to_possible_value().unwrap().get_name();

        let kernel_plot_width = 0.2;

        const BASE_SEED : u64 = 915373234;

        const N_SENSORS_1D : usize = 100;
        make_float_constant!(SensorWidth1D = 0.4/(N_SENSORS_1D as float));

        const N_SENSORS_2D : usize = 16;
        make_float_constant!(SensorWidth2D = 0.4/(N_SENSORS_2D as float));

        const N_SENSORS_2D_MORE : usize = 32;
        make_float_constant!(SensorWidth2DMore = 0.4/(N_SENSORS_2D_MORE as float));

        make_float_constant!(Variance1 = 0.05.powi(2));
        make_float_constant!(CutOff1 = 0.15);
        make_float_constant!(Hat1 = 0.16);
        make_float_constant!(HatBias = 0.05);

        // We use a different step length for PDPS in 2D experiments
        // let pdps_2d = (DefaultAlgorithm::PDPS,
        //     AlgorithmOverrides {
        //         tau0 : Some(3.0),
        //         sigma0 : Some(0.99 / 3.0),
        //         .. Default::default()
        //     }
        // );
        // let radon_pdps_2d = (DefaultAlgorithm::RadonPDPS,
        //     AlgorithmOverrides {
        //         tau0 : Some(3.0),
        //         sigma0 : Some(0.99 / 3.0),
        //         .. Default::default()
        //     }
        // );

        let sliding_fb_cut_gaussian = (DefaultAlgorithm::SlidingFB,
            AlgorithmOverrides {
                theta0 : Some(0.3),
                .. Default::default()
            }
        );

        // let higher_cpos = |alg| (alg,
        //     AlgorithmOverrides {
        //         transport_tolerance_pos : Some(1000.0),
        //         .. Default::default()
        //     }
        // );

        let higher_cpos_merging = |alg| (alg,
            AlgorithmOverrides {
                transport_tolerance_pos : Some(1000.0),
                merge : Some(true),
                fitness_merging : Some(true),
                .. Default::default()
            }
        );

        let much_higher_cpos_merging_steptune = |alg| (alg,
            AlgorithmOverrides {
                transport_tolerance_pri : Some(1000.0),
                transport_tolerance_pos : Some(10000.0),
                sigma0 : Some(0.15),
                theta0 : Some(0.3),
                merge : Some(true),
                fitness_merging : Some(true),
                .. Default::default()
            }
        );

        // We add a hash of the experiment name to the configured noise seed so that
        // different experiments do not use the same noise.
        let mut h = DefaultHasher::new();
        name.hash(&mut h);
        let noise_seed = cli.noise_seed.unwrap_or(BASE_SEED) + h.finish();

        let default_merge_radius = 0.01;

        use DefaultExperiment::*;
        Ok(match self {
            Experiment1D => {
                let base_spread = Gaussian { variance : Variance1 };
                let spread_cutoff = BallIndicator { r : CutOff1, exponent : Linfinity };
                Box::new(Named { name, data : ExperimentV2 {
                    domain : [[0.0, 1.0]].into(),
                    sensor_count : [N_SENSORS_1D],
                    regularisation : Regularisation::NonnegRadon(cli.alpha.unwrap_or(0.08)),
                    noise_distr : SerializableNormal::new(0.0, cli.variance.unwrap_or(0.2))?,
                    dataterm : DataTerm::L2Squared,
                    μ_hat : MU_TRUE_1D_BASIC.into(),
                    sensor : BallIndicator { r : SensorWidth1D, exponent : Linfinity },
                    spread : Prod(spread_cutoff, base_spread),
                    kernel : Prod(AutoConvolution(spread_cutoff), base_spread),
                    kernel_plot_width,
                    noise_seed,
                    default_merge_radius,
                    algorithm_overrides: HashMap::from([
                        sliding_fb_cut_gaussian,
                        higher_cpos_merging(DefaultAlgorithm::RadonFB),
                        higher_cpos_merging(DefaultAlgorithm::RadonSlidingFB),
                    ]),
                }})
            },
            Experiment1DFast => {
                let base_spread = HatConv { radius : Hat1 };
                Box::new(Named { name, data : ExperimentV2 {
                    domain : [[0.0, 1.0]].into(),
                    sensor_count : [N_SENSORS_1D],
                    regularisation : Regularisation::NonnegRadon(cli.alpha.unwrap_or(0.06)),
                    noise_distr : SerializableNormal::new(0.0, cli.variance.unwrap_or(0.2))?,
                    dataterm : DataTerm::L2Squared,
                    μ_hat : MU_TRUE_1D_BASIC.into(),
                    sensor : BallIndicator { r : SensorWidth1D, exponent : Linfinity },
                    spread : base_spread,
                    kernel : base_spread,
                    kernel_plot_width,
                    noise_seed,
                    default_merge_radius,
                    algorithm_overrides: HashMap::from([
                        higher_cpos_merging(DefaultAlgorithm::RadonFB),
                        higher_cpos_merging(DefaultAlgorithm::RadonSlidingFB),
                    ]),
                }})
            },
            Experiment2D => {
                let base_spread = Gaussian { variance : Variance1 };
                let spread_cutoff = BallIndicator { r : CutOff1, exponent : Linfinity };
                Box::new(Named { name, data : ExperimentV2 {
                    domain : [[0.0, 1.0]; 2].into(),
                    sensor_count : [N_SENSORS_2D; 2],
                    regularisation : Regularisation::NonnegRadon(cli.alpha.unwrap_or(0.19)),
                    noise_distr : SerializableNormal::new(0.0, cli.variance.unwrap_or(0.25))?,
                    dataterm : DataTerm::L2Squared,
                    μ_hat : MU_TRUE_2D_BASIC.into(),
                    sensor : BallIndicator { r : SensorWidth2D, exponent : Linfinity },
                    spread : Prod(spread_cutoff, base_spread),
                    kernel : Prod(AutoConvolution(spread_cutoff), base_spread),
                    kernel_plot_width,
                    noise_seed,
                    default_merge_radius,
                    algorithm_overrides: HashMap::from([
                        sliding_fb_cut_gaussian,
                        higher_cpos_merging(DefaultAlgorithm::RadonFB),
                        higher_cpos_merging(DefaultAlgorithm::RadonSlidingFB),
                        // pdps_2d,
                        // radon_pdps_2d
                    ]),
                }})
            },
            Experiment2DFast => {
                let base_spread = HatConv { radius : Hat1 };
                Box::new(Named { name, data : ExperimentV2 {
                    domain : [[0.0, 1.0]; 2].into(),
                    sensor_count : [N_SENSORS_2D; 2],
                    regularisation : Regularisation::NonnegRadon(cli.alpha.unwrap_or(0.12)),
                    noise_distr : SerializableNormal::new(0.0, cli.variance.unwrap_or(0.15))?, //0.25
                    dataterm : DataTerm::L2Squared,
                    μ_hat : MU_TRUE_2D_BASIC.into(),
                    sensor : BallIndicator { r : SensorWidth2D, exponent : Linfinity },
                    spread : base_spread,
                    kernel : base_spread,
                    kernel_plot_width,
                    noise_seed,
                    default_merge_radius,
                    algorithm_overrides: HashMap::from([
                        // pdps_2d,
                        // radon_pdps_2d
                        higher_cpos_merging(DefaultAlgorithm::RadonFB),
                        higher_cpos_merging(DefaultAlgorithm::RadonSlidingFB),
                    ]),
                }})
            },
            Experiment1D_L1 => {
                let base_spread = Gaussian { variance : Variance1 };
                let spread_cutoff = BallIndicator { r : CutOff1, exponent : Linfinity };
                Box::new(Named { name, data : ExperimentV2 {
                    domain : [[0.0, 1.0]].into(),
                    sensor_count : [N_SENSORS_1D],
                    regularisation : Regularisation::NonnegRadon(cli.alpha.unwrap_or(0.1)),
                    noise_distr : SaltAndPepper::new(
                        cli.salt_and_pepper.as_ref().map_or(0.6, |v| v[0]),
                        cli.salt_and_pepper.as_ref().map_or(0.4, |v| v[1])
                    )?,
                    dataterm : DataTerm::L1,
                    μ_hat : MU_TRUE_1D_BASIC.into(),
                    sensor : BallIndicator { r : SensorWidth1D, exponent : Linfinity },
                    spread : Prod(spread_cutoff, base_spread),
                    kernel : Prod(AutoConvolution(spread_cutoff), base_spread),
                    kernel_plot_width,
                    noise_seed,
                    default_merge_radius,
                    algorithm_overrides: HashMap::new(),
                }})
            },
            Experiment1D_L1_Fast => {
                let base_spread = HatConv { radius : Hat1 };
                Box::new(Named { name, data : ExperimentV2 {
                    domain : [[0.0, 1.0]].into(),
                    sensor_count : [N_SENSORS_1D],
                    regularisation : Regularisation::NonnegRadon(cli.alpha.unwrap_or(0.12)),
                    noise_distr : SaltAndPepper::new(
                        cli.salt_and_pepper.as_ref().map_or(0.6, |v| v[0]),
                        cli.salt_and_pepper.as_ref().map_or(0.4, |v| v[1])
                    )?,
                    dataterm : DataTerm::L1,
                    μ_hat : MU_TRUE_1D_BASIC.into(),
                    sensor : BallIndicator { r : SensorWidth1D, exponent : Linfinity },
                    spread : base_spread,
                    kernel : base_spread,
                    kernel_plot_width,
                    noise_seed,
                    default_merge_radius,
                    algorithm_overrides: HashMap::new(),
                }})
            },
            Experiment2D_L1 => {
                let base_spread = Gaussian { variance : Variance1 };
                let spread_cutoff = BallIndicator { r : CutOff1, exponent : Linfinity };
                Box::new(Named { name, data : ExperimentV2 {
                    domain : [[0.0, 1.0]; 2].into(),
                    sensor_count : [N_SENSORS_2D; 2],
                    regularisation : Regularisation::NonnegRadon(cli.alpha.unwrap_or(0.35)),
                    noise_distr : SaltAndPepper::new(
                        cli.salt_and_pepper.as_ref().map_or(0.8, |v| v[0]),
                        cli.salt_and_pepper.as_ref().map_or(0.2, |v| v[1])
                    )?,
                    dataterm : DataTerm::L1,
                    μ_hat : MU_TRUE_2D_BASIC.into(),
                    sensor : BallIndicator { r : SensorWidth2D, exponent : Linfinity },
                    spread : Prod(spread_cutoff, base_spread),
                    kernel : Prod(AutoConvolution(spread_cutoff), base_spread),
                    kernel_plot_width,
                    noise_seed,
                    default_merge_radius,
                    algorithm_overrides: HashMap::from([
                        // pdps_2d,
                        // radon_pdps_2d
                    ]),
                }})
            },
            Experiment2D_L1_Fast => {
                let base_spread = HatConv { radius : Hat1 };
                Box::new(Named { name, data : ExperimentV2 {
                    domain : [[0.0, 1.0]; 2].into(),
                    sensor_count : [N_SENSORS_2D; 2],
                    regularisation : Regularisation::NonnegRadon(cli.alpha.unwrap_or(0.40)),
                    noise_distr : SaltAndPepper::new(
                        cli.salt_and_pepper.as_ref().map_or(0.8, |v| v[0]),
                        cli.salt_and_pepper.as_ref().map_or(0.2, |v| v[1])
                    )?,
                    dataterm : DataTerm::L1,
                    μ_hat : MU_TRUE_2D_BASIC.into(),
                    sensor : BallIndicator { r : SensorWidth2D, exponent : Linfinity },
                    spread : base_spread,
                    kernel : base_spread,
                    kernel_plot_width,
                    noise_seed,
                    default_merge_radius,
                    algorithm_overrides: HashMap::from([
                        // pdps_2d,
                        // radon_pdps_2d
                    ]),
                }})
            },
            Experiment1D_TV_Fast => {
                let base_spread = HatConv { radius : HatBias };
                Box::new(Named { name, data : ExperimentBiased {
                    λ : 0.02,
                    bias : MappingSum::new([
                        Weighted::new(1.0, BallCharacteristic{ center : 0.3.into(), radius : 0.2 }),
                        Weighted::new(0.5, BallCharacteristic{ center : 0.6.into(), radius : 0.3 }),
                    ]),
                    base : ExperimentV2 {
                        domain : [[0.0, 1.0]].into(),
                        sensor_count : [N_SENSORS_1D],
                        regularisation : Regularisation::NonnegRadon(cli.alpha.unwrap_or(0.2)),
                        noise_distr : SerializableNormal::new(0.0, cli.variance.unwrap_or(0.1))?,
                        dataterm : DataTerm::L2Squared,
                        μ_hat : MU_TRUE_1D_BASIC.into(),
                        sensor : BallIndicator { r : SensorWidth1D, exponent : Linfinity },
                        spread : base_spread,
                        kernel : base_spread,
                        kernel_plot_width,
                        noise_seed,
                        default_merge_radius,
                        algorithm_overrides: HashMap::from([
                            higher_cpos_merging(DefaultAlgorithm::RadonForwardPDPS),
                            higher_cpos_merging(DefaultAlgorithm::RadonSlidingPDPS),
                        ]),
                    },
                }})
            },
            Experiment2D_TV_Fast => {
                let base_spread = HatConv { radius : Hat1 };
                Box::new(Named { name, data : ExperimentBiased {
                    λ : 0.005,
                    bias : MappingSum::new([
                        Weighted::new(1.0, BallCharacteristic{ center : [0.3, 0.3].into(), radius : 0.2 }),
                        Weighted::new(0.5, BallCharacteristic{ center : [0.6, 0.6].into(), radius : 0.3 }),
                    ]),
                    base : ExperimentV2 {
                        domain : [[0.0, 1.0]; 2].into(),
                        sensor_count : [N_SENSORS_2D; 2],
                        regularisation : Regularisation::NonnegRadon(cli.alpha.unwrap_or(0.06)),
                        noise_distr : SerializableNormal::new(0.0, cli.variance.unwrap_or(0.15))?, //0.25
                        dataterm : DataTerm::L2Squared,
                        μ_hat : MU_TRUE_2D_BASIC.into(),
                        sensor : BallIndicator { r : SensorWidth2D, exponent : Linfinity },
                        spread : base_spread,
                        kernel : base_spread,
                        kernel_plot_width,
                        noise_seed,
                        default_merge_radius,
                        algorithm_overrides: HashMap::from([
                            much_higher_cpos_merging_steptune(DefaultAlgorithm::RadonForwardPDPS),
                            much_higher_cpos_merging_steptune(DefaultAlgorithm::RadonSlidingPDPS),
                        ]),
                    },
                }})
            },
        })
    }
}
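// Illustrative sketches only, compiled under `cfg(test)` and not part of the experiment
// definitions above. They demonstrate two mechanisms from this file: the {0,1}-valued
// `BallCharacteristic` mapping, and the derivation of a per-experiment noise seed from
// the experiment name. Passing a `Loc` directly to `Mapping::apply` and the scalar
// `0.5.into()` conversion to `Loc<float, 1>` are assumptions based on how these types
// are used elsewhere in this file.
#[cfg(test)]
mod sketches {
    use super::*;

    #[test]
    fn ball_characteristic_is_an_indicator() {
        // A ball of radius 0.25 centred at 0.5 on the real line.
        let ball : BallCharacteristic<float, 1> = BallCharacteristic {
            center : 0.5.into(),
            radius : 0.25,
        };
        let inside : Loc<float, 1> = 0.6.into();
        let outside : Loc<float, 1> = 0.9.into();
        // Points within the radius map to one, points outside to zero.
        assert_eq!(ball.apply(inside), 1.0);
        assert_eq!(ball.apply(outside), 0.0);
    }

    // Mirrors the seed derivation in `get_experiment`: the hash of the experiment name
    // is added to the configured base seed.
    fn derived_noise_seed(base_seed : u64, name : &str) -> u64 {
        let mut h = DefaultHasher::new();
        name.hash(&mut h);
        base_seed + h.finish()
    }

    #[test]
    fn different_experiments_get_different_noise() {
        let base_seed = 915373234;
        assert_ne!(derived_noise_seed(base_seed, "pointsource1d"),
                   derived_noise_seed(base_seed, "pointsource2d"));
    }
}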