src/run.rs

branch
dev
changeset 35
b087e3eab191
parent 34
efa60bc4f743
child 37
c5d8bd1a7728
--- a/src/run.rs	Thu Aug 29 00:00:00 2024 -0500
+++ b/src/run.rs	Tue Dec 31 09:25:45 2024 -0500
@@ -26,25 +26,46 @@
     AlgIteratorOptions,
     Verbose,
     AlgIteratorFactory,
+    LoggingIteratorFactory,
+    TimingIteratorFactory,
+    BasicAlgIteratorFactory,
 };
 use alg_tools::logger::Logger;
-use alg_tools::error::DynError;
+use alg_tools::error::{
+    DynError,
+    DynResult,
+};
 use alg_tools::tabledump::TableDump;
 use alg_tools::sets::Cube;
 use alg_tools::mapping::{
     RealMapping,
-    DifferentiableRealMapping
+    DifferentiableMapping,
+    DifferentiableRealMapping,
+    Instance
 };
 use alg_tools::nalgebra_support::ToNalgebraRealField;
 use alg_tools::euclidean::Euclidean;
-use alg_tools::lingrid::lingrid;
+use alg_tools::lingrid::{lingrid, LinSpace};
 use alg_tools::sets::SetOrd;
+use alg_tools::linops::{RowOp, IdOp /*, ZeroOp*/};
+use alg_tools::discrete_gradient::{Grad, ForwardNeumann};
+use alg_tools::convex::Zero;
+use alg_tools::maputil::map3;
+use alg_tools::direct_product::Pair;
 
 use crate::kernels::*;
 use crate::types::*;
 use crate::measures::*;
 use crate::measures::merging::SpikeMerging;
 use crate::forward_model::*;
+use crate::forward_model::sensor_grid::{
+    SensorGrid,
+    SensorGridBT,
+    //SensorGridBTFN,
+    Sensor,
+    Spread,
+};
+
 use crate::fb::{
     FBConfig,
     FBGenericConfig,
@@ -58,8 +79,17 @@
 };
 use crate::sliding_fb::{
     SlidingFBConfig,
+    TransportConfig,
     pointsource_sliding_fb_reg
 };
+use crate::sliding_pdps::{
+    SlidingPDPSConfig,
+    pointsource_sliding_pdps_pair
+};
+use crate::forward_pdps::{
+    ForwardPDPSConfig,
+    pointsource_forward_pdps_pair
+};
 use crate::pdps::{
     PDPSConfig,
     pointsource_pdps_reg,
@@ -68,9 +98,9 @@
     FWConfig,
     FWVariant,
     pointsource_fw_reg,
-    WeightOptim,
+    //WeightOptim,
 };
-use crate::subproblem::InnerSettings;
+//use crate::subproblem::InnerSettings;
 use crate::seminorms::*;
 use crate::plot::*;
 use crate::{AlgorithmOverrides, CommandLineArgs};
@@ -82,9 +112,11 @@
 };
 use crate::dataterm::{
     L1,
-    L2Squared
+    L2Squared,
 };
-use alg_tools::norms::L2;
+use alg_tools::norms::{L2, NormExponent};
+use alg_tools::operator_arithmetic::Weighted;
+use anyhow::anyhow;
 
 /// Available algorithms and their configurations
 #[derive(Copy, Clone, Debug, Serialize, Deserialize)]
@@ -96,6 +128,8 @@
     RadonFB(RadonFBConfig<F>),
     RadonFISTA(RadonFBConfig<F>),
     SlidingFB(SlidingFBConfig<F>),
+    ForwardPDPS(ForwardPDPSConfig<F>),
+    SlidingPDPS(SlidingPDPSConfig<F>),
 }
 
 fn unpack_tolerance<F : Float>(v : &Vec<F>) -> Tolerance<F> {
@@ -119,6 +153,15 @@
                 .. g
             }
         };
+        let override_transport = |g : TransportConfig<F>| {
+            TransportConfig {
+                θ0 : cli.theta0.unwrap_or(g.θ0),
+                tolerance_ω: cli.transport_tolerance_omega.unwrap_or(g.tolerance_ω),
+                tolerance_dv: cli.transport_tolerance_dv.unwrap_or(g.tolerance_dv),
+                adaptation: cli.transport_adaptation.unwrap_or(g.adaptation),
+                .. g
+            }
+        };
 
         use AlgorithmConfig::*;
         match self {
@@ -156,17 +199,32 @@
             }),
             SlidingFB(sfb) => SlidingFB(SlidingFBConfig {
                 τ0 : cli.tau0.unwrap_or(sfb.τ0),
-                θ0 : cli.theta0.unwrap_or(sfb.θ0),
-                transport_tolerance_ω: cli.transport_tolerance_omega.unwrap_or(sfb.transport_tolerance_ω),
-                transport_tolerance_dv: cli.transport_tolerance_dv.unwrap_or(sfb.transport_tolerance_dv),
+                transport : override_transport(sfb.transport),
                 insertion : override_fb_generic(sfb.insertion),
                 .. sfb
             }),
+            SlidingPDPS(spdps) => SlidingPDPS(SlidingPDPSConfig {
+                τ0 : cli.tau0.unwrap_or(spdps.τ0),
+                σp0 : cli.sigmap0.unwrap_or(spdps.σp0),
+                σd0 : cli.sigma0.unwrap_or(spdps.σd0),
+                //acceleration : cli.acceleration.unwrap_or(pdps.acceleration),
+                transport : override_transport(spdps.transport),
+                insertion : override_fb_generic(spdps.insertion),
+                .. spdps
+            }),
+            ForwardPDPS(fpdps) => ForwardPDPS(ForwardPDPSConfig {
+                τ0 : cli.tau0.unwrap_or(fpdps.τ0),
+                σp0 : cli.sigmap0.unwrap_or(fpdps.σp0),
+                σd0 : cli.sigma0.unwrap_or(fpdps.σd0),
+                //acceleration : cli.acceleration.unwrap_or(pdps.acceleration),
+                insertion : override_fb_generic(fpdps.insertion),
+                .. fpdps
+            }),
         }
     }
 }
 
-/// Helper struct for tagging and [`AlgorithmConfig`] or [`Experiment`] with a name.
+/// Helper struct for tagging an [`AlgorithmConfig`] or [`ExperimentV2`] with a name.
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct Named<Data> {
     pub name : String,
@@ -198,9 +256,15 @@
     /// The RadonFISTA inertial forward-backward method
     #[clap(name = "radon_fista")]
     RadonFISTA,
-    /// The Sliding FB method
+    /// The sliding FB method
     #[clap(name = "sliding_fb", alias = "sfb")]
     SlidingFB,
+    /// The sliding PDPS method
+    #[clap(name = "sliding_pdps", alias = "spdps")]
+    SlidingPDPS,
+    /// The PDPS method with a forward step for the smooth function
+    #[clap(name = "forward_pdps", alias = "fpdps")]
+    ForwardPDPS,
 }
 
 impl DefaultAlgorithm {
@@ -219,6 +283,8 @@
             RadonFB => AlgorithmConfig::RadonFB(Default::default()),
             RadonFISTA => AlgorithmConfig::RadonFISTA(Default::default()),
             SlidingFB => AlgorithmConfig::SlidingFB(Default::default()),
+            SlidingPDPS => AlgorithmConfig::SlidingPDPS(Default::default()),
+            ForwardPDPS => AlgorithmConfig::ForwardPDPS(Default::default()),
         }
     }
 
@@ -278,7 +344,8 @@
     iter : usize,
     cpu_time : f64,
     value : F,
-    post_value : F,
+    relative_value : F,
+    //post_value : F,
     n_spikes : usize,
     inner_iters : usize,
     merged : usize,
@@ -355,7 +422,7 @@
     /// Kernel $ρ$ of $𝒟$.
     pub kernel : K,
     /// True point sources
-    pub μ_hat : DiscreteMeasure<Loc<F, N>, F>,
+    pub μ_hat : RNDM<F, N>,
     /// Regularisation term and parameter
     pub regularisation : Regularisation<F>,
     /// For plotting : how wide should the kernels be plotted
@@ -367,6 +434,24 @@
     pub algorithm_defaults : HashMap<DefaultAlgorithm, AlgorithmConfig<F>>,
 }
 
+#[derive(Debug, Clone, Serialize)]
+pub struct ExperimentBiased<F, NoiseDistr, S, K, P, B, const N : usize>
+where F : Float,
+      [usize; N] : Serialize,
+      NoiseDistr : Distribution<F>,
+      S : Sensor<F, N>,
+      P : Spread<F, N>,
+      K : SimpleConvolutionKernel<F, N>,
+      B : Mapping<Loc<F, N>, Codomain = F> + Serialize + std::fmt::Debug,
+{
+    /// Basic setup
+    pub base : ExperimentV2<F, NoiseDistr, S, K, P, N>,
+    /// Weight of TV term
+    pub λ : F,
+    /// Bias function
+    pub bias : B,
+}
+
 /// Trait for runnable experiments
 pub trait RunnableExperiment<F : ClapFloat> {
     /// Run all algorithms provided, or default algorithms if none provided, on the experiment.
@@ -374,51 +459,180 @@
               algs : Option<Vec<Named<AlgorithmConfig<F>>>>) -> DynError;
 
     /// Return algorithm default config
-    fn algorithm_defaults(&self, alg : DefaultAlgorithm, cli : &AlgorithmOverrides<F>)
-    -> Named<AlgorithmConfig<F>>;
+    fn algorithm_defaults(&self, alg : DefaultAlgorithm) -> Option<AlgorithmConfig<F>>;
+}
+
+/// Helper function to print experiment start message and save setup.
+/// Returns saving prefix.
+fn start_experiment<E, S>(
+    experiment : &Named<E>,
+    cli : &CommandLineArgs,
+    stats : S,
+) -> DynResult<String>
+where
+    E : Serialize + std::fmt::Debug,
+    S : Serialize,
+{
+    let Named { name : experiment_name, data } = experiment;
+
+    println!("{}\n{}",
+                format!("Performing experiment {}…", experiment_name).cyan(),
+                format!("{:?}", data).bright_black());
+
+    // Set up output directory
+    let prefix = format!("{}/{}/", cli.outdir, experiment_name);
+
+    // Save experiment configuration and statistics
+    let mkname_e = |t| format!("{prefix}{t}.json", prefix = prefix, t = t);
+    std::fs::create_dir_all(&prefix)?;
+    write_json(mkname_e("experiment"), experiment)?;
+    write_json(mkname_e("config"), cli)?;
+    write_json(mkname_e("stats"), &stats)?;
+
+    Ok(prefix)
+}
+
+/// Error codes for running an algorithm on an experiment.
+enum RunError {
+    /// Algorithm not implemented for this experiment
+    NotImplemented,
 }
 
-// *** macro boilerplate ***
-macro_rules! impl_experiment {
-($type:ident, $reg_field:ident, $reg_convert:path) => {
-// *** macro ***
+use RunError::*;
+
+type DoRunAllIt<'a, F, const N : usize> = LoggingIteratorFactory<
+    'a,
+    Timed<IterInfo<F, N>>,
+    TimingIteratorFactory<BasicAlgIteratorFactory<IterInfo<F, N>>>
+>;
+
+/// Helper function to run all algorithms on an experiment.
+fn do_runall<F : Float, Z, const N : usize>(
+    experiment_name : &String,
+    prefix : &String,
+    cli : &CommandLineArgs,
+    algorithms : Vec<Named<AlgorithmConfig<F>>>,
+    plotgrid : LinSpace<Loc<F, N>, [usize; N]>,
+    mut save_extra : impl FnMut(String, Z) -> DynError,
+    mut do_alg : impl FnMut(
+        &AlgorithmConfig<F>,
+        DoRunAllIt<F, N>,
+        SeqPlotter<F, N>,
+        String,
+    ) -> Result<(RNDM<F, N>, Z), RunError>,
+) ->  DynError
+where
+    PlotLookup : Plotting<N>,
+{
+    let mut logs = Vec::new();
+
+    let iterator_options = AlgIteratorOptions{
+            max_iter : cli.max_iter,
+            verbose_iter : cli.verbose_iter
+                                .map_or(Verbose::Logarithmic(10),
+                                        |n| Verbose::Every(n)),
+            quiet : cli.quiet,
+    };
+
+    // Run the algorithm(s)
+    for named @ Named { name : alg_name, data : alg } in algorithms.iter() {
+        let this_prefix = format!("{}{}/", prefix, alg_name);
+
+        // Create Logger and IteratorFactory
+        let mut logger = Logger::new();
+        let iterator = iterator_options.instantiate()
+                                        .timed()
+                                        .into_log(&mut logger);
+
+        let running = if !cli.quiet {
+            format!("{}\n{}\n{}\n",
+                    format!("Running {} on experiment {}…", alg_name, experiment_name).cyan(),
+                    format!("{:?}", iterator_options).bright_black(),
+                    format!("{:?}", alg).bright_black())
+        } else {
+            "".to_string()
+        };
+        //
+        // The following is for postprocessing, which has been disabled anyway.
+        //
+        // let reg : Box<dyn WeightOptim<_, _, _, N>> = match regularisation {
+        //     Regularisation::Radon(α) => Box::new(RadonRegTerm(α)),
+        //     Regularisation::NonnegRadon(α) => Box::new(NonnegRadonRegTerm(α)),
+        // };
+        //let findim_data = reg.prepare_optimise_weights(&opA, &b);
+        //let inner_config : InnerSettings<F> = Default::default();
+        //let inner_it = inner_config.iterator_options;
+
+        // Create plotter and directory if needed.
+        let plot_count = if cli.plot >= PlotLevel::Iter { 2000 } else { 0 };
+        let plotter = SeqPlotter::new(this_prefix, plot_count, plotgrid.clone());
+
+        let start = Instant::now();
+        let start_cpu = ProcessTime::now();
+
+        let (μ, z) = match do_alg(alg, iterator, plotter, running) {
+            Ok(μ) => μ,
+            Err(RunError::NotImplemented) => {
+                let msg = format!("Algorithm “{alg_name}” not implemented for {experiment_name}. \
+                                   Skipping.").red();
+                eprintln!("{}", msg);
+                continue
+            }
+        };
+
+        let elapsed = start.elapsed().as_secs_f64();
+        let cpu_time = start_cpu.elapsed().as_secs_f64();
+
+        println!("{}", format!("Elapsed {elapsed}s (CPU time {cpu_time}s)… ").yellow());
+
+        // Save results
+        println!("{}", "Saving results …".green());
+
+        let mkname = |t| format!("{prefix}{alg_name}_{t}");
+
+        write_json(mkname("config.json"), &named)?;
+        write_json(mkname("stats.json"), &AlgorithmStats { cpu_time, elapsed })?;
+        μ.write_csv(mkname("reco.txt"))?;
+        save_extra(mkname(""), z)?;
+        //logger.write_csv(mkname("log.txt"))?;
+        logs.push((mkname("log.txt"), logger));
+            }
+
+    save_logs(logs)
+}
+
+#[replace_float_literals(F::cast_from(literal))]
 impl<F, NoiseDistr, S, K, P, const N : usize> RunnableExperiment<F> for
-Named<$type<F, NoiseDistr, S, K, P, N>>
-where F : ClapFloat + nalgebra::RealField + ToNalgebraRealField<MixedType=F>,
-      [usize; N] : Serialize,
-      S : Sensor<F, N> + Copy + Serialize + std::fmt::Debug,
-      P : Spread<F, N> + Copy + Serialize + std::fmt::Debug,
-      Convolution<S, P>: Spread<F, N> + Bounded<F> + LocalAnalysis<F, Bounds<F>, N> + Copy
-                         // TODO: shold not have differentiability as a requirement, but
-                         // decide availability of sliding based on it.
-                         //+ for<'b> Differentiable<&'b Loc<F, N>, Output = Loc<F, N>>,
-                         // TODO: very weird that rust only compiles with Differentiable
-                         // instead of the above one on references, which is required by
-                         // poitsource_sliding_fb_reg.
-                         + DifferentiableRealMapping<F, N>
-                         + Lipschitz<L2, FloatType=F>,
-      // <DefaultSG<F, S, P, N> as ForwardModel<Loc<F, N>, F>::PreadjointCodomain : for<'b> Differentiable<&'b Loc<F, N>, Output = Loc<F, N>>,
-      AutoConvolution<P> : BoundedBy<F, K>,
-      K : SimpleConvolutionKernel<F, N>
-          + LocalAnalysis<F, Bounds<F>, N>
-          + Copy + Serialize + std::fmt::Debug,
-      Cube<F, N>: P2Minimise<Loc<F, N>, F> + SetOrd,
-      PlotLookup : Plotting<N>,
-      DefaultBT<F, N> : SensorGridBT<F, S, P, N, Depth=DynamicDepth> + BTSearch<F, N>,
-      BTNodeLookup: BTNode<F, usize, Bounds<F>, N>,
-      DiscreteMeasure<Loc<F, N>, F> : SpikeMerging<F>,
-      NoiseDistr : Distribution<F> + Serialize + std::fmt::Debug {
+Named<ExperimentV2<F, NoiseDistr, S, K, P, N>>
+where
+    F : ClapFloat + nalgebra::RealField + ToNalgebraRealField<MixedType=F>,
+    [usize; N] : Serialize,
+    S : Sensor<F, N> + Copy + Serialize + std::fmt::Debug,
+    P : Spread<F, N> + Copy + Serialize + std::fmt::Debug,
+    Convolution<S, P>: Spread<F, N> + Bounded<F> + LocalAnalysis<F, Bounds<F>, N> + Copy
+                        // TODO: should not have differentiability as a requirement, but
+                        // decide availability of sliding based on it.
+                        //+ for<'b> Differentiable<&'b Loc<F, N>, Output = Loc<F, N>>,
+                        // TODO: very weird that rust only compiles with Differentiable
+                        // instead of the above one on references, which is required by
+                        // pointsource_sliding_fb_reg.
+                        + DifferentiableRealMapping<F, N>
+                        + Lipschitz<L2, FloatType=F>,
+    for<'b> <Convolution<S, P> as DifferentiableMapping<Loc<F,N>>>::Differential<'b> : Lipschitz<L2, FloatType=F>, // TODO: should not be required generally, only for sliding_fb.
+    AutoConvolution<P> : BoundedBy<F, K>,
+    K : SimpleConvolutionKernel<F, N>
+        + LocalAnalysis<F, Bounds<F>, N>
+        + Copy + Serialize + std::fmt::Debug,
+    Cube<F, N>: P2Minimise<Loc<F, N>, F> + SetOrd,
+    PlotLookup : Plotting<N>,
+    DefaultBT<F, N> : SensorGridBT<F, S, P, N, Depth=DynamicDepth> + BTSearch<F, N>,
+    BTNodeLookup: BTNode<F, usize, Bounds<F>, N>,
+    RNDM<F, N> : SpikeMerging<F>,
+    NoiseDistr : Distribution<F> + Serialize + std::fmt::Debug
+{
 
-    fn algorithm_defaults(&self, alg : DefaultAlgorithm, cli : &AlgorithmOverrides<F>)
-    -> Named<AlgorithmConfig<F>> {
-        alg.to_named(
-            self.data
-                .algorithm_defaults
-                .get(&alg)
-                .map_or_else(|| alg.default_config(),
-                            |config| config.clone())
-                .cli_override(cli)
-        )
+    fn algorithm_defaults(&self, alg : DefaultAlgorithm) -> Option<AlgorithmConfig<F>> {
+        self.data.algorithm_defaults.get(&alg).cloned()
     }
 
     fn runall(&self, cli : &CommandLineArgs,
@@ -426,30 +640,15 @@
         // Get experiment configuration
         let &Named {
             name : ref experiment_name,
-            data : $type {
+            data : ExperimentV2 {
                 domain, sensor_count, ref noise_distr, sensor, spread, kernel,
-                ref μ_hat, /*regularisation,*/ kernel_plot_width, dataterm, noise_seed,
+                ref μ_hat, regularisation, kernel_plot_width, dataterm, noise_seed,
                 ..
             }
         } = self;
-        let regularisation = $reg_convert(self.data.$reg_field);
-
-        println!("{}\n{}",
-                 format!("Performing experiment {}…", experiment_name).cyan(),
-                 format!("{:?}", &self.data).bright_black());
-
-        // Set up output directory
-        let prefix = format!("{}/{}/", cli.outdir, self.name);
 
         // Set up algorithms
-        let iterator_options = AlgIteratorOptions{
-                max_iter : cli.max_iter,
-                verbose_iter : cli.verbose_iter
-                                  .map_or(Verbose::Logarithmic(10),
-                                          |n| Verbose::Every(n)),
-                quiet : cli.quiet,
-        };
-        let algorithms = match (algs, self.data.dataterm) {
+        let algorithms = match (algs, dataterm) {
             (Some(algs), _) => algs,
             (None, DataTerm::L2Squared) => vec![DefaultAlgorithm::FB.get_named()],
             (None, DataTerm::L1) => vec![DefaultAlgorithm::PDPS.get_named()],
@@ -464,281 +663,404 @@
         let mut rng = StdRng::seed_from_u64(noise_seed);
 
         // Generate the data and calculate SSNR statistic
-        let b_hat = opA.apply(μ_hat);
+        let b_hat : DVector<_> = opA.apply(μ_hat);
         let noise = DVector::from_distribution(b_hat.len(), &noise_distr, &mut rng);
         let b = &b_hat + &noise;
         // Need to wrap calc_ssnr into a function to hide ultra-lame nalgebra::RealField
         // overloading log10 and conflicting with standard NumTraits one.
         let stats = ExperimentStats::new(&b, &noise);
 
-        // Save experiment configuration and statistics
-        let mkname_e = |t| format!("{prefix}{t}.json", prefix = prefix, t = t);
-        std::fs::create_dir_all(&prefix)?;
-        write_json(mkname_e("experiment"), self)?;
-        write_json(mkname_e("config"), cli)?;
-        write_json(mkname_e("stats"), &stats)?;
+        let prefix = start_experiment(&self, cli, stats)?;
 
         plotall(cli, &prefix, &domain, &sensor, &kernel, &spread,
                 &μ_hat, &op𝒟, &opA, &b_hat, &b, kernel_plot_width)?;
 
-        // Run the algorithm(s)
-        for named @ Named { name : alg_name, data : alg } in algorithms.iter() {
-            let this_prefix = format!("{}{}/", prefix, alg_name);
+        let plotgrid = lingrid(&domain, &[if N==1 { 1000 } else { 100 }; N]);
+
+        let save_extra = |_, ()| Ok(());
 
-            let running = || if !cli.quiet {
-                println!("{}\n{}\n{}",
-                        format!("Running {} on experiment {}…", alg_name, experiment_name).cyan(),
-                        format!("{:?}", iterator_options).bright_black(),
-                        format!("{:?}", alg).bright_black());
-            };
-            let not_implemented = || {
-                let msg = format!("Algorithm “{alg_name}” not implemented for \
-                                   dataterm {dataterm:?} and regularisation {regularisation:?}. \
-                                   Skipping.").red();
-                eprintln!("{}", msg);
-            };
-            // Create Logger and IteratorFactory
-            let mut logger = Logger::new();
-            let reg : Box<dyn WeightOptim<_, _, _, N>> = match regularisation {
-                Regularisation::Radon(α) => Box::new(RadonRegTerm(α)),
-                Regularisation::NonnegRadon(α) => Box::new(NonnegRadonRegTerm(α)),
-            };
-            let findim_data = reg.prepare_optimise_weights(&opA, &b);
-            let inner_config : InnerSettings<F> = Default::default();
-            let inner_it = inner_config.iterator_options;
-            let logmap = |iter, Timed { cpu_time, data }| {
-                let IterInfo {
-                    value,
-                    n_spikes,
-                    inner_iters,
-                    merged,
-                    pruned,
-                    postprocessing,
-                    this_iters,
-                    ..
-                } = data;
-                let post_value = match (postprocessing, dataterm) {
-                    (Some(mut μ), DataTerm::L2Squared) => {
-                        // Comparison postprocessing is only implemented for the case handled
-                        // by the FW variants.
-                        reg.optimise_weights(
-                            &mut μ, &opA, &b, &findim_data, &inner_config,
-                            inner_it
-                        );
-                        dataterm.value_at_residual(opA.apply(&μ) - &b)
-                            + regularisation.apply(&μ)
-                    },
-                    _ => value,
-                };
-                CSVLog {
-                    iter,
-                    value,
-                    post_value,
-                    n_spikes,
-                    cpu_time : cpu_time.as_secs_f64(),
-                    inner_iters,
-                    merged,
-                    pruned,
-                    this_iters
-                }
-            };
-            let iterator = iterator_options.instantiate()
-                                           .timed()
-                                           .mapped(logmap)
-                                           .into_log(&mut logger);
-            let plotgrid = lingrid(&domain, &[if N==1 { 1000 } else { 100 }; N]);
-
-            // Create plotter and directory if needed.
-            let plot_count = if cli.plot >= PlotLevel::Iter { 2000 } else { 0 };
-            let plotter = SeqPlotter::new(this_prefix, plot_count, plotgrid);
-
-            // Run the algorithm
-            let start = Instant::now();
-            let start_cpu = ProcessTime::now();
+        do_runall(experiment_name, &prefix, cli, algorithms, plotgrid, save_extra,
+            |alg, iterator, plotter, running|
+        {
             let μ = match alg {
                 AlgorithmConfig::FB(ref algconfig) => {
                     match (regularisation, dataterm) {
-                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => {
-                            running();
+                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
                             pointsource_fb_reg(
                                 &opA, &b, NonnegRadonRegTerm(α), &op𝒟, algconfig,
                                 iterator, plotter
                             )
-                        },
-                        (Regularisation::Radon(α), DataTerm::L2Squared) => {
-                            running();
+                        }),
+                        (Regularisation::Radon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
                             pointsource_fb_reg(
                                 &opA, &b, RadonRegTerm(α), &op𝒟, algconfig,
                                 iterator, plotter
                             )
-                        },
-                        _ => {
-                            not_implemented();
-                            continue
-                        }
+                        }),
+                        _ => Err(NotImplemented)
                     }
                 },
                 AlgorithmConfig::FISTA(ref algconfig) => {
                     match (regularisation, dataterm) {
-                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => {
-                            running();
+                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
                             pointsource_fista_reg(
                                 &opA, &b, NonnegRadonRegTerm(α), &op𝒟, algconfig,
                                 iterator, plotter
                             )
-                        },
-                        (Regularisation::Radon(α), DataTerm::L2Squared) => {
-                            running();
+                        }),
+                        (Regularisation::Radon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
                             pointsource_fista_reg(
                                 &opA, &b, RadonRegTerm(α), &op𝒟, algconfig,
                                 iterator, plotter
                             )
-                        },
-                        _ => {
-                            not_implemented();
-                            continue
-                        }
+                        }),
+                        _ => Err(NotImplemented),
                     }
                 },
                 AlgorithmConfig::RadonFB(ref algconfig) => {
                     match (regularisation, dataterm) {
-                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => {
-                            running();
+                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
                             pointsource_radon_fb_reg(
                                 &opA, &b, NonnegRadonRegTerm(α), algconfig,
                                 iterator, plotter
                             )
-                        },
-                        (Regularisation::Radon(α), DataTerm::L2Squared) => {
-                            running();
+                        }),
+                        (Regularisation::Radon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
                             pointsource_radon_fb_reg(
                                 &opA, &b, RadonRegTerm(α), algconfig,
                                 iterator, plotter
                             )
-                        },
-                        _ => {
-                            not_implemented();
-                            continue
-                        }
+                        }),
+                        _ => Err(NotImplemented),
                     }
                 },
                 AlgorithmConfig::RadonFISTA(ref algconfig) => {
                     match (regularisation, dataterm) {
-                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => {
-                            running();
+                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
                             pointsource_radon_fista_reg(
                                 &opA, &b, NonnegRadonRegTerm(α), algconfig,
                                 iterator, plotter
                             )
-                        },
-                        (Regularisation::Radon(α), DataTerm::L2Squared) => {
-                            running();
+                        }),
+                        (Regularisation::Radon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
                             pointsource_radon_fista_reg(
                                 &opA, &b, RadonRegTerm(α), algconfig,
                                 iterator, plotter
                             )
-                        },
-                        _ => {
-                            not_implemented();
-                            continue
-                        }
+                        }),
+                        _ => Err(NotImplemented),
                     }
                 },
                 AlgorithmConfig::SlidingFB(ref algconfig) => {
                     match (regularisation, dataterm) {
-                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => {
-                            running();
+                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
                             pointsource_sliding_fb_reg(
                                 &opA, &b, NonnegRadonRegTerm(α), &op𝒟, algconfig,
                                 iterator, plotter
                             )
-                        },
-                        (Regularisation::Radon(α), DataTerm::L2Squared) => {
-                            running();
+                        }),
+                        (Regularisation::Radon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
                             pointsource_sliding_fb_reg(
                                 &opA, &b, RadonRegTerm(α), &op𝒟, algconfig,
                                 iterator, plotter
                             )
-                        },
-                        _ => {
-                            not_implemented();
-                            continue
-                        }
+                        }),
+                        _ => Err(NotImplemented),
                     }
                 },
                 AlgorithmConfig::PDPS(ref algconfig) => {
-                    running();
+                    print!("{running}");
                     match (regularisation, dataterm) {
-                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => {
+                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => Ok({
                             pointsource_pdps_reg(
                                 &opA, &b, NonnegRadonRegTerm(α), &op𝒟, algconfig,
                                 iterator, plotter, L2Squared
                             )
-                        },
-                        (Regularisation::Radon(α),DataTerm::L2Squared) => {
+                        }),
+                        (Regularisation::Radon(α),DataTerm::L2Squared) => Ok({
                             pointsource_pdps_reg(
                                 &opA, &b, RadonRegTerm(α), &op𝒟, algconfig,
                                 iterator, plotter, L2Squared
                             )
-                        },
-                        (Regularisation::NonnegRadon(α), DataTerm::L1) => {
+                        }),
+                        (Regularisation::NonnegRadon(α), DataTerm::L1) => Ok({
                             pointsource_pdps_reg(
                                 &opA, &b, NonnegRadonRegTerm(α), &op𝒟, algconfig,
                                 iterator, plotter, L1
                             )
-                        },
-                        (Regularisation::Radon(α), DataTerm::L1) => {
+                        }),
+                        (Regularisation::Radon(α), DataTerm::L1) => Ok({
                             pointsource_pdps_reg(
                                 &opA, &b, RadonRegTerm(α), &op𝒟, algconfig,
                                 iterator, plotter, L1
                             )
-                        },
+                        }),
                     }
                 },
                 AlgorithmConfig::FW(ref algconfig) => {
                     match (regularisation, dataterm) {
-                        (Regularisation::Radon(α), DataTerm::L2Squared) => {
-                            running();
+                        (Regularisation::Radon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
                             pointsource_fw_reg(&opA, &b, RadonRegTerm(α),
                                                algconfig, iterator, plotter)
-                        },
-                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => {
-                            running();
+                        }),
+                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
                             pointsource_fw_reg(&opA, &b, NonnegRadonRegTerm(α),
                                                algconfig, iterator, plotter)
-                        },
-                        _ => {
-                            not_implemented();
-                            continue
-                        }
+                        }),
+                        _ => Err(NotImplemented),
                     }
-                }
-            };
-
-            let elapsed = start.elapsed().as_secs_f64();
-            let cpu_time = start_cpu.elapsed().as_secs_f64();
-
-            println!("{}", format!("Elapsed {elapsed}s (CPU time {cpu_time}s)… ").yellow());
-
-            // Save results
-            println!("{}", "Saving results…".green());
-
-            let mkname = |t| format!("{prefix}{alg_name}_{t}");
-
-            write_json(mkname("config.json"), &named)?;
-            write_json(mkname("stats.json"), &AlgorithmStats { cpu_time, elapsed })?;
-            μ.write_csv(mkname("reco.txt"))?;
-            logger.write_csv(mkname("log.txt"))?;
-        }
-
-        Ok(())
+                },
+                _ => Err(NotImplemented),
+            }?;
+            Ok((μ, ()))
+        })
     }
 }
-// *** macro end boiler plate ***
-}}
-// *** actual code ***
+
+
+#[replace_float_literals(F::cast_from(literal))]
+impl<F, NoiseDistr, S, K, P, B, const N : usize> RunnableExperiment<F> for
+Named<ExperimentBiased<F, NoiseDistr, S, K, P, B, N>>
+where
+    F : ClapFloat + nalgebra::RealField + ToNalgebraRealField<MixedType=F>,
+    [usize; N] : Serialize,
+    S : Sensor<F, N> + Copy + Serialize + std::fmt::Debug,
+    P : Spread<F, N> + Copy + Serialize + std::fmt::Debug,
+    Convolution<S, P>: Spread<F, N> + Bounded<F> + LocalAnalysis<F, Bounds<F>, N> + Copy
+                        // TODO: should not have differentiability as a requirement, but
+                        // decide availability of sliding based on it.
+                        //+ for<'b> Differentiable<&'b Loc<F, N>, Output = Loc<F, N>>,
+                        // TODO: very weird that Rust only compiles with Differentiable
+                        // instead of the above one on references, which is required by
+                        // pointsource_sliding_fb_reg.
+                        + DifferentiableRealMapping<F, N>
+                        + Lipschitz<L2, FloatType=F>,
+    for<'b> <Convolution<S, P> as DifferentiableMapping<Loc<F,N>>>::Differential<'b> : Lipschitz<L2, FloatType=F>, // TODO: should not be required generally, only for sliding_fb.
+    AutoConvolution<P> : BoundedBy<F, K>,
+    K : SimpleConvolutionKernel<F, N>
+        + LocalAnalysis<F, Bounds<F>, N>
+        + Copy + Serialize + std::fmt::Debug,
+    Cube<F, N>: P2Minimise<Loc<F, N>, F> + SetOrd,
+    PlotLookup : Plotting<N>,
+    DefaultBT<F, N> : SensorGridBT<F, S, P, N, Depth=DynamicDepth> + BTSearch<F, N>,
+    BTNodeLookup: BTNode<F, usize, Bounds<F>, N>,
+    RNDM<F, N> : SpikeMerging<F>,
+    NoiseDistr : Distribution<F> + Serialize + std::fmt::Debug,
+    B : Mapping<Loc<F, N>, Codomain = F> + Serialize + std::fmt::Debug,
+{
+
+    fn algorithm_defaults(&self, alg : DefaultAlgorithm) -> Option<AlgorithmConfig<F>> {
+        self.data.base.algorithm_defaults.get(&alg).cloned()
+    }
+
+    fn runall(&self, cli : &CommandLineArgs,
+              algs : Option<Vec<Named<AlgorithmConfig<F>>>>) -> DynError {
+        // Get experiment configuration
+        let &Named {
+            name : ref experiment_name,
+            data : ExperimentBiased {
+                λ,
+                ref bias,
+                base : ExperimentV2 {
+                    domain, sensor_count, ref noise_distr, sensor, spread, kernel,
+                    ref μ_hat, regularisation, kernel_plot_width, dataterm, noise_seed,
+                    ..
+                }
+            }
+        } = self;
+
+        // Set up algorithms
+        let algorithms = match (algs, dataterm) {
+            (Some(algs), _) => algs,
+            _ => vec![DefaultAlgorithm::SlidingPDPS.get_named()],
+        };
+
+        // Set up operators
+        let depth = DynamicDepth(8);
+        let opA = DefaultSG::new(domain, sensor_count, sensor, spread, depth);
+        let op𝒟 = DefaultSeminormOp::new(depth, domain, kernel);
+        let opAext = RowOp(opA.clone(), IdOp::new());
+        let fnR = Zero::new();
+        let h = map3(domain.span_start(), domain.span_end(), sensor_count,
+                     |a, b, n| (b-a)/F::cast_from(n))
+                    .into_iter()
+                    .reduce(NumTraitsFloat::max)
+                    .unwrap();
+        let z = DVector::zeros(sensor_count.iter().product());
+        let opKz = Grad::new_for(&z, h, sensor_count, ForwardNeumann).unwrap();
+        let y = opKz.apply(&z);
+        let fnH = Weighted{ base_fn : L1.as_mapping(), weight : λ};  // TODO: L_{2,1}
+        // let zero_y = y.clone();
+        // let zeroBTFN = opA.preadjoint().apply(&zero_y);
+        // let opKμ = ZeroOp::new(&zero_y, zeroBTFN);
+
+        // Set up random number generator.
+        let mut rng = StdRng::seed_from_u64(noise_seed);
+
+        // Generate the data and calculate SSNR statistic
+        let bias_vec = DVector::from_vec(opA.grid()
+                                            .into_iter()
+                                            .map(|v| bias.apply(v))
+                                            .collect::<Vec<F>>());
+        let b_hat : DVector<_> = opA.apply(μ_hat) + &bias_vec;
+        let noise = DVector::from_distribution(b_hat.len(), &noise_distr, &mut rng);
+        let b = &b_hat + &noise;
+        // Need to wrap calc_ssnr into a function to hide ultra-lame nalgebra::RealField
+        // overloading log10 and conflicting with standard NumTraits one.
+        let stats = ExperimentStats::new(&b, &noise);
+
+        let prefix = start_experiment(&self, cli, stats)?;
+
+        plotall(cli, &prefix, &domain, &sensor, &kernel, &spread,
+                &μ_hat, &op𝒟, &opA, &b_hat, &b, kernel_plot_width)?;
+
+        opA.write_observable(&bias_vec, format!("{prefix}bias"))?;
+
+        let plotgrid = lingrid(&domain, &[if N==1 { 1000 } else { 100 }; N]);
+
+        let save_extra = |prefix, z| opA.write_observable(&z, format!("{prefix}z"));
 
-impl_experiment!(ExperimentV2, regularisation, std::convert::identity);
+        // Run the algorithms
+        do_runall(experiment_name, &prefix, cli, algorithms, plotgrid, save_extra,
+            |alg, iterator, plotter, running|
+        {
+            let Pair(μ, z) = match alg {
+                AlgorithmConfig::ForwardPDPS(ref algconfig) => {
+                    match (regularisation, dataterm) {
+                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
+                            pointsource_forward_pdps_pair(
+                                &opAext, &b, NonnegRadonRegTerm(α), &op𝒟, algconfig,
+                                iterator, plotter,
+                                /* opKμ, */ &opKz, &fnR, &fnH, z.clone(), y.clone(),
+                            )
+                        }),
+                        (Regularisation::Radon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
+                            pointsource_forward_pdps_pair(
+                                &opAext, &b, RadonRegTerm(α), &op𝒟, algconfig,
+                                iterator, plotter,
+                                /* opKμ, */ &opKz, &fnR, &fnH, z.clone(), y.clone(),
+                            )
+                        }),
+                        _ => Err(NotImplemented)
+                    }
+                },
+                AlgorithmConfig::SlidingPDPS(ref algconfig) => {
+                    match (regularisation, dataterm) {
+                        (Regularisation::NonnegRadon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
+                            pointsource_sliding_pdps_pair(
+                                &opAext, &b, NonnegRadonRegTerm(α), &op𝒟, algconfig,
+                                iterator, plotter,
+                                /* opKμ, */ &opKz, &fnR, &fnH, z.clone(), y.clone(),
+                            )
+                        }),
+                        (Regularisation::Radon(α), DataTerm::L2Squared) => Ok({
+                            print!("{running}");
+                            pointsource_sliding_pdps_pair(
+                                &opAext, &b, RadonRegTerm(α), &op𝒟, algconfig,
+                                iterator, plotter,
+                                /* opKμ, */ &opKz, &fnR, &fnH, z.clone(), y.clone(),
+                            )
+                        }),
+                        _ => Err(NotImplemented)
+                    }
+                },
+                _ => Err(NotImplemented)
+            }?;
+            Ok((μ, z))
+        })
+    }
+}
+
+
+/// Calculate minimum and maximum values of all the `logs`, and save each log into
+/// the file whose name is given as the first element of the corresponding tuple in the vector.
+fn save_logs<F : Float, const N : usize>(
+    logs : Vec<(String, Logger<Timed<IterInfo<F, N>>>)>
+) -> DynError {
+    // Process logs for relative values
+    println!("{}", "Processing logs…");
+
+
+    // Find minimum value and initial value within a single log
+    let proc_single_log = |log : &Logger<Timed<IterInfo<F, N>>>| {
+        let d = log.data();
+        let mi = d.iter()
+                  .map(|i| i.data.value)
+                  .reduce(NumTraitsFloat::min);
+        d.first()
+            .map(|i| i.data.value)
+            .zip(mi)
+    };
+
+    // Find minimum and maximum value over all logs
+    let (v_ini, v_min) = logs.iter()
+                             .filter_map(|&(_, ref log)| proc_single_log(log))
+                             .reduce(|(i1, m1), (i2, m2)| (i1.max(i2), m1.min(m2)))
+                             .ok_or(anyhow!("No algorithms found"))?;
+
+    let logmap = |Timed { cpu_time, iter, data }| {
+        let IterInfo {
+            value,
+            n_spikes,
+            inner_iters,
+            merged,
+            pruned,
+            //postprocessing,
+            this_iters,
+            ..
+        } = data;
+        // let post_value = match (postprocessing, dataterm) {
+        //     (Some(mut μ), DataTerm::L2Squared) => {
+        //         // Comparison postprocessing is only implemented for the case handled
+        //         // by the FW variants.
+        //         reg.optimise_weights(
+        //             &mut μ, &opA, &b, &findim_data, &inner_config,
+        //             inner_it
+        //         );
+        //         dataterm.value_at_residual(opA.apply(&μ) - &b)
+        //             + regularisation.apply(&μ)
+        //     },
+        //     _ => value,
+        // };
+        let relative_value = (value - v_min)/(v_ini - v_min);
+        CSVLog {
+            iter,
+            value,
+            relative_value,
+            //post_value,
+            n_spikes,
+            cpu_time : cpu_time.as_secs_f64(),
+            inner_iters,
+            merged,
+            pruned,
+            this_iters
+        }
+    };
+
+    println!("{}", "Saving logs …".green());
+
+    for (name, logger) in logs {
+        logger.map(logmap).write_csv(name)?;
+    }
+
+    Ok(())
+}
+
 
 /// Plot experiment setup
 #[replace_float_literals(F::cast_from(literal))]
@@ -749,7 +1071,7 @@
     sensor : &Sensor,
     kernel : &Kernel,
     spread : &Spread,
-    μ_hat : &DiscreteMeasure<Loc<F, N>, F>,
+    μ_hat : &RNDM<F, N>,
     op𝒟 : &𝒟,
     opA : &A,
     b_hat : &A::Observable,
@@ -761,10 +1083,10 @@
       Spread : RealMapping<F, N> + Support<F, N> + Clone,
       Kernel : RealMapping<F, N> + Support<F, N>,
       Convolution<Sensor, Spread> : DifferentiableRealMapping<F, N> + Support<F, N>,
-      //Differential<Loc<F, N>, Convolution<Sensor, Spread>> : RealVectorField<F, N, N>,
       𝒟 : DiscreteMeasureOp<Loc<F, N>, F>,
       𝒟::Codomain : RealMapping<F, N>,
-      A : ForwardModel<Loc<F, N>, F>,
+      A : ForwardModel<RNDM<F, N>, F>,
+      for<'a> &'a A::Observable : Instance<A::Observable>,
       A::PreadjointCodomain : DifferentiableRealMapping<F, N> + Bounded<F>,
       PlotLookup : Plotting<N>,
       Cube<F, N> : SetOrd {
@@ -776,38 +1098,35 @@
     let base = Convolution(sensor.clone(), spread.clone());
 
     let resolution = if N==1 { 100 } else { 40 };
-    let pfx = |n| format!("{}{}", prefix, n);
+    let pfx = |n| format!("{prefix}{n}");
     let plotgrid = lingrid(&[[-kernel_plot_width, kernel_plot_width]; N].into(), &[resolution; N]);
 
-    PlotLookup::plot_into_file(sensor, plotgrid, pfx("sensor"), "sensor".to_string());
-    PlotLookup::plot_into_file(kernel, plotgrid, pfx("kernel"), "kernel".to_string());
-    PlotLookup::plot_into_file(spread, plotgrid, pfx("spread"), "spread".to_string());
-    PlotLookup::plot_into_file_diff(&base, plotgrid, pfx("base_sensor"), "base_sensor".to_string());
+    PlotLookup::plot_into_file(sensor, plotgrid, pfx("sensor"));
+    PlotLookup::plot_into_file(kernel, plotgrid, pfx("kernel"));
+    PlotLookup::plot_into_file(spread, plotgrid, pfx("spread"));
+    PlotLookup::plot_into_file(&base, plotgrid, pfx("base_sensor"));
 
     let plotgrid2 = lingrid(&domain, &[resolution; N]);
 
     let ω_hat = op𝒟.apply(μ_hat);
     let noise =  opA.preadjoint().apply(opA.apply(μ_hat) - b);
-    PlotLookup::plot_into_file(&ω_hat, plotgrid2, pfx("omega_hat"), "ω̂".to_string());
-    PlotLookup::plot_into_file(&noise, plotgrid2, pfx("omega_noise"),
-                               "noise Aᵀ(Aμ̂ - b)".to_string());
+    PlotLookup::plot_into_file(&ω_hat, plotgrid2, pfx("omega_hat"));
+    PlotLookup::plot_into_file(&noise, plotgrid2, pfx("omega_noise"));
 
     let preadj_b =  opA.preadjoint().apply(b);
     let preadj_b_hat =  opA.preadjoint().apply(b_hat);
     //let bounds = preadj_b.bounds().common(&preadj_b_hat.bounds());
     PlotLookup::plot_into_file_spikes(
-        "Aᵀb".to_string(), &preadj_b,
-        "Aᵀb̂".to_string(), Some(&preadj_b_hat),
-        plotgrid2, None, &μ_hat,
+        Some(&preadj_b),
+        Some(&preadj_b_hat),
+        plotgrid2,
+        &μ_hat,
         pfx("omega_b")
     );
-    PlotLookup::plot_into_file_diff(&preadj_b, plotgrid2, pfx("preadj_b"),
-                                    "preadj_b".to_string());
-    PlotLookup::plot_into_file_diff(&preadj_b_hat, plotgrid2, pfx("preadj_b_hat"),
-                                    "preadj_b_hat".to_string());
+    PlotLookup::plot_into_file(&preadj_b, plotgrid2, pfx("preadj_b"));
+    PlotLookup::plot_into_file(&preadj_b_hat, plotgrid2, pfx("preadj_b_hat"));
 
     // Save true solution and observables
-    let pfx = |n| format!("{}{}", prefix, n);
     μ_hat.write_csv(pfx("orig.txt"))?;
     opA.write_observable(&b_hat, pfx("b_hat"))?;
     opA.write_observable(&b, pfx("b_noisy"))

mercurial