#![allow(non_snake_case)]

//! Provide FatEstimator trait to allow for testing of many estimators with disparate requirements.
//!
//! This defines predict and observe operations using linearised models to be tested.
//!
//! Where necessary 'fat' estimator states are defined so that different operations of an estimator can be tested using
//! the FatEstimator trait.
//!
//! Implementations are provided for all the linearised estimators.
//! The [`sir`] sampled estimator is implemented, so it can be tested using linearised models.

use std::iter::StepBy;

use nalgebra::{allocator::Allocator, DefaultAllocator, storage::Storage};
use nalgebra::{Dim, DimAdd, DimMin, DimMinimum, DimSum, U1};
use nalgebra::{Matrix, Scalar};
use nalgebra::{OMatrix, OVector, RealField};
use nalgebra::iter::MatrixIter;
use num_traits::FromPrimitive;
use rand::RngCore;

use bayes_estimate::estimators::information_root::InformationRootState;
use bayes_estimate::estimators::sir;
use bayes_estimate::estimators::sir::{SampleState, SampleStateDraw};
use bayes_estimate::estimators::ud::{CorrelatedFactorNoise, UDState};
use bayes_estimate::estimators::unscented_duplex::UnscentedDuplexState;
use bayes_estimate::models::{
    Estimator, ExtendedLinearObserver, ExtendedLinearPredictor, FunctionalObserver,
    FunctionalPredictor, InformationState, KalmanEstimator, KalmanState,
};
use bayes_estimate::noise::{CorrelatedNoise, CoupledNoise, UncorrelatedNoise};

/// Define the estimator operations to be tested.
// NOTE(review): throughout this file the generic parameter lists (e.g. `<N, D, QD, ZD>`
// on `FatEstimator`, `<N, D>` on `Allocator`/`OVector`/`OMatrix`/`KalmanState`) appear
// to have been stripped by whatever extracted this source — see the bare repeated
// `Allocator + Allocator` bounds and the `::::` turbofish remnants below. The code
// cannot compile in this form; restore the parameters from version control before
// editing. The comments here document intent only.
pub trait FatEstimator: Estimator + KalmanEstimator
where
    DefaultAllocator: Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator,
{
    /// State dimension, recovered from the current state vector's generic shape.
    fn dim(&self) -> D {
        return Estimator::state(self).unwrap().shape_generic().0;
    }

    /// Factor by which this estimator's numerical error is allowed to exceed the
    /// reference tolerance (1 = no extra allowance).
    fn allow_error_by(&self) -> f64 {
        1f64
    }

    /// Optionally print internal state for debugging; default does nothing.
    fn trace_state(&self) {}

    // Initialize with KalmanState
    fn init(&mut self, state: &KalmanState) -> Result<(), &str>;

    /// Prediction with additive noise
    fn predict_fn(
        &mut self,
        x_pred: &OVector,
        f: fn(&OVector) -> OVector,
        F: &OMatrix,
        noise: &CoupledNoise,
    ) where
        DefaultAllocator: Allocator + Allocator;

    /// Observation with correlated noise
    fn observe_any(
        &mut self,
        z: &OVector,
        h: fn(&OVector) -> OVector,
        h_normalize: fn(h: &mut OVector, h0: &OVector),
        H: &OMatrix,
        noise: &CorrelatedNoise,
    ) -> Result<(), &str>;
}

/// Test covariance estimator operations defined on a KalmanState.
impl FatEstimator for KalmanState
where
    DefaultAllocator: Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator,
{
    /// Adopt the given Kalman state directly (mean `x` and covariance `X`).
    fn init(&mut self, state: &KalmanState) -> Result<(), &str> {
        self.x = state.x.clone();
        self.X = state.X.clone();
        Ok(())
    }

    /// Linearised prediction: the coupled noise is converted to correlated form.
    fn predict_fn(
        &mut self,
        x_pred: &OVector,
        _f: fn(&OVector) -> OVector,
        F: &OMatrix,
        noise: &CoupledNoise,
    ) {
        ExtendedLinearPredictor::::predict(
            self,
            x_pred,
            F,
            &CorrelatedNoise::from_coupled::(noise),
        )
        .unwrap();
    }

    fn observe_any(
        &mut self,
        z: &OVector,
        h: fn(&OVector) -> OVector,
        h_normalize: fn(h: &mut OVector, h0: &OVector),
        H: &OMatrix,
        noise: &CorrelatedNoise,
    ) -> Result<(), &str> {
        let mut hx = h(&self.x);
        // Normalise the predicted observation against the actual one (e.g. angle wrapping).
        h_normalize(&mut hx, &z);
        // Innovation: difference between actual and predicted observation.
        let s = z - &hx;
        // NOTE(review): a `?;` after this call appears to have been lost in the
        // mangling — as written the expression dangles before `Ok(())`. Confirm
        // against version control.
        ExtendedLinearObserver::observe_innovation(self, &s, H, noise)
        // TODO tests application of observations sequentially
        // for o in 0..(s.nrows()) {
        //     ExtendedLinearObserver::observe_innovation(self, &Vector1::new(s[o]), &H.row(o).into_owned(), &CorrelatedNoise{Q: Matrix1::new(noise.Q[(o, o)])}).unwrap();
        // }
        //
        Ok(())
    }
}

/// Test information estimator operations defined on an InformationState.
impl FatEstimator for InformationState
where
    DefaultAllocator: Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator,
    // observe_innovation
    DefaultAllocator: Allocator,
{
    /// State dimension from the information vector `i`.
    fn dim(&self) -> D {
        return self.i.shape_generic().0;
    }

    /// Convert the Kalman state to information form and adopt it.
    fn init(&mut self, state: &KalmanState) -> Result<(), &str> {
        let info = InformationState::try_from(state.clone()).unwrap();
        self.i = info.i;
        self.I = info.I;
        Ok(())
    }

    /// Linearised prediction: the coupled noise is converted to correlated form.
    fn predict_fn(
        &mut self,
        x_pred: &OVector,
        _f: fn(&OVector) -> OVector,
        F: &OMatrix,
        noise: &CoupledNoise,
    ) {
        ExtendedLinearPredictor::::predict(
            self,
            x_pred,
            F,
            &CorrelatedNoise::from_coupled::(noise),
        )
        .unwrap();
    }

    fn observe_any(
        &mut self,
        z: &OVector,
        h: fn(&OVector) -> OVector,
        h_normalize: fn(h: &mut OVector, h0: &OVector),
        H: &OMatrix,
        noise: &CorrelatedNoise,
    ) -> Result<(), &str> {
        let mut hx = h(&self.state().unwrap());
        h_normalize(&mut hx, &z);
        // Innovation fed to the linearised information-form observe.
        let s = z - hx;
        ExtendedLinearObserver::observe_innovation(self, &s, H, noise)?;
        Ok(())
    }
}

/// Test information estimator operations defined on an InformationRootState.
// NOTE(review): the generic parameters and the contents of the nested
// `Allocator<...>`/`DimSum<...>`/`DimMin<...>` bounds below have been stripped
// by extraction (stray `,`/`>` remnants remain). Restore from version control;
// comments document intent only.
impl FatEstimator for InformationRootState
where
    // display
    DefaultAllocator: Allocator,
    // InformationRootState
    DefaultAllocator: Allocator + Allocator,
    // FatEstimator
    DefaultAllocator: Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator,
    // predict
    D: DimAdd,
    DefaultAllocator: Allocator, DimSum> + Allocator> + Allocator + Allocator,
    DimSum: DimMin>,
    DefaultAllocator: Allocator, DimSum>> + Allocator, DimSum>, DimSum>,
    // observe
    DefaultAllocator: Allocator + Allocator + Allocator + Allocator + Allocator,
    D: DimAdd + DimAdd,
    DefaultAllocator: Allocator, DimSum> + Allocator>,
    DimSum: DimMin>,
    DefaultAllocator: Allocator, DimSum>> + Allocator, DimSum>, DimSum>,
{
    /// State dimension from the information root vector `r`.
    fn dim(&self) -> D {
        return self.r.shape_generic().0;
    }

    /// Print the information root matrix `R` for debugging.
    fn trace_state(&self) {
        println!("{}", self.R);
    }

    /// Convert the Kalman state to information-root form and adopt it.
    fn init(&mut self, state: &KalmanState) -> Result<(), &str> {
        let info = InformationRootState::try_from(state.clone()).unwrap();
        self.r = info.r;
        self.R = info.R;
        Ok(())
    }

    /// Prediction uses the information-root specific `predict`, taking the
    /// coupled noise directly (no conversion to correlated form).
    fn predict_fn(
        &mut self,
        x_pred: &OVector,
        _f: fn(&OVector) -> OVector,
        F: &OMatrix,
        noise: &CoupledNoise,
    ) {
        InformationRootState::predict::(self, x_pred, F, &noise).unwrap();
    }

    fn observe_any(
        &mut self,
        z: &OVector,
        h: fn(&OVector) -> OVector,
        h_normalize: fn(h: &mut OVector, h0: &OVector),
        H: &OMatrix,
        noise: &CorrelatedNoise,
    ) -> Result<(), &str>
    where
        DefaultAllocator: Allocator
            + Allocator
            + Allocator
            + Allocator
            + Allocator
            + Allocator,
    {
        let mut hx = h(&self.state().unwrap());
        // Normalise the predicted observation against the actual one.
        h_normalize(&mut hx, &z);
        let s = z - hx;
        ExtendedLinearObserver::observe_innovation(self, &s, H, noise)?;
        Ok(())
    }
}

/// Test UD estimator operations defined on a FatUDState.
// NOTE(review): generic parameter lists stripped by extraction (`impl, QD: Dim, ...>`,
// `Result, &str>`, `UncorrelatedNoise:: {` are remnants). Restore from version
// control; comments document intent only.
/// Wrapper pairing a `UDState` with a flag selecting the observe path under test.
pub struct FatUDState
where
    DefaultAllocator: Allocator + Allocator,
{
    pub ud: UDState,
    // When true, observations go through the uncorrelated-noise path (requires a
    // diagonal noise covariance, asserted in `observe_any`); otherwise the
    // correlated-factor path is used.
    pub obs_uncorrelated: bool,
}

impl Estimator for FatUDState
where
    DefaultAllocator: Allocator + Allocator,
{
    // Delegate state access to the wrapped UD state.
    fn state(&self) -> Result, &str> {
        self.ud.state()
    }
}

impl KalmanEstimator for FatUDState
where
    DefaultAllocator: Allocator + Allocator,
{
    // Delegate Kalman-form state access to the wrapped UD state.
    fn kalman_state(&self) -> Result, &str> {
        self.ud.kalman_state()
    }
}

impl, QD: Dim, ZD: Dim> FatEstimator for FatUDState
where
    DefaultAllocator: Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator>,
    D: DimAdd,
    DefaultAllocator: Allocator, U1>,
{
    /// Print the UD factorisation for debugging.
    fn trace_state(&self) {
        println!("{}", self.ud.UD);
    }

    /// Convert the Kalman state to UD-factorised form and adopt it.
    fn init(&mut self, state: &KalmanState) -> Result<(), &str> {
        let ud_state = UDState::try_from(state.clone()).unwrap();
        self.ud = ud_state;
        Ok(())
    }

    /// Prediction delegates to the UD-specific predict with the coupled noise.
    fn predict_fn(
        &mut self,
        x_pred: &OVector,
        _f: fn(&OVector) -> OVector,
        F: &OMatrix,
        noise: &CoupledNoise,
    ) {
        self.ud.predict::(F, x_pred, noise).unwrap();
    }

    fn observe_any(
        &mut self,
        z: &OVector,
        h: fn(&OVector) -> OVector,
        h_normalize: fn(h: &mut OVector, h0: &OVector),
        H: &OMatrix,
        noise: &CorrelatedNoise,
    ) -> Result<(), &str> {
        if self.obs_uncorrelated {
            // The uncorrelated path is only valid for a diagonal noise covariance:
            // assert there are no off-diagonal terms in Q.
            assert_eq!(
                noise.Q.upper_triangle() - Matrix::from_diagonal(&noise.Q.diagonal()),
                Matrix::zeros_generic(noise.Q.shape_generic().0, noise.Q.shape_generic().1)
            );
            let mut hx = h(&self.state().unwrap());
            h_normalize(&mut hx, &z);
            // Innovation with the diagonal of Q as uncorrelated noise variances.
            let s = z - &hx;
            let uncorrelated_noise = UncorrelatedNoise:: {
                q: noise.Q.diagonal(),
            };
            self.ud
                .observe_innovation::(&s, H, &uncorrelated_noise)
                .map(|_rcond| {})
        } else {
            // Factorise the correlated noise, then observe linearly with it.
            let noise_fac = CorrelatedFactorNoise::from_correlated(&noise)?;
            self.ud
                .observe_linear_correlated::(z, H, h_normalize, &noise_fac)
                .map(|_rcond| {})
        }
    }
}

/// Test Unscented estimator operations defined on a UnscentedDuplexState.
// NOTE(review): generic parameter lists stripped by extraction (bare repeated
// `Allocator` bounds). Restore from version control; comments document intent only.
impl FatEstimator for UnscentedDuplexState
where
    DefaultAllocator: Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator,
    // predict_unscented
    DefaultAllocator: Allocator,
    // observe_unscented
    DefaultAllocator: Allocator,
    DefaultAllocator: Allocator,
    // DefaultAllocator: Allocator + Allocator,
    DefaultAllocator: Allocator,
    DefaultAllocator: Allocator,
{
    /// Print the underlying Kalman covariance for debugging.
    fn trace_state(&self) {
        println!("{:}", self.kalman.X);
    }

    /// Adopt the Kalman state via the duplex conversion.
    fn init(&mut self, state: &KalmanState) -> Result<(), &str> {
        let duplex_state = UnscentedDuplexState::try_from(state.clone()).unwrap();
        self.kalman = duplex_state.kalman;
        Ok(())
    }

    /// Unscented prediction: uses the non-linear `f` directly; the linearisation
    /// inputs `_x_pred` and `_F` are ignored.
    fn predict_fn(
        &mut self,
        _x_pred: &OVector,
        f: fn(&OVector) -> OVector,
        _F: &OMatrix,
        noise: &CoupledNoise,
    ) {
        self.predict(f, &CorrelatedNoise::from_coupled::(noise)).unwrap();
    }

    /// Unscented observation: wraps `h` so each sigma-point observation is
    /// normalised against `z`; the Jacobian `_H` is ignored.
    fn observe_any(
        &mut self,
        z: &OVector,
        h: fn(&OVector) -> OVector,
        h_normalize: fn(h: &mut OVector, h0: &OVector),
        _H: &OMatrix,
        noise: &CorrelatedNoise,
    ) -> Result<(), &str> {
        let hn = |x: &OVector| -> OVector {
            let mut zp = h(x);
            h_normalize(&mut zp, z);
            zp
        };
        self.observe(&z, hn, noise)
    }
}

/// Test SIR estimator operations defined on a FatSampleState.
// NOTE(review): most generic parameter lists in this section were stripped by
// extraction (the lifetimed ones like `<'a, N: RealField, D: Dim>` survived).
// Restore from version control; comments document intent only.
/// Wrapper around a SIR `SampleStateDraw` with flags selecting the resampling
/// and roughening strategies under test.
pub struct FatSampleState<'a, N: RealField, D: Dim>
where
    DefaultAllocator: Allocator + Allocator,
{
    pub sample: SampleStateDraw<'a, N, D>,
    // Selects which resampler `observe_any` uses.
    pub systematic_resampler: bool,
    // When true, roughening noise is derived from the sample Kalman covariance;
    // otherwise min/max roughening is used.
    pub kalman_roughening: bool,
}

impl<'a, N: FromPrimitive + RealField, D: Dim> Estimator for FatSampleState<'a, N, D>
where
    DefaultAllocator: Allocator + Allocator,
{
    // Delegate state access to the wrapped sample state.
    fn state(&self) -> Result, &str> {
        self.sample.state.state()
    }
}

impl<'a, N: Copy + FromPrimitive + RealField, D: Dim> KalmanEstimator for FatSampleState<'a, N, D>
where
    DefaultAllocator: Allocator + Allocator + Allocator,
{
    // Delegate Kalman-form (mean/covariance of samples) access to the sample state.
    fn kalman_state(&self) -> Result, &str> {
        self.sample.state.kalman_state()
    }
}

impl<'a, D: Dim, QD: Dim, ZD: Dim> FatEstimator for FatSampleState<'a, f64, D>
where
    DefaultAllocator: Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator
        + Allocator,
    // sample
    DefaultAllocator: Allocator,
{
    /// State dimension taken from the first sample.
    fn dim(&self) -> D {
        return self.sample.state.s[0].shape_generic().0;
    }

    fn allow_error_by(&self) -> f64 {
        3000000. / (self.sample.state.s.len() as f64).sqrt() // sample error scales with sqrt number samples
    }

    fn trace_state(&self) {
        // println!("{:?}\n{:?}", self.w, self.s);
    }

    /// Draw a fresh set of samples (same count as the current set) from the
    /// given Kalman state.
    fn init(&mut self, state: &KalmanState) -> Result<(), &str> {
        let sample_state = SampleState::from_kalman((*state).clone(), self.sample.state.s.len(), self.sample.rng).unwrap();
        self.sample.state = sample_state;
        Ok(())
    }

    fn predict_fn(
        &mut self,
        _x_pred: &OVector,
        f: fn(&OVector) -> OVector,
        _F: &OMatrix,
        noise: &CoupledNoise,
    ) {
        // Predict and sample the noise: G * sqrt(q) forms the coupled noise root
        // used to draw per-sample normal noise.
        let cn = &noise.G * Matrix::from_diagonal(&noise.q.map(|v| v.sqrt()));
        let sampler = sir::normal_noise_sampler_coupled(cn);
        self.sample.predict_sampled(
            move |x: &OVector, rng: &mut dyn RngCore| -> OVector { f(&x) + sampler(rng) },
        );
    }

    fn observe_any(
        &mut self,
        z: &OVector,
        h: fn(&OVector) -> OVector,
        _h_normalize: fn(h: &mut OVector, h0: &OVector),
        _H: &OMatrix,
        noise: &CorrelatedNoise,
    ) -> Result<(), &str> {
        // Weight the samples by the Gaussian observation likelihood of z.
        self.sample.state.observe(sir::gaussian_observation_likelihood(z, h, noise));
        // NOTE(review): `systematic_resampler == true` selects `standard_resampler`
        // and false selects `systematic_resampler` — this looks inverted relative
        // to the flag name; confirm intent against the calling tests.
        let mut resampler = if self.systematic_resampler {
            |w: &mut sir::Likelihoods, rng: &mut dyn RngCore| sir::standard_resampler(w, rng)
        } else {
            |w: &mut sir::Likelihoods, rng: &mut dyn RngCore| sir::systematic_resampler(w, rng)
        };
        if self.kalman_roughening {
            // Roughen with noise shaped by the current sample covariance.
            let noise = CoupledNoise::from_correlated(&CorrelatedNoise {
                Q: self.kalman_state().unwrap().X,
            })
            .unwrap();
            let mut roughener = move |s: &mut sir::Samples, rng: &mut dyn RngCore| {
                sir::roughen_noise(s, &noise, 1., rng)
            };
            self.sample
                .update_resample(&mut resampler, &mut roughener)?;
        } else {
            // Default min/max based roughening.
            let mut roughener = |s: &mut sir::Samples, rng: &mut dyn RngCore| {
                sir::roughen_minmax(s, 1., rng)
            };
            self.sample
                .update_resample(&mut resampler, &mut roughener)?;
        };
        Ok(())
    }
}

/// Iterate the diagonal elements of a matrix via its flat element iterator.
trait Diagonal> {
    fn diagonal_iter(&self) -> StepBy>;
}

impl> Diagonal for Matrix {
    fn diagonal_iter(&self) -> StepBy> {
        // Stepping the flat iterator by nrows()+1 visits successive diagonal
        // entries (element order here places them that far apart).
        self.iter().step_by(self.nrows() + 1)
    }
}