// fugue_evo/fugue_integration/evolution_model.rs
1//! Integration with Fugue's `Model<T>` monad
2//!
3//! This module provides probabilistic evolutionary models that integrate
4//! with Fugue's inference engine (SMC, MCMC).
5//!
6//! # Key Concepts
7//!
8//! - **Evolution as Inference**: Treat evolution as posterior sampling
9//! - **Fitness as Likelihood**: Selection pressure = Bayesian conditioning
10//! - **Operators as Kernels**: Mutation/crossover define proposal distributions
11
12use fugue::{addr, ChoiceValue, Trace};
13use rand::Rng;
14use std::marker::PhantomData;
15
16use crate::fitness::traits::Fitness;
17use crate::genome::bounds::MultiBounds;
18use crate::genome::traits::EvolutionaryGenome;
19
/// A probabilistic model of an evolutionary step
///
/// This wraps an evolutionary genome in Fugue's Model monad,
/// enabling integration with SMC and MCMC inference.
#[derive(Clone)]
pub struct EvolutionModel<G, F>
where
    G: EvolutionaryGenome,
    F: Fitness<Genome = G, Value = f64>,
{
    /// The fitness function used for conditioning (selection acts as a likelihood)
    pub fitness: F,
    /// Temperature for fitness-based weighting; the log-weight is
    /// `fitness / temperature`, so lower values sharpen selection pressure
    temperature: f64,
    /// Bounds used when sampling fresh genomes from the prior
    bounds: MultiBounds,
    /// Zero-sized marker tying this model to the genome type `G`
    _marker: PhantomData<G>,
}
39
40impl<G, F> EvolutionModel<G, F>
41where
42    G: EvolutionaryGenome,
43    F: Fitness<Genome = G, Value = f64>,
44{
45    /// Create a new evolution model
46    pub fn new(fitness: F, bounds: MultiBounds) -> Self {
47        Self {
48            fitness,
49            temperature: 1.0,
50            bounds,
51            _marker: PhantomData,
52        }
53    }
54
55    /// Set the temperature for fitness weighting
56    ///
57    /// Lower temperature = stronger selection pressure
58    pub fn with_temperature(mut self, temperature: f64) -> Self {
59        self.temperature = temperature;
60        self
61    }
62
63    /// Sample a genome from the prior (uniform within bounds)
64    pub fn sample_prior<R: Rng>(&self, rng: &mut R) -> G {
65        G::generate(rng, &self.bounds)
66    }
67
68    /// Compute the log-weight for a genome based on fitness
69    ///
70    /// w(x) = exp(f(x) / T) where T is temperature
71    pub fn log_weight(&self, genome: &G) -> f64 {
72        let fitness = self.fitness.evaluate(genome);
73        fitness / self.temperature
74    }
75
76    /// Create a Fugue trace representing this genome with fitness weight
77    pub fn to_weighted_trace(&self, genome: &G) -> Trace {
78        let mut trace = genome.to_trace();
79        let log_weight = self.log_weight(genome);
80
81        // Add the fitness as a special address in the trace
82        trace.insert_choice(
83            addr!("__fitness__"),
84            ChoiceValue::F64(log_weight),
85            log_weight, // Use fitness as log probability
86        );
87
88        trace
89    }
90}
91
/// Configuration for the evolutionary Markov chain
#[derive(Clone, Debug)]
pub struct EvolutionChainConfig {
    /// Mutation probability per address
    pub mutation_rate: f64,
    /// Standard deviation for Gaussian mutations
    pub mutation_sigma: f64,
    /// Temperature for fitness weighting
    pub temperature: f64,
    /// Number of generations
    pub generations: usize,
}

impl Default for EvolutionChainConfig {
    /// Sensible starting point: mild mutation, unit temperature,
    /// 100 generations.
    fn default() -> Self {
        EvolutionChainConfig {
            mutation_rate: 0.1,
            mutation_sigma: 0.1,
            temperature: 1.0,
            generations: 100,
        }
    }
}

impl EvolutionChainConfig {
    /// Create a configuration with default settings (see [`Default`]).
    pub fn new() -> Self {
        Self::default()
    }

    /// Builder-style setter for the per-address mutation probability.
    pub fn mutation_rate(self, rate: f64) -> Self {
        Self {
            mutation_rate: rate,
            ..self
        }
    }

    /// Builder-style setter for the mutation standard deviation.
    pub fn mutation_sigma(self, sigma: f64) -> Self {
        Self {
            mutation_sigma: sigma,
            ..self
        }
    }

    /// Builder-style setter for the fitness-weighting temperature.
    pub fn temperature(self, temp: f64) -> Self {
        Self {
            temperature: temp,
            ..self
        }
    }

    /// Builder-style setter for the number of generations.
    pub fn generations(self, gens: usize) -> Self {
        Self {
            generations: gens,
            ..self
        }
    }
}
146
/// A single step in the evolutionary Markov chain
///
/// This represents the transition kernel for MCMC-style evolution.
pub struct EvolutionStep<G, F>
where
    G: EvolutionaryGenome,
    F: Fitness<Genome = G, Value = f64>,
{
    /// Model providing fitness-based log-weights for accept/reject
    model: EvolutionModel<G, F>,
    /// Mutation rate / sigma settings used by the proposal kernel
    config: EvolutionChainConfig,
}
158
159impl<G, F> EvolutionStep<G, F>
160where
161    G: EvolutionaryGenome,
162    F: Fitness<Genome = G, Value = f64>,
163{
164    /// Create a new evolution step
165    pub fn new(model: EvolutionModel<G, F>, config: EvolutionChainConfig) -> Self {
166        Self { model, config }
167    }
168
169    /// Propose a new genome via mutation
170    pub fn propose<R: Rng>(&self, current: &G, rng: &mut R) -> G {
171        let trace = current.to_trace();
172        let mut new_trace = Trace::default();
173
174        for (addr, choice) in &trace.choices {
175            let new_value = if rng.gen::<f64>() < self.config.mutation_rate {
176                self.mutate_value(&choice.value, rng)
177            } else {
178                choice.value.clone()
179            };
180            new_trace.insert_choice(addr.clone(), new_value, 0.0);
181        }
182
183        G::from_trace(&new_trace).unwrap_or_else(|_| current.clone())
184    }
185
186    /// Mutate a single value
187    fn mutate_value<R: Rng>(&self, value: &ChoiceValue, rng: &mut R) -> ChoiceValue {
188        match value {
189            ChoiceValue::F64(v) => {
190                let noise = (rng.gen::<f64>() * 2.0 - 1.0) * self.config.mutation_sigma;
191                ChoiceValue::F64(v + noise)
192            }
193            ChoiceValue::Bool(b) => ChoiceValue::Bool(!b),
194            ChoiceValue::Usize(n) => {
195                // Random walk on integers
196                let delta: i32 = if rng.gen::<bool>() { 1 } else { -1 };
197                ChoiceValue::Usize((*n as i32 + delta).max(0) as usize)
198            }
199            other => other.clone(),
200        }
201    }
202
203    /// Compute acceptance probability (Metropolis-Hastings)
204    ///
205    /// α(x, x') = min(1, exp(f(x') - f(x)) / T)
206    pub fn acceptance_probability(&self, current: &G, proposed: &G) -> f64 {
207        let current_fitness = self.model.log_weight(current);
208        let proposed_fitness = self.model.log_weight(proposed);
209
210        let log_ratio = proposed_fitness - current_fitness;
211        (log_ratio.exp()).min(1.0)
212    }
213
214    /// Perform one MCMC step
215    pub fn step<R: Rng>(&self, current: &G, rng: &mut R) -> G {
216        let proposed = self.propose(current, rng);
217        let acceptance = self.acceptance_probability(current, &proposed);
218
219        if rng.gen::<f64>() < acceptance {
220            proposed
221        } else {
222            current.clone()
223        }
224    }
225
226    /// Run the chain for multiple steps
227    pub fn run_chain<R: Rng>(&self, initial: &G, num_steps: usize, rng: &mut R) -> Vec<G> {
228        let mut chain = Vec::with_capacity(num_steps + 1);
229        let mut current = initial.clone();
230
231        chain.push(current.clone());
232
233        for _ in 0..num_steps {
234            current = self.step(&current, rng);
235            chain.push(current.clone());
236        }
237
238        chain
239    }
240}
241
/// Particle representation for Sequential Monte Carlo
#[derive(Clone)]
pub struct Particle<G>
where
    G: EvolutionaryGenome,
{
    /// The genome state
    pub genome: G,
    /// Unnormalized log weight of this particle
    pub log_weight: f64,
    /// Weight normalized across the particle population
    /// (0.0 until weight normalization has run)
    pub normalized_weight: f64,
}
255
256impl<G: EvolutionaryGenome> Particle<G> {
257    /// Create a new particle
258    pub fn new(genome: G, log_weight: f64) -> Self {
259        Self {
260            genome,
261            log_weight,
262            normalized_weight: 0.0,
263        }
264    }
265}
266
/// Sequential Monte Carlo for evolutionary inference
///
/// SMC treats evolution as a sequence of distributions
/// that progressively concentrate on high-fitness solutions.
pub struct EvolutionarySMC<G, F>
where
    G: EvolutionaryGenome,
    F: Fitness<Genome = G, Value = f64>,
{
    /// Model providing prior sampling and fitness log-weights
    model: EvolutionModel<G, F>,
    /// Number of particles
    num_particles: usize,
    /// Full annealing temperature schedule, hottest first
    /// (one SMC iteration per entry — not a position index)
    temperature_schedule: Vec<f64>,
}
282
283impl<G, F> EvolutionarySMC<G, F>
284where
285    G: EvolutionaryGenome + Clone,
286    F: Fitness<Genome = G, Value = f64> + Clone,
287{
288    /// Create a new SMC sampler
289    pub fn new(fitness: F, bounds: MultiBounds, num_particles: usize) -> Self {
290        Self {
291            model: EvolutionModel::new(fitness, bounds),
292            num_particles,
293            temperature_schedule: Self::default_schedule(10),
294        }
295    }
296
297    /// Create a default annealing schedule
298    fn default_schedule(steps: usize) -> Vec<f64> {
299        (0..=steps)
300            .map(|i| {
301                let t = i as f64 / steps as f64;
302                // Geometric annealing from infinity to 1
303                (1.0 - t).powi(2) * 100.0 + 1.0
304            })
305            .collect()
306    }
307
308    /// Set a custom temperature schedule
309    pub fn with_schedule(mut self, schedule: Vec<f64>) -> Self {
310        self.temperature_schedule = schedule;
311        self
312    }
313
314    /// Initialize particles from prior
315    pub fn initialize<R: Rng>(&self, rng: &mut R) -> Vec<Particle<G>> {
316        (0..self.num_particles)
317            .map(|_| {
318                let genome = self.model.sample_prior(rng);
319                Particle::new(genome, 0.0)
320            })
321            .collect()
322    }
323
324    /// Normalize particle weights
325    fn normalize_weights(particles: &mut [Particle<G>]) {
326        let max_log_weight = particles
327            .iter()
328            .map(|p| p.log_weight)
329            .fold(f64::NEG_INFINITY, f64::max);
330
331        let sum: f64 = particles
332            .iter()
333            .map(|p| (p.log_weight - max_log_weight).exp())
334            .sum();
335
336        let log_sum = sum.ln() + max_log_weight;
337
338        for particle in particles.iter_mut() {
339            particle.normalized_weight = (particle.log_weight - log_sum).exp();
340        }
341    }
342
343    /// Compute effective sample size
344    fn effective_sample_size(particles: &[Particle<G>]) -> f64 {
345        let sum_sq: f64 = particles.iter().map(|p| p.normalized_weight.powi(2)).sum();
346        if sum_sq > 0.0 {
347            1.0 / sum_sq
348        } else {
349            0.0
350        }
351    }
352
353    /// Resample particles using systematic resampling
354    pub fn resample<R: Rng>(particles: &[Particle<G>], rng: &mut R) -> Vec<Particle<G>> {
355        let n = particles.len();
356        let mut resampled = Vec::with_capacity(n);
357
358        // Build cumulative weights
359        let mut cumulative = vec![0.0; n];
360        cumulative[0] = particles[0].normalized_weight;
361        for i in 1..n {
362            cumulative[i] = cumulative[i - 1] + particles[i].normalized_weight;
363        }
364
365        // Systematic resampling
366        let u0: f64 = rng.gen::<f64>() / n as f64;
367
368        let mut j = 0;
369        for i in 0..n {
370            let u = u0 + i as f64 / n as f64;
371            while j < n - 1 && cumulative[j] < u {
372                j += 1;
373            }
374            let mut particle = particles[j].clone();
375            particle.log_weight = 0.0;
376            particle.normalized_weight = 1.0 / n as f64;
377            resampled.push(particle);
378        }
379
380        resampled
381    }
382
383    /// Mutation kernel (MCMC move)
384    fn mutate_particle<R: Rng>(
385        &self,
386        particle: &Particle<G>,
387        temperature: f64,
388        rng: &mut R,
389    ) -> Particle<G> {
390        let model = EvolutionModel::new(self.model.fitness.clone(), self.model.bounds.clone())
391            .with_temperature(temperature);
392        let config = EvolutionChainConfig::default()
393            .mutation_rate(0.2)
394            .mutation_sigma(0.1);
395        let step = EvolutionStep::new(model, config);
396
397        let new_genome = step.step(&particle.genome, rng);
398        Particle::new(new_genome, particle.log_weight)
399    }
400
401    /// Run the SMC sampler
402    pub fn run<R: Rng>(&self, rng: &mut R) -> Vec<Particle<G>> {
403        let mut particles = self.initialize(rng);
404
405        for (i, &temperature) in self.temperature_schedule.iter().enumerate() {
406            // Update model temperature
407            let model = EvolutionModel::new(self.model.fitness.clone(), self.model.bounds.clone())
408                .with_temperature(temperature);
409
410            // Reweight particles
411            for particle in &mut particles {
412                particle.log_weight = model.log_weight(&particle.genome);
413            }
414
415            Self::normalize_weights(&mut particles);
416
417            // Resample if ESS too low
418            let ess = Self::effective_sample_size(&particles);
419            if ess < self.num_particles as f64 / 2.0 && i < self.temperature_schedule.len() - 1 {
420                particles = Self::resample(&particles, rng);
421            }
422
423            // Mutate particles (MCMC rejuvenation)
424            if i < self.temperature_schedule.len() - 1 {
425                particles = particles
426                    .into_iter()
427                    .map(|p| self.mutate_particle(&p, temperature, rng))
428                    .collect();
429            }
430        }
431
432        // Final reweighting and normalization
433        for particle in &mut particles {
434            particle.log_weight = self.model.log_weight(&particle.genome);
435        }
436        Self::normalize_weights(&mut particles);
437
438        particles
439    }
440
441    /// Get the best particle
442    pub fn best_particle(particles: &[Particle<G>]) -> Option<&Particle<G>> {
443        particles.iter().max_by(|a, b| {
444            a.log_weight
445                .partial_cmp(&b.log_weight)
446                .unwrap_or(std::cmp::Ordering::Equal)
447        })
448    }
449
450    /// Compute posterior mean (weighted average of traces)
451    pub fn posterior_mean(particles: &[Particle<G>]) -> Option<Trace> {
452        if particles.is_empty() {
453            return None;
454        }
455
456        let mut mean_trace = Trace::default();
457        let first_trace = particles[0].genome.to_trace();
458
459        for addr in first_trace.choices.keys() {
460            let weighted_sum: f64 = particles
461                .iter()
462                .map(|p| {
463                    let trace = p.genome.to_trace();
464                    let value = trace
465                        .choices
466                        .get(addr)
467                        .map(|c| match &c.value {
468                            ChoiceValue::F64(f) => *f,
469                            _ => 0.0,
470                        })
471                        .unwrap_or(0.0);
472                    value * p.normalized_weight
473                })
474                .sum();
475
476            mean_trace.insert_choice(addr.clone(), ChoiceValue::F64(weighted_sum), 0.0);
477        }
478
479        Some(mean_trace)
480    }
481}
482
/// Hierarchical Bayesian Genetic Algorithm
///
/// This combines population-based evolution with Bayesian inference
/// over hyperparameters.
pub struct HBGA<G, F>
where
    G: EvolutionaryGenome,
    F: Fitness<Genome = G, Value = f64>,
{
    /// Model providing prior sampling and fitness evaluation
    model: EvolutionModel<G, F>,
    /// Population size
    population_size: usize,
    /// Number of generations
    generations: usize,
    /// Prior on mutation rate (Beta distribution alpha/beta parameters)
    mutation_rate_prior: (f64, f64),
    /// Prior on mutation sigma (Gamma distribution shape/rate parameters)
    mutation_sigma_prior: (f64, f64),
}
502
impl<G, F> HBGA<G, F>
where
    G: EvolutionaryGenome + Clone,
    F: Fitness<Genome = G, Value = f64> + Clone,
{
    /// Create a new HBGA
    ///
    /// Default priors: Beta(2, 8) for the mutation rate and
    /// shape 2 / rate 10 for the mutation sigma.
    pub fn new(
        fitness: F,
        bounds: MultiBounds,
        population_size: usize,
        generations: usize,
    ) -> Self {
        Self {
            model: EvolutionModel::new(fitness, bounds),
            population_size,
            generations,
            mutation_rate_prior: (2.0, 8.0), // Prior favors low mutation rates
            mutation_sigma_prior: (2.0, 10.0), // Prior favors small mutations
        }
    }

    /// Set the mutation rate prior (Beta distribution parameters)
    pub fn with_mutation_rate_prior(mut self, alpha: f64, beta: f64) -> Self {
        self.mutation_rate_prior = (alpha, beta);
        self
    }

    /// Set the mutation sigma prior (Gamma distribution parameters)
    pub fn with_mutation_sigma_prior(mut self, shape: f64, rate: f64) -> Self {
        self.mutation_sigma_prior = (shape, rate);
        self
    }

    /// Sample mutation rate from prior
    ///
    /// Not an exact Beta draw: uses the Beta mean `alpha / (alpha + beta)`
    /// plus uniform noise in [-0.05, 0.05], clamped to [0.01, 0.5].
    fn sample_mutation_rate<R: Rng>(&self, rng: &mut R) -> f64 {
        // Simple approximation: use mean with some noise
        let (alpha, beta) = self.mutation_rate_prior;
        let mean = alpha / (alpha + beta);
        let noise = (rng.gen::<f64>() - 0.5) * 0.1;
        (mean + noise).clamp(0.01, 0.5)
    }

    /// Sample mutation sigma from prior
    ///
    /// Not an exact Gamma draw: uses the Gamma mean `shape / rate` plus
    /// uniform noise in [-0.05, 0.05], floored at 0.01.
    fn sample_mutation_sigma<R: Rng>(&self, rng: &mut R) -> f64 {
        // Simple approximation: use mean with some noise
        let (shape, rate) = self.mutation_sigma_prior;
        let mean = shape / rate;
        let noise = (rng.gen::<f64>() - 0.5) * 0.1;
        (mean + noise).max(0.01)
    }

    /// Run the HBGA
    ///
    /// Per generation: evaluate fitness, track the best genome, adapt the
    /// mutation rate from the change in mean fitness, tournament-select
    /// (size 2, with replacement), then mutate every selected genome.
    ///
    /// NOTE(review): `mutation_sigma` is sampled once and never adapted,
    /// although the result field is named `final_mutation_sigma` as if
    /// learned — confirm whether sigma adaptation was intended.
    /// NOTE(review): panics if `population_size` is 0 (`gen_range(0..0)`
    /// and `population[0]` both fail on an empty population).
    pub fn run<R: Rng>(&self, rng: &mut R) -> HBGAResult<G> {
        // Initialize population
        let mut population: Vec<G> = (0..self.population_size)
            .map(|_| self.model.sample_prior(rng))
            .collect();

        let mut fitness_history = Vec::new();
        let mut best_genome: Option<G> = None;
        let mut best_fitness = f64::NEG_INFINITY;

        // Hyperparameter samples (would be inferred in full HBGA)
        let mut mutation_rate = self.sample_mutation_rate(rng);
        let mutation_sigma = self.sample_mutation_sigma(rng);

        for gen in 0..self.generations {
            // Evaluate fitness
            let fitnesses: Vec<f64> = population
                .iter()
                .map(|g| self.model.fitness.evaluate(g))
                .collect();

            // Track best genome ever seen (elitist bookkeeping only; the
            // best genome is not re-injected into the population)
            for (i, &f) in fitnesses.iter().enumerate() {
                if f > best_fitness {
                    best_fitness = f;
                    best_genome = Some(population[i].clone());
                }
            }

            let mean_fitness: f64 = fitnesses.iter().sum::<f64>() / fitnesses.len() as f64;
            fitness_history.push(mean_fitness);

            // Adaptive hyperparameter update (simplified Bayesian update)
            let improvement = if gen > 0 {
                mean_fitness - fitness_history[gen - 1]
            } else {
                0.0
            };

            // If improving, slightly increase mutation; otherwise decrease
            if improvement > 0.0 {
                mutation_rate = (mutation_rate * 1.05).min(0.5);
            } else {
                mutation_rate = (mutation_rate * 0.95).max(0.01);
            }

            // Selection (tournament, size 2, with replacement)
            let mut selected = Vec::with_capacity(self.population_size);
            for _ in 0..self.population_size {
                let i = rng.gen_range(0..self.population_size);
                let j = rng.gen_range(0..self.population_size);
                if fitnesses[i] > fitnesses[j] {
                    selected.push(population[i].clone());
                } else {
                    selected.push(population[j].clone());
                }
            }

            // Mutation with current hyperparameters; `propose` mutates
            // unconditionally (no MH accept/reject at this stage)
            let config = EvolutionChainConfig::default()
                .mutation_rate(mutation_rate)
                .mutation_sigma(mutation_sigma);

            let model = EvolutionModel::new(self.model.fitness.clone(), self.model.bounds.clone());
            let step = EvolutionStep::new(model, config);

            population = selected
                .into_iter()
                .map(|g| step.propose(&g, rng))
                .collect();
        }

        HBGAResult {
            best_genome: best_genome.unwrap_or_else(|| population[0].clone()),
            best_fitness,
            fitness_history,
            final_mutation_rate: mutation_rate,
            final_mutation_sigma: mutation_sigma,
        }
    }
}
636
637/// Result of HBGA run
638pub struct HBGAResult<G> {
639    /// Best genome found
640    pub best_genome: G,
641    /// Best fitness value
642    pub best_fitness: f64,
643    /// Mean fitness over generations
644    pub fitness_history: Vec<f64>,
645    /// Final learned mutation rate
646    pub final_mutation_rate: f64,
647    /// Final learned mutation sigma
648    pub final_mutation_sigma: f64,
649}
650
#[cfg(test)]
mod tests {
    use super::*;
    use crate::fitness::benchmarks::Sphere;
    use crate::genome::real_vector::RealVector;

    #[test]
    fn test_evolution_model_basic() {
        let sphere = Sphere::new(3);
        let bounds = MultiBounds::symmetric(5.0, 3);
        let model = EvolutionModel::<RealVector, _>::new(sphere, bounds);

        let genome = RealVector::new(vec![0.0, 0.0, 0.0]);
        let weight = model.log_weight(&genome);

        // Optimal solution should have highest weight (fitness = 0 for sphere)
        assert!(weight.is_finite());
    }

    #[test]
    fn test_evolution_step() {
        let mut rng = rand::thread_rng();
        let sphere = Sphere::new(3);
        let bounds = MultiBounds::symmetric(5.0, 3);
        let model = EvolutionModel::<RealVector, _>::new(sphere, bounds);
        let config = EvolutionChainConfig::default();
        let step = EvolutionStep::new(model, config);

        let genome = RealVector::new(vec![1.0, 2.0, 3.0]);
        let proposed = step.propose(&genome, &mut rng);

        // Proposal must preserve dimensionality regardless of which
        // addresses were mutated
        assert_eq!(proposed.dimension(), 3);
    }

    #[test]
    fn test_mcmc_chain() {
        let mut rng = rand::thread_rng();
        let sphere = Sphere::new(3);
        let bounds = MultiBounds::symmetric(5.0, 3);
        let model = EvolutionModel::<RealVector, _>::new(sphere, bounds);
        let config = EvolutionChainConfig::default().generations(10);
        let step = EvolutionStep::new(model, config);

        let initial = RealVector::new(vec![5.0, 5.0, 5.0]);
        let chain = step.run_chain(&initial, 10, &mut rng);

        assert_eq!(chain.len(), 11); // Initial + 10 steps
    }

    #[test]
    fn test_smc_basic() {
        let mut rng = rand::thread_rng();
        let sphere = Sphere::new(2);
        let bounds = MultiBounds::symmetric(5.0, 2);
        let smc = EvolutionarySMC::<RealVector, _>::new(sphere, bounds, 20);

        let particles = smc.run(&mut rng);

        assert_eq!(particles.len(), 20);

        // Check that weights are normalized
        let total_weight: f64 = particles.iter().map(|p| p.normalized_weight).sum();
        assert!((total_weight - 1.0).abs() < 0.01);
    }

    #[test]
    fn test_hbga_basic() {
        let mut rng = rand::thread_rng();
        let sphere = Sphere::new(2);
        let bounds = MultiBounds::symmetric(5.0, 2);
        let hbga = HBGA::new(sphere, bounds, 20, 10);

        let result = hbga.run(&mut rng);

        // One mean-fitness entry per generation, and a finite best fitness
        assert_eq!(result.fitness_history.len(), 10);
        assert!(result.best_fitness.is_finite());
    }

    #[test]
    fn test_particle_resampling() {
        let mut rng = rand::thread_rng();

        let genomes: Vec<RealVector> = (0..5)
            .map(|_| RealVector::new(vec![rng.gen(), rng.gen()]))
            .collect();

        let particles: Vec<Particle<RealVector>> = genomes
            .into_iter()
            .enumerate()
            .map(|(i, g)| {
                let mut p = Particle::new(g, i as f64);
                p.normalized_weight = (i + 1) as f64 / 15.0; // 1+2+3+4+5 = 15
                p
            })
            .collect();

        let resampled = EvolutionarySMC::<RealVector, Sphere>::resample(&particles, &mut rng);
        assert_eq!(resampled.len(), 5);
    }
}