//! Types for controlling batching behavior during parallel processing.

use core::ops::Range;

/// Controls how a parallel operation splits a large workload into batches
/// during iteration.
///
/// A parallel query chunks up large tables and archetypes into batches of at
/// most a certain size; a parallel event reader does the same with the
/// remaining events.
///
/// By default the batch size is derived automatically: the size of the largest
/// matched archetype is divided by the thread count (rounded up). This keeps
/// the scheduling overhead of spawning tasks low, but it assumes every entity
/// costs roughly the same amount of work — which is not true for every
/// workload.
///
/// See [`Query::par_iter`], [`EventReader::par_read`] for more information.
///
/// [`Query::par_iter`]: crate::system::Query::par_iter
/// [`EventReader::par_read`]: crate::event::EventReader::par_read
#[derive(Clone, Debug)]
pub struct BatchingStrategy {
    /// Lower and upper bounds on the size of a batch of items.
    ///
    /// When both bounds are the same value, the batch size is fixed.
    ///
    /// Defaults to `[1, usize::MAX]`.
    pub batch_size_limits: Range<usize>,
    /// How many batches each thread of the [`ComputeTaskPool`] receives.
    /// Raising this shrinks the batch size, which may increase the
    /// scheduling overhead of the iteration.
    ///
    /// Defaults to 1.
    ///
    /// [`ComputeTaskPool`]: bevy_tasks::ComputeTaskPool
    pub batches_per_thread: usize,
}

impl Default for BatchingStrategy {
    fn default() -> Self {
        BatchingStrategy::new()
    }
}

impl BatchingStrategy {
    /// Creates a new unconstrained default batching strategy.
    pub const fn new() -> Self {
        Self {
            // 1..usize::MAX is effectively "no constraint".
            batch_size_limits: 1..usize::MAX,
            batches_per_thread: 1,
        }
    }

    /// Declares a batching strategy with a fixed batch size.
    pub const fn fixed(batch_size: usize) -> Self {
        // Pinning both bounds to the same value yields an empty limit range,
        // which `calc_batch_size` treats as a fixed batch size.
        Self::new()
            .min_batch_size(batch_size)
            .max_batch_size(batch_size)
    }

    /// Configures the minimum allowed batch size of this instance.
    pub const fn min_batch_size(mut self, batch_size: usize) -> Self {
        self.batch_size_limits.start = batch_size;
        self
    }

    /// Configures the maximum allowed batch size of this instance.
    pub const fn max_batch_size(mut self, batch_size: usize) -> Self {
        self.batch_size_limits.end = batch_size;
        self
    }

    /// Configures the number of batches to assign to each thread for this instance.
    ///
    /// # Panics
    ///
    /// Panics if `batches_per_thread` is 0.
    pub fn batches_per_thread(mut self, batches_per_thread: usize) -> Self {
        assert!(
            batches_per_thread > 0,
            "The number of batches per thread must be non-zero."
        );
        self.batches_per_thread = batches_per_thread;
        self
    }

    /// Calculate the batch size according to the given thread count and max item count.
    /// The count is provided as a closure so that it can be calculated only if needed.
    ///
    /// # Panics
    ///
    /// Panics if `thread_count` is 0.
    #[inline]
    pub fn calc_batch_size(&self, max_items: impl FnOnce() -> usize, thread_count: usize) -> usize {
        let (min, max) = (self.batch_size_limits.start, self.batch_size_limits.end);
        // An empty limit range (min >= max) encodes a fixed batch size, so the
        // item count never needs to be computed in that case.
        if min >= max {
            return min;
        }
        assert!(
            thread_count > 0,
            "Attempted to run parallel iteration with an empty TaskPool"
        );
        let total_batches = thread_count * self.batches_per_thread;
        // Round the per-batch item count up so no items are left over,
        // then keep it inside the configured limits.
        max_items().div_ceil(total_batches).clamp(min, max)
    }
}