Re-use SendDispatcher in Dispatcher to reduce code duplication
Imberflur committed Jan 3, 2024
1 parent 188e064 commit 3f15a49
Showing 1 changed file with 16 additions and 53 deletions.
69 changes: 16 additions & 53 deletions src/dispatch/dispatcher.rs
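In outline, the commit removes the `stages` and `thread_pool` fields from `Dispatcher` and replaces them with a single `inner: SendDispatcher` field that the remaining methods forward to. Below is a minimal, self-contained sketch of that delegation shape; `Stage`, `World`, and the thread-local list are simplified placeholders, not the crate's real types, and the `parallel` thread-pool handling is omitted:

    // Placeholder types standing in for the crate's Stage and World.
    struct Stage;
    struct World;

    impl Stage {
        fn execute_seq(&mut self, _world: &World) {}
    }

    // SendDispatcher owns the stages and implements the dispatch logic once.
    struct SendDispatcher {
        stages: Vec<Stage>,
    }

    impl SendDispatcher {
        fn dispatch_seq(&mut self, world: &World) {
            for stage in &mut self.stages {
                stage.execute_seq(world);
            }
        }
    }

    // Dispatcher wraps a SendDispatcher instead of duplicating its fields,
    // keeping only the thread-local systems for itself.
    struct Dispatcher {
        inner: SendDispatcher,
        thread_local: Vec<Box<dyn FnMut(&World)>>,
    }

    impl Dispatcher {
        fn dispatch_seq(&mut self, world: &World) {
            // Forward to the shared implementation instead of repeating it.
            self.inner.dispatch_seq(world);
        }
    }

    fn main() {
        let world = World;
        let mut dispatcher = Dispatcher {
            inner: SendDispatcher { stages: vec![Stage] },
            thread_local: Vec::new(),
        };
        dispatcher.dispatch_seq(&world);
    }

The stage-execution logic then lives only in `SendDispatcher`, and `Dispatcher` layers the non-`Send` thread-local systems on top of it, which is what eliminates the duplicated code in the diff below.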
@@ -14,19 +14,15 @@ pub type ThreadPoolWrapper = Option<::std::sync::Arc<::rayon::ThreadPool>>;
 /// The dispatcher struct, allowing
 /// systems to be executed in parallel.
 pub struct Dispatcher<'a, 'b> {
-    stages: Vec<Stage<'a>>,
+    inner: SendDispatcher<'a>,
     thread_local: ThreadLocal<'b>,
-    #[cfg(feature = "parallel")]
-    thread_pool: ::std::sync::Arc<::std::sync::RwLock<ThreadPoolWrapper>>,
 }
 
 impl<'a, 'b> Dispatcher<'a, 'b> {
     /// Sets up all the systems which means they are gonna add default values
     /// for the resources they need.
     pub fn setup(&mut self, world: &mut World) {
-        for stage in &mut self.stages {
-            stage.setup(world);
-        }
+        self.inner.setup(world);
 
         for sys in &mut self.thread_local {
             sys.setup(world);
@@ -38,9 +34,7 @@ impl<'a, 'b> Dispatcher<'a, 'b> {
     /// / or resources from the `World` which are associated with external
     /// resources.
     pub fn dispose(self, world: &mut World) {
-        for stage in self.stages {
-            stage.dispose(world);
-        }
+        self.inner.dispose(world);
 
         for sys in self.thread_local {
             sys.dispose(world);
@@ -60,12 +54,7 @@ impl<'a, 'b> Dispatcher<'a, 'b> {
     /// Please note that this method assumes that no resource
     /// is currently borrowed. If that's the case, it panics.
     pub fn dispatch(&mut self, world: &World) {
-        #[cfg(feature = "parallel")]
-        self.dispatch_par(world);
-
-        #[cfg(not(feature = "parallel"))]
-        self.dispatch_seq(world);
-
+        self.inner.dispatch(world);
         self.dispatch_thread_local(world);
     }
 
@@ -81,18 +70,7 @@ impl<'a, 'b> Dispatcher<'a, 'b> {
     /// is currently borrowed. If that's the case, it panics.
     #[cfg(feature = "parallel")]
     pub fn dispatch_par(&mut self, world: &World) {
-        let stages = &mut self.stages;
-
-        self.thread_pool
-            .read()
-            .unwrap()
-            .as_ref()
-            .unwrap()
-            .install(move || {
-                for stage in stages {
-                    stage.execute(world);
-                }
-            });
+        self.inner.dispatch_par(world);
     }
 
     /// Dispatches the systems (except thread local systems) sequentially.
@@ -103,9 +81,7 @@ impl<'a, 'b> Dispatcher<'a, 'b> {
     /// Please note that this method assumes that no resource
     /// is currently borrowed. If that's the case, it panics.
     pub fn dispatch_seq(&mut self, world: &World) {
-        for stage in &mut self.stages {
-            stage.execute_seq(world);
-        }
+        self.inner.dispatch_seq(world);
     }
 
     /// Dispatch only thread local systems sequentially.
@@ -123,25 +99,14 @@ impl<'a, 'b> Dispatcher<'a, 'b> {
     /// Fails and returns the original distpatcher if it contains thread local systems.
     pub fn try_into_sendable(self) -> Result<SendDispatcher<'a>, Self> {
         let Dispatcher {
-            stages,
+            inner: _,
             thread_local,
-            #[cfg(feature = "parallel")]
-            thread_pool,
-        } = self;
+        } = &self;
 
         if thread_local.is_empty() {
-            Ok(SendDispatcher {
-                stages,
-                #[cfg(feature = "parallel")]
-                thread_pool,
-            })
+            Ok(self.inner)
         } else {
-            Err(Dispatcher {
-                stages,
-                thread_local,
-                #[cfg(feature = "parallel")]
-                thread_pool,
-            })
+            Err(self)
         }
     }
 
@@ -150,11 +115,7 @@ impl<'a, 'b> Dispatcher<'a, 'b> {
     /// how well your systems can make use of multi-threading.
     #[cfg(feature = "parallel")]
     pub fn max_threads(&self) -> usize {
-        self.stages
-            .iter()
-            .map(Stage::max_threads)
-            .max()
-            .unwrap_or(0)
+        self.inner.max_threads()
     }
 }
 
@@ -185,9 +146,11 @@ pub fn new_dispatcher<'a, 'b>(
     thread_pool: ::std::sync::Arc<::std::sync::RwLock<ThreadPoolWrapper>>,
 ) -> Dispatcher<'a, 'b> {
     Dispatcher {
-        stages,
+        inner: SendDispatcher {
+            stages,
+            thread_pool,
+        },
         thread_local,
-        thread_pool,
     }
 }
 
@@ -197,7 +160,7 @@ pub fn new_dispatcher<'a, 'b>(
     thread_local: ThreadLocal<'b>,
 ) -> Dispatcher<'a, 'b> {
     Dispatcher {
-        stages,
+        inner: SendDispatcher { stages },
         thread_local,
     }
 }
