@@ -14,8 +14,19 @@ impl TaskPoolBuilder {
        Self::default()
    }

-    /// No op on the single threaded task pool
-    pub fn num_threads(self, _num_threads: usize) -> Self {
+    /// Override the number of compute-priority threads created for the pool. If unset, this defaults
+    /// to the number of logical cores of the system.
+    pub fn compute_threads(self, num_threads: usize) -> Self {
+        self
+    }
+
+    /// Override the number of async-compute-priority threads created for the pool. If unset, this defaults to 0.
+    pub fn async_compute_threads(self, num_threads: usize) -> Self {
+        self
+    }
+
+    /// Override the number of IO-priority threads created for the pool. If unset, this defaults to 0.
+    pub fn io_threads(self, num_threads: usize) -> Self {
        self
    }

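For reference, a minimal sketch of how the new builder methods could be wired up, assuming this is bevy_tasks' `TaskPoolBuilder` and using its existing `new()`/`build()` constructors; the thread counts here are arbitrary placeholders:

use bevy_tasks::TaskPoolBuilder;

fn main() {
    // On the single-threaded pool these setters simply return `self`, keeping the
    // builder API in sync with the multi-threaded implementation.
    let _pool = TaskPoolBuilder::new()
        .compute_threads(4)
        .async_compute_threads(1)
        .io_threads(1)
        .build();
}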
@@ -37,6 +48,20 @@ impl TaskPoolBuilder {

/// A thread pool for executing tasks. Tasks are futures that are being automatically driven by
/// the pool on threads owned by the pool. In this case - main thread only.
+///
+/// # Scheduling Semantics
+/// Each thread in the pool is assigned to one of three priority groups: Compute, IO, and Async
+/// Compute. Compute is higher priority than IO, and both are higher priority than Async Compute.
+/// Every task is assigned to a group upon being spawned. A lower-priority thread will always prioritize
+/// its own group's tasks (i.e. IO tasks on an IO thread), but will run higher-priority tasks if it would
+/// otherwise be sitting idle.
+///
+/// For example, under heavy compute workloads, compute tasks will be scheduled to run on the IO and
+/// async compute thread groups, but any IO task will take precedence over any compute task on the IO
+/// threads. By contrast, async compute tasks will never be scheduled on a compute or IO thread.
+///
+/// By default, all threads in the pool are dedicated to the compute group. Thread counts can be altered
+/// via [`TaskPoolBuilder`] when constructing the pool.
#[derive(Debug, Default, Clone)]
pub struct TaskPool {}

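To illustrate the grouping described in the doc comment above, a hedged sketch of spawning into each group, assuming the `spawn_async_compute`/`spawn_io` methods added in this change and the crate's existing `TaskPool::default()`; the workloads are placeholders:

use bevy_tasks::TaskPool;

fn main() {
    let pool = TaskPool::default();

    // Regular compute work; on the multi-threaded pool, idle IO/async-compute
    // threads may also pick this up.
    pool.spawn(async { 2 + 2 }).detach();

    // Low-priority, long-running background work.
    pool.spawn_async_compute(async { /* e.g. procedural generation */ }).detach();

    // File- or network-bound futures.
    pool.spawn_io(async { /* e.g. read an asset */ }).detach();
}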
@@ -106,6 +131,44 @@ impl TaskPool {
        FakeTask
    }

+    /// Spawns a static future onto the JS event loop. For now it is returning a FakeTask
+    /// instance with a no-op detach method. Returning a real Task is possible here, but tricky:
+    /// the future is running on the JS event loop while a Task would run on an
+    /// async_executor::LocalExecutor, so some proxy future would be needed. Moreover, we currently
+    /// don't have a long-living LocalExecutor here (the `spawn` implementation above creates a
+    /// temporary one). But for typical use cases the current implementation should be sufficient:
+    /// the caller can spawn a long-running future that writes its results to some channel or
+    /// event queue, and simply call detach on the returned Task (as AssetServer does) - the
+    /// detached future keeps running and can continue writing results to that channel / event queue.
+    pub fn spawn_async_compute<T>(&self, future: impl Future<Output = T> + 'static) -> FakeTask
+    where
+        T: Send + 'static,
+    {
+        wasm_bindgen_futures::spawn_local(async move {
+            future.await;
+        });
+        FakeTask
+    }
+
+    /// Spawns a static future onto the JS event loop. For now it is returning a FakeTask
+    /// instance with a no-op detach method. Returning a real Task is possible here, but tricky:
+    /// the future is running on the JS event loop while a Task would run on an
+    /// async_executor::LocalExecutor, so some proxy future would be needed. Moreover, we currently
+    /// don't have a long-living LocalExecutor here (the `spawn` implementation above creates a
+    /// temporary one). But for typical use cases the current implementation should be sufficient:
+    /// the caller can spawn a long-running future that writes its results to some channel or
+    /// event queue, and simply call detach on the returned Task (as AssetServer does) - the
+    /// detached future keeps running and can continue writing results to that channel / event queue.
+    pub fn spawn_io<T>(&self, future: impl Future<Output = T> + 'static) -> FakeTask
+    where
+        T: Send + 'static,
+    {
+        wasm_bindgen_futures::spawn_local(async move {
+            future.await;
+        });
+        FakeTask
+    }
+
    /// Spawns a static future on the JS event loop. This is exactly the same as [`TaskPool::spawn`].
    pub fn spawn_local<T>(&self, future: impl Future<Output = T> + 'static) -> FakeTask
    where
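The pattern these doc comments describe (detach a long-running future and let it report through a shared channel or queue) might look roughly like the following; the queue type and workload are illustrative, not part of the crate:

use std::sync::{Arc, Mutex};

use bevy_tasks::TaskPool;

fn main() {
    let pool = TaskPool::default();

    // Shared "event queue" the detached future writes into.
    let results: Arc<Mutex<Vec<String>>> = Arc::new(Mutex::new(Vec::new()));
    let queue = results.clone();

    // Spawn onto the IO group and detach; the future keeps running and publishes
    // its result through the queue rather than through the task handle.
    pool.spawn_io(async move {
        let loaded = String::from("asset bytes"); // stand-in for real IO work
        queue.lock().unwrap().push(loaded);
    })
    .detach();
}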
@@ -141,7 +204,29 @@ impl<'scope, T: Send + 'scope> Scope<'scope, T> {
    /// On the single threaded task pool, it just calls [`Scope::spawn_local`].
    ///
    /// For more information, see [`TaskPool::scope`].
-    pub fn spawn<Fut: Future<Output = T> + 'scope + Send>(&mut self, f: Fut) {
+    pub fn spawn<Fut: Future<Output = T> + 'scope>(&mut self, f: Fut) {
+        self.spawn_local(f);
+    }
+
+    /// Spawns a scoped future onto the thread-local executor. The scope *must* outlive
+    /// the provided future. The results of the future will be returned as a part of
+    /// [`TaskPool::scope`]'s return value.
+    ///
+    /// On the single threaded task pool, it just calls [`Scope::spawn_local`].
+    ///
+    /// For more information, see [`TaskPool::scope`].
+    pub fn spawn_async_compute<Fut: Future<Output = T> + 'scope>(&mut self, f: Fut) {
+        self.spawn_local(f);
+    }
+
+    /// Spawns a scoped future onto the thread-local executor. The scope *must* outlive
+    /// the provided future. The results of the future will be returned as a part of
+    /// [`TaskPool::scope`]'s return value.
+    ///
+    /// On the single threaded task pool, it just calls [`Scope::spawn_local`].
+    ///
+    /// For more information, see [`TaskPool::scope`].
+    pub fn spawn_io<Fut: Future<Output = T> + 'scope>(&mut self, f: Fut) {
        self.spawn_local(f);
    }

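Finally, a sketch of how the scoped variants might be used together, assuming the crate's existing `TaskPool::scope`; on the single-threaded pool all three calls funnel into `spawn_local`, so this mainly shows the intended API shape:

use bevy_tasks::TaskPool;

fn main() {
    let pool = TaskPool::default();

    // `scope` blocks until every spawned future has completed and collects their outputs.
    let results: Vec<i32> = pool.scope(|scope| {
        scope.spawn(async { 1 });               // compute group
        scope.spawn_async_compute(async { 2 }); // async-compute group
        scope.spawn_io(async { 3 });            // IO group
    });

    // Summing keeps the check independent of completion order.
    assert_eq!(results.into_iter().sum::<i32>(), 6);
}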