-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathbackends.py
More file actions
185 lines (146 loc) · 5.33 KB
/
backends.py
File metadata and controls
185 lines (146 loc) · 5.33 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
"""Configures the available backends."""
from __future__ import annotations
import warnings
from concurrent.futures import Executor
from concurrent.futures import Future
from concurrent.futures import ProcessPoolExecutor
from concurrent.futures import ThreadPoolExecutor
from enum import Enum
from typing import Any
from typing import Callable
from typing import ClassVar
import cloudpickle
from attrs import define
from loky import get_reusable_executor
__all__ = ["ParallelBackend", "ParallelBackendRegistry", "WorkerType", "registry"]
def _deserialize_and_run_with_cloudpickle(fn: bytes, kwargs: bytes) -> Any:
    """Unpickle a function and its keyword arguments, then invoke it.

    Runs on the worker side; both payloads are byte strings produced with
    ``cloudpickle.dumps`` by the submitting process.
    """
    func = cloudpickle.loads(fn)
    kwds = cloudpickle.loads(kwargs)
    return func(**kwds)
class _CloudpickleProcessPoolExecutor(ProcessPoolExecutor):
    """A :class:`ProcessPoolExecutor` that serializes tasks with cloudpickle.

    The stock executor pickles tasks with the standard :mod:`pickle`, which
    cannot handle closures or interactively defined functions. Dumping the
    function and its keyword arguments with cloudpickle on submit and loading
    them again inside the worker lifts that restriction.
    """

    def submit(
        self,
        fn: Callable[..., Any],
        /,
        *args: Any,  # noqa: ARG002
        **kwargs: Any,
    ) -> Future[Any]:
        """Submit a new task, serialized with cloudpickle.

        Positional arguments are deliberately ignored; callers must pass
        everything by keyword.
        """
        payload_fn = cloudpickle.dumps(fn)
        payload_kwargs = cloudpickle.dumps(kwargs)
        return super().submit(
            _deserialize_and_run_with_cloudpickle,
            fn=payload_fn,
            kwargs=payload_kwargs,
        )
def _get_dask_executor(n_workers: int) -> Executor:
    """Return an executor backed by a dask client.

    Reuses the currently active client when one exists; otherwise a local
    cluster with ``n_workers`` workers is started. If an existing cluster has
    a different worker count, a warning is emitted and ``n_workers`` is
    ignored.

    Raises
    ------
    ImportError
        If the ``distributed`` package is not installed.
    """
    _rich_traceback_guard = True
    try:
        import distributed  # noqa: PLC0415
    except ImportError:
        msg = "The distributed package is not installed. Please install it."
        raise ImportError(msg) from None
    try:
        client = distributed.Client.current()
    except ValueError:
        # No active client — spin up a fresh local cluster.
        client = distributed.Client(distributed.LocalCluster(n_workers=n_workers))
    else:
        cluster = client.cluster
        if cluster and len(cluster.workers) != n_workers:
            warnings.warn(
                "The number of workers in the dask cluster "
                f"({len(cluster.workers)}) does not match the number of workers "
                f"requested ({n_workers}). The requested number of workers will be "
                "ignored.",
                stacklevel=1,
            )
    return client.get_executor()
def _get_loky_executor(n_workers: int) -> Executor:
    """Return loky's reusable process-pool executor capped at ``n_workers``."""
    executor = get_reusable_executor(max_workers=n_workers)
    return executor
def _get_process_pool_executor(n_workers: int) -> Executor:
    """Return a cloudpickle-aware process pool with ``n_workers`` workers."""
    executor = _CloudpickleProcessPoolExecutor(max_workers=n_workers)
    return executor
def _get_thread_pool_executor(n_workers: int) -> Executor:
"""Get a thread pool executor."""
return ThreadPoolExecutor(max_workers=n_workers)
class ParallelBackend(Enum):
    """Enumerates the available parallel backends.

    Attributes
    ----------
    NONE
        Run without a parallel backend.
    CUSTOM
        A user-registered custom backend.
    DASK
        Execution through a dask cluster.
    LOKY
        loky's reusable process pool.
    PROCESSES
        A process-pool backend.
    THREADS
        A thread-pool backend.
    """

    NONE = "none"
    CUSTOM = "custom"
    DASK = "dask"
    LOKY = "loky"
    PROCESSES = "processes"
    THREADS = "threads"
class WorkerType(Enum):
    """Distinguishes whether workers are spawned as threads or processes.

    Attributes
    ----------
    THREADS
        Workers are threads.
    PROCESSES
        Workers are processes.
    """

    THREADS = "threads"
    PROCESSES = "processes"
@define
class _ParallelBackend:
    """Internal record describing one registered parallel backend."""

    # Factory invoked as ``builder(n_workers=<int>)``; returns the executor.
    builder: Callable[..., Executor]
    # Whether the builder spawns threads or processes.
    worker_type: WorkerType
    # Marks the backend as remote; every built-in backend registers False.
    remote: bool
@define
class ParallelBackendRegistry:
    """Registry mapping :class:`ParallelBackend` kinds to executor builders."""

    # ClassVar: one mapping shared by every registry instance.
    registry: ClassVar[dict[ParallelBackend, _ParallelBackend]] = {}

    def register_parallel_backend(
        self,
        kind: ParallelBackend,
        builder: Callable[..., Executor],
        *,
        worker_type: WorkerType | str = WorkerType.PROCESSES,
        remote: bool = False,
    ) -> None:
        """Register a parallel backend.

        Parameters
        ----------
        kind
            The backend kind under which the builder is registered.
        builder
            Callable invoked as ``builder(n_workers=...)`` returning an
            executor.
        worker_type
            Whether workers are threads or processes; plain strings are
            coerced to :class:`WorkerType`.
        remote
            Whether the backend runs tasks remotely.
        """
        self.registry[kind] = _ParallelBackend(
            builder=builder, worker_type=WorkerType(worker_type), remote=remote
        )

    def get_parallel_backend(self, kind: ParallelBackend, n_workers: int) -> Executor:
        """Instantiate and return the executor registered for ``kind``.

        Raises
        ------
        ValueError
            If no backend is registered for ``kind``, or the registered
            builder fails.
        """
        __tracebackhide__ = True
        # Keep the registry lookup and the builder call in separate ``try``
        # blocks: previously a KeyError raised *inside* the builder was
        # caught by the lookup handler and misreported as a missing
        # registration.
        try:
            backend = self.registry[kind]
        except KeyError:
            msg = f"No registered parallel backend found for kind {kind.value!r}."
            raise ValueError(msg) from None
        try:
            return backend.builder(n_workers=n_workers)
        except Exception as e:
            msg = f"Could not instantiate parallel backend {kind.value!r}."
            raise ValueError(msg) from e

    def reset(self) -> None:
        """Clear the registry and re-register the default backends."""
        self.registry.clear()
        for parallel_backend, builder, worker_type, remote in (
            (ParallelBackend.DASK, _get_dask_executor, "processes", False),
            (ParallelBackend.LOKY, _get_loky_executor, "processes", False),
            (ParallelBackend.PROCESSES, _get_process_pool_executor, "processes", False),
            (ParallelBackend.THREADS, _get_thread_pool_executor, "threads", False),
        ):
            self.register_parallel_backend(
                parallel_backend, builder, worker_type=worker_type, remote=remote
            )
# Module-level singleton registry, pre-populated with the default backends.
registry = ParallelBackendRegistry()
registry.reset()