Skip to content

Commit f54d1ef

Browse files
committed
Merge branch 'release/0.4.2'
2 parents 40bc192 + 355c6e3 commit f54d1ef

File tree

19 files changed

+242
-139
lines changed

19 files changed

+242
-139
lines changed

.flake8

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,8 @@ extend-ignore = E203
66
docstring_style=sphinx
77

88
ignore =
9+
; Found `no cover` comments overuse
10+
WPS403
911
; Found a line that starts with a dot
1012
WPS348,
1113
; Found overly complex type annotation

docs/available-components/middlewares.md

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -15,11 +15,11 @@ the task would be resent with same parameters.
1515
To enable this middleware, add it to the list of middlewares for a broker.
1616

1717
```python
18-
from taskiq import SimpleRetryMiddleware
18+
from taskiq import ZeroMQBroker, SimpleRetryMiddleware
1919

20-
broker = ...
21-
22-
broker.add_middlewares(SimpleRetryMiddleware(default_retry_count=3))
20+
broker = ZeroMQBroker().with_middlewares(
21+
SimpleRetryMiddleware(default_retry_count=3),
22+
)
2323
```
2424

2525
After that you can add a label to task that you want to restart on error.
@@ -58,11 +58,11 @@ pip install "taskiq[metrics]"
5858

5959

6060
```python
61-
from taskiq import PrometheusMiddleware
62-
63-
broker = ...
61+
from taskiq import ZeroMQBroker, PrometheusMiddleware
6462

65-
broker.add_middlewares(PrometheusMiddleware(server_addr="0.0.0.0", server_port=9000))
63+
broker = ZeroMQBroker().with_middlewares(
64+
PrometheusMiddleware(server_addr="0.0.0.0", server_port=9000),
65+
)
6666
```
6767

6868
After that, metrics will be available at port 9000. Of course, this parameter can be configured.

docs/available-components/result-backends.md

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@ order: 3
77
Result backends are used to store execution results.
88
This includes:
99

10-
- Captured logs;
1110
- return value;
1211
- Execution time in seconds.
1312

docs/examples/extending/broker.py

Lines changed: 6 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,13 @@
1-
from typing import AsyncGenerator, Callable, Optional, TypeVar
1+
from typing import AsyncGenerator
22

3-
from taskiq import AsyncBroker, AsyncResultBackend, BrokerMessage
4-
5-
_T = TypeVar("_T")
3+
from taskiq import AsyncBroker, BrokerMessage
64

75

86
class MyBroker(AsyncBroker):
9-
def __init__(
10-
self,
11-
result_backend: "Optional[AsyncResultBackend[_T]]" = None,
12-
task_id_generator: Optional[Callable[[], str]] = None,
13-
) -> None:
14-
# Please call this super and allow people to use their result_backends.
15-
super().__init__(result_backend, task_id_generator)
7+
def __init__(self) -> None:
8+
# Please call this super method to set default values to
9+
# many different fields.
10+
super().__init__()
1611

1712
async def startup(self) -> None:
1813
# Here you can do some startup magic.

docs/examples/extending/result_backend.py

Lines changed: 30 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -18,27 +18,45 @@ async def set_result(
1818
task_id: str,
1919
result: TaskiqResult[_ReturnType],
2020
) -> None:
21-
# Here you must set result somewhere.
22-
pass
21+
"""
22+
Set result in your backend.
23+
24+
:param task_id: current task id.
25+
:param result: result of execution.
26+
"""
2327

2428
async def get_result(
2529
self,
2630
task_id: str,
2731
with_logs: bool = False,
2832
) -> TaskiqResult[_ReturnType]:
29-
# Here you must retrieve result by id.
33+
"""
34+
Here you must retrieve result by id.
35+
36+
Logs are a part of a result.
37+
Here we have a parameter whether you want to
38+
fetch result with logs or not, because logs
39+
can have a lot of info and sometimes it's critical
40+
to get only needed information.
3041
31-
# Logs is a part of a result.
32-
# Here we have a parameter whether you want to
33-
# fetch result with logs or not, because logs
34-
# can have a lot of info and sometimes it's critical
35-
# to get only needed information.
36-
pass
42+
:param task_id: id of a task.
43+
:param with_logs: whether to fetch logs.
44+
:return: result.
45+
"""
46+
return ... # type: ignore
3747

3848
async def is_result_ready(
3949
self,
4050
task_id: str,
4151
) -> bool:
42-
# This function checks if result of a task exists,
43-
# without actual fetching the result.
44-
pass
52+
"""
53+
Check if result exists.
54+
55+
This function must check whether result
56+
is available in your result backend
57+
without fetching the result.
58+
59+
:param task_id: id of a task.
60+
:return: True if result is ready.
61+
"""
62+
return ... # type: ignore

docs/examples/introduction/full_example.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,7 @@
66

77
broker = AioPikaBroker(
88
"amqp://guest:guest@localhost:5672",
9-
result_backend=RedisAsyncResultBackend("redis://localhost"),
10-
)
9+
).with_result_backend(RedisAsyncResultBackend("redis://localhost"))
1110

1211

1312
@broker.task

docs/examples/state/events_example.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -14,10 +14,7 @@
1414

1515
broker = AioPikaBroker(
1616
"amqp://localhost",
17-
result_backend=RedisAsyncResultBackend(
18-
"redis://localhost/0",
19-
),
20-
)
17+
).with_result_backend(RedisAsyncResultBackend("redis://localhost"))
2118

2219

2320
@broker.on_event(TaskiqEvents.WORKER_STARTUP)

docs/guide/getting-started.md

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -171,8 +171,7 @@ from taskiq_redis import RedisAsyncResultBackend
171171

172172
broker = AioPikaBroker(
173173
"amqp://guest:guest@localhost:5672",
174-
result_backend=RedisAsyncResultBackend("redis://localhost"),
175-
)
174+
).with_result_backend(RedisAsyncResultBackend("redis://localhost"))
176175
```
177176

178177
Now we need to start redis.

0 commit comments

Comments
 (0)