Merge branch 'release/0.4.2'
s3rius committed May 6, 2023
2 parents 40bc192 + 355c6e3 commit f54d1ef
Showing 19 changed files with 242 additions and 139 deletions.
2 changes: 2 additions & 0 deletions .flake8
@@ -6,6 +6,8 @@ extend-ignore = E203
 docstring_style=sphinx
 
 ignore =
+    ; Found `no cover` comments overuse
+    WPS403
     ; Found a line that starts with a dot
     WPS348,
     ; Found overly complex type annotation
16 changes: 8 additions & 8 deletions docs/available-components/middlewares.md
@@ -15,11 +15,11 @@ the task would be resent with same parameters.
 To enable this middleware, add it to the list of middlewares for a broker.
 
 ```python
-from taskiq import SimpleRetryMiddleware
+from taskiq import ZeroMQBroker, SimpleRetryMiddleware
 
-broker = ...
-
-broker.add_middlewares(SimpleRetryMiddleware(default_retry_count=3))
+broker = ZeroMQBroker().with_middlewares(
+    SimpleRetryMiddleware(default_retry_count=3),
+)
 ```
 
 After that you can add a label to task that you want to restart on error.
@@ -58,11 +58,11 @@ pip install "taskiq[metrics]"
 
 
 ```python
-from taskiq import PrometheusMiddleware
-
-broker = ...
+from taskiq import ZeroMQBroker, PrometheusMiddleware
 
-broker.add_middlewares(PrometheusMiddleware(server_addr="0.0.0.0", server_port=9000))
+broker = ZeroMQBroker().with_middlewares(
+    PrometheusMiddleware(server_addr="0.0.0.0", server_port=9000),
+)
 ```
 
 After that, metrics will be available at port 9000. Of course, this parameter can be configured.
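
Taken together, the two hunks above move the middleware docs from `broker.add_middlewares(...)` to the chained `with_middlewares(...)` style. Below is a minimal sketch of the new style end to end; it assumes the ZeroMQ extra is installed and that `SimpleRetryMiddleware` is driven by a `retry_on_error` label on the task (that label name is not part of this diff).

```python
from taskiq import SimpleRetryMiddleware, ZeroMQBroker

# Middlewares are now chained onto the broker at construction time.
broker = ZeroMQBroker().with_middlewares(
    SimpleRetryMiddleware(default_retry_count=3),
)


# The retry_on_error label opts this task into automatic retries.
@broker.task(retry_on_error=True)
async def flaky_task() -> None:
    raise ValueError("transient failure, please retry")
```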
1 change: 0 additions & 1 deletion docs/available-components/result-backends.md
@@ -7,7 +7,6 @@ order: 3
 Result backends are used to store execution results.
 This includes:
 
-- Captured logs;
 - return value;
 - Execution time in seconds.
 
17 changes: 6 additions & 11 deletions docs/examples/extending/broker.py
@@ -1,18 +1,13 @@
-from typing import AsyncGenerator, Callable, Optional, TypeVar
+from typing import AsyncGenerator
 
-from taskiq import AsyncBroker, AsyncResultBackend, BrokerMessage
-
-_T = TypeVar("_T")
+from taskiq import AsyncBroker, BrokerMessage
 
 
 class MyBroker(AsyncBroker):
-    def __init__(
-        self,
-        result_backend: "Optional[AsyncResultBackend[_T]]" = None,
-        task_id_generator: Optional[Callable[[], str]] = None,
-    ) -> None:
-        # Please call this super and allow people to use their result_backends.
-        super().__init__(result_backend, task_id_generator)
+    def __init__(self) -> None:
+        # Please call this super method to set default values to
+        # many different fields.
+        super().__init__()
 
     async def startup(self) -> None:
         # Here you can do some startup magic.
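
With the simplified constructor, a custom broker such as `MyBroker` no longer receives its result backend through `__init__`; it is attached afterwards with the chained setter. A short sketch under that assumption, where `my_broker` is a hypothetical module holding the class above and `taskiq-redis` is assumed to be installed:

```python
from taskiq_redis import RedisAsyncResultBackend  # assumes taskiq-redis is installed

from my_broker import MyBroker  # hypothetical module containing the class above

# The result backend is attached after construction
# instead of being passed to __init__.
broker = MyBroker().with_result_backend(
    RedisAsyncResultBackend("redis://localhost"),
)
```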
42 changes: 30 additions & 12 deletions docs/examples/extending/result_backend.py
@@ -18,27 +18,45 @@ async def set_result(
         task_id: str,
         result: TaskiqResult[_ReturnType],
     ) -> None:
-        # Here you must set result somewhere.
-        pass
+        """
+        Set result in your backend.
+
+        :param task_id: current task id.
+        :param result: result of execution.
+        """
 
     async def get_result(
        self,
        task_id: str,
        with_logs: bool = False,
     ) -> TaskiqResult[_ReturnType]:
-        # Here you must retrieve result by id.
-        # Logs is a part of a result.
-        # Here we have a parameter whether you want to
-        # fetch result with logs or not, because logs
-        # can have a lot of info and sometimes it's critical
-        # to get only needed information.
-        pass
+        """
+        Here you must retrieve result by id.
+
+        Logs is a part of a result.
+        Here we have a parameter whether you want to
+        fetch result with logs or not, because logs
+        can have a lot of info and sometimes it's critical
+        to get only needed information.
+
+        :param task_id: id of a task.
+        :param with_logs: whether to fetch logs.
+        :return: result.
+        """
+        return ...  # type: ignore
 
     async def is_result_ready(
         self,
         task_id: str,
     ) -> bool:
-        # This function checks if result of a task exists,
-        # without actual fetching the result.
-        pass
+        """
+        Check if result exists.
+
+        This function must check whether result
+        is available in your resul backend
+        without fetching the result.
+
+        :param task_id: id of a task.
+        :return: True if result is ready.
+        """
+        return ...  # type: ignore
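
To make the stubbed interface above concrete, here is a minimal in-memory sketch of the same three methods. It assumes `AsyncResultBackend` and `TaskiqResult` are importable from `taskiq`, as in the other examples touched by this commit; a real backend would talk to external storage rather than a dict.

```python
from typing import Dict, TypeVar

from taskiq import AsyncResultBackend, TaskiqResult

_ReturnType = TypeVar("_ReturnType")


class InMemoryResultBackend(AsyncResultBackend[_ReturnType]):
    """Illustrative backend that keeps results in a plain dict."""

    def __init__(self) -> None:
        self.results: Dict[str, TaskiqResult[_ReturnType]] = {}

    async def set_result(
        self,
        task_id: str,
        result: TaskiqResult[_ReturnType],
    ) -> None:
        # Store the whole TaskiqResult under its task id.
        self.results[task_id] = result

    async def get_result(
        self,
        task_id: str,
        with_logs: bool = False,
    ) -> TaskiqResult[_ReturnType]:
        # Everything is already in memory, so with_logs is ignored here;
        # a real backend could skip fetching the heavy log payload.
        return self.results[task_id]

    async def is_result_ready(
        self,
        task_id: str,
    ) -> bool:
        # Existence check without materializing the result itself.
        return task_id in self.results
```

A dict-backed class like this is only useful for tests, but it shows how the three methods relate: `set_result` writes, `is_result_ready` probes, and `get_result` reads.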
3 changes: 1 addition & 2 deletions docs/examples/introduction/full_example.py
@@ -6,8 +6,7 @@
 
 broker = AioPikaBroker(
     "amqp://guest:guest@localhost:5672",
-    result_backend=RedisAsyncResultBackend("redis://localhost"),
-)
+).with_result_backend(RedisAsyncResultBackend("redis://localhost"))
 
 
 @broker.task
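
For context, here is how the rewired broker from this example is typically driven. The chained `with_result_backend` call comes from the diff; the `taskiq_aio_pika` module name, the `.kiq()`/`.wait_result()` calls, and the locally running RabbitMQ and Redis instances are assumptions on top of it.

```python
import asyncio

from taskiq_aio_pika import AioPikaBroker  # assumed module name for the broker
from taskiq_redis import RedisAsyncResultBackend

broker = AioPikaBroker(
    "amqp://guest:guest@localhost:5672",
).with_result_backend(RedisAsyncResultBackend("redis://localhost"))


@broker.task
async def add(a: int, b: int) -> int:
    return a + b


async def main() -> None:
    await broker.startup()
    task = await add.kiq(1, 2)  # send the task to the queue
    result = await task.wait_result()  # read it back via the Redis backend
    print(result.return_value)
    await broker.shutdown()


if __name__ == "__main__":
    asyncio.run(main())
```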
5 changes: 1 addition & 4 deletions docs/examples/state/events_example.py
@@ -14,10 +14,7 @@
 
 broker = AioPikaBroker(
     "amqp://localhost",
-    result_backend=RedisAsyncResultBackend(
-        "redis://localhost/0",
-    ),
-)
+).with_result_backend(RedisAsyncResultBackend("redis://localhost"))
 
 
 @broker.on_event(TaskiqEvents.WORKER_STARTUP)
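
The `on_event` decorator visible in the context line above registers a worker lifecycle hook. A small sketch of what such a handler usually looks like, assuming the handler receives a `TaskiqState` object it can attach shared resources to (the signature and the `TaskiqState` import are assumptions, not part of this diff):

```python
from taskiq import TaskiqEvents, TaskiqState, ZeroMQBroker

broker = ZeroMQBroker()


@broker.on_event(TaskiqEvents.WORKER_STARTUP)
async def on_worker_startup(state: TaskiqState) -> None:
    # Attach anything tasks will need later (db pools, clients, flags).
    state.ready = True
```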
3 changes: 1 addition & 2 deletions docs/guide/getting-started.md
@@ -171,8 +171,7 @@ from taskiq_redis import RedisAsyncResultBackend
 
 broker = AioPikaBroker(
     "amqp://guest:guest@localhost:5672",
-    result_backend=RedisAsyncResultBackend("redis://localhost"),
-)
+).with_result_backend(RedisAsyncResultBackend("redis://localhost"))
 ```
 
 Now we need to start redis.