-
Notifications
You must be signed in to change notification settings - Fork 83
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Pydantic contrib module #757
base: main
Are you sure you want to change the base?
Changes from all commits
106f584
4d9943b
72c9cd1
e9e0a84
819b7ee
67bad54
2a263cf
a4393e4
9669e8b
b2ab2c9
f48a993
b65781a
6813ffa
53b453e
6adffc6
938a218
c379acf
0e61991
09d4d9e
a7b30a5
e383353
e3729eb
7a2c785
ad26ef9
c626724
d970b74
6362836
c05f346
92bc1cb
75aea5d
480a40b
780a829
e2bb705
bc70bfb
c29f498
bd7ccca
2c32442
e0a937c
73a330c
1e51a19
686b18d
91c098a
4b10f5a
efe28f6
09e655b
c250f4d
44cece0
9201aa1
76cb905
024547d
ab6eade
757df57
3103fa3
1d5971e
419354f
827b692
33a0016
8717425
ac04b46
a58c945
e67ac04
77d01eb
d51b0dc
ede73f6
5e7310c
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Large diffs are not rendered by default.
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -35,6 +35,7 @@ grpcio = {version = "^1.48.2", optional = true} | |
opentelemetry-api = { version = "^1.11.1", optional = true } | ||
opentelemetry-sdk = { version = "^1.11.1", optional = true } | ||
protobuf = ">=3.20" | ||
pydantic = { version = "^2.10.6", optional = true } | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Can you confirm that pydantic v1 users aren't bound by our version constraint here if they never add the extra? Maybe the easiest way to confirm is to somehow have a sample or test that confirms existing pydantic support? |
||
python = "^3.9" | ||
python-dateutil = { version = "^2.8.2", python = "<3.11" } | ||
types-protobuf = ">=3.20" | ||
|
@@ -46,7 +47,6 @@ grpcio-tools = "^1.48.2" | |
mypy = "^1.0.0" | ||
mypy-protobuf = "^3.3.0" | ||
psutil = "^5.9.3" | ||
pydantic = "^1.10.19" | ||
pydocstyle = "^6.3.0" | ||
pydoctor = "^24.11.1" | ||
pyright = ">=1.1.377" | ||
|
@@ -63,6 +63,7 @@ wheel = "^0.42.0" | |
[tool.poetry.extras] | ||
opentelemetry = ["opentelemetry-api", "opentelemetry-sdk"] | ||
grpc = ["grpcio"] | ||
pydantic = ["pydantic"] | ||
|
||
[tool.poetry.group.dev.dependencies] | ||
ruff = "^0.5.0" | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,108 @@ | ||
"""A data converter for Pydantic models | ||
|
||
To use, pass ``pydantic_data_converter`` as the ``data_converter`` argument to | ||
:py:class:`temporalio.client.Client`: | ||
|
||
.. code-block:: python | ||
|
||
client = Client( | ||
data_converter=pydantic_data_converter, | ||
... | ||
) | ||
""" | ||
|
||
import inspect | ||
from typing import ( | ||
Any, | ||
Type, | ||
) | ||
|
||
import pydantic | ||
|
||
try: | ||
from pydantic_core import to_jsonable_python | ||
except ImportError: | ||
# pydantic v1 | ||
from pydantic.json import pydantic_encoder as to_jsonable_python # type: ignore | ||
Comment on lines +25 to +26
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Are we saying this contrib library works even with pydantic v1? If so, we should change our version constraint to be |
||
|
||
from temporalio.converter import ( | ||
AdvancedJSONEncoder, | ||
CompositePayloadConverter, | ||
DataConverter, | ||
DefaultPayloadConverter, | ||
JSONPlainPayloadConverter, | ||
JSONTypeConverter, | ||
) | ||
|
||
# Note that in addition to the implementation in this module, _RestrictedProxy | ||
# implements __get_pydantic_core_schema__ so that pydantic unwraps proxied types. | ||
|
||
|
||
class PydanticModelTypeConverter(JSONTypeConverter): | ||
"""Type converter for pydantic model instances.""" | ||
|
||
def to_typed_value(self, hint: Type, value: Any) -> Any: | ||
"""Convert dict value to pydantic model instance of the specified type""" | ||
if not inspect.isclass(hint) or not issubclass(hint, pydantic.BaseModel): | ||
return JSONTypeConverter.Unhandled | ||
model = hint | ||
if not isinstance(value, dict): | ||
raise TypeError( | ||
f"Cannot convert to {model}, value is {type(value)} not dict" | ||
) | ||
if hasattr(model, "model_validate"): | ||
return model.model_validate(value) | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I assume this raises a Pydantic-specific exception that a user could add to workflow failure exception types if they wanted? (If not, we should consider a usability improvement to have an option to wrap this in an application error.) |
||
elif hasattr(model, "parse_obj"): | ||
# pydantic v1 | ||
return model.parse_obj(value) | ||
else: | ||
raise ValueError( | ||
f"{model} is a Pydantic model but does not have a `model_validate` or `parse_obj` method" | ||
) | ||
|
||
|
||
class PydanticJSONEncoder(AdvancedJSONEncoder): | ||
"""JSON encoder for python objects containing pydantic model instances.""" | ||
|
||
def default(self, o: Any) -> Any: | ||
"""Convert object to jsonable python. | ||
|
||
See :py:meth:`json.JSONEncoder.default`. | ||
""" | ||
if isinstance(o, pydantic.BaseModel): | ||
return to_jsonable_python(o) | ||
return super().default(o) | ||
|
||
|
||
class PydanticPayloadConverter(CompositePayloadConverter): | ||
"""Payload converter for payloads containing pydantic model instances. | ||
|
||
JSON conversion is replaced with a converter that uses | ||
:py:class:`PydanticJSONEncoder` to convert the python object to JSON, and | ||
:py:class:`PydanticModelTypeConverter` to convert raw python values to | ||
pydantic model instances. | ||
""" | ||
|
||
def __init__(self) -> None: | ||
"""Initialize object""" | ||
json_payload_converter = JSONPlainPayloadConverter( | ||
encoder=PydanticJSONEncoder, | ||
custom_type_converters=[PydanticModelTypeConverter()], | ||
) | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Our pydantic sample was implementing |
||
super().__init__( | ||
*( | ||
c | ||
if not isinstance(c, JSONPlainPayloadConverter) | ||
else json_payload_converter | ||
for c in DefaultPayloadConverter.default_encoding_payload_converters | ||
) | ||
) | ||
|
||
|
||
pydantic_data_converter = DataConverter( | ||
payload_converter_class=PydanticPayloadConverter | ||
) | ||
"""Data converter for payloads containing pydantic model instances. | ||
|
||
To use, pass as the ``data_converter`` argument of :py:class:`temporalio.client.Client` | ||
""" |
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -10,11 +10,11 @@ | |
import sys | ||
import traceback | ||
import uuid | ||
import warnings | ||
from abc import ABC, abstractmethod | ||
from dataclasses import dataclass | ||
from datetime import datetime | ||
from enum import IntEnum | ||
from itertools import zip_longest | ||
from typing import ( | ||
Any, | ||
Awaitable, | ||
|
@@ -291,10 +291,8 @@ def from_payloads( | |
RuntimeError: Error during decode | ||
""" | ||
values = [] | ||
for index, payload in enumerate(payloads): | ||
type_hint = None | ||
if type_hints and len(type_hints) > index: | ||
type_hint = type_hints[index] | ||
type_hints = type_hints or [] | ||
for index, (payload, type_hint) in enumerate(zip_longest(payloads, type_hints)): | ||
# Raw value should just wrap | ||
if type_hint == temporalio.common.RawValue: | ||
values.append(temporalio.common.RawValue(payload)) | ||
|
@@ -558,12 +556,7 @@ def encoding(self) -> str: | |
|
||
def to_payload(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: | ||
"""See base class.""" | ||
# Check for pydantic then send warning | ||
if hasattr(value, "parse_obj"): | ||
warnings.warn( | ||
"If you're using pydantic model, refer to https://github.com/temporalio/samples-python/tree/main/pydantic_converter for better support" | ||
) | ||
# We let JSON conversion errors be thrown to caller | ||
Comment on lines -561 to -566
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Hrmm, I wonder if there is any way to warn users that are trying to encode Pydantic models without using our suggested converter |
||
# Let JSON conversion errors be thrown to caller | ||
return temporalio.api.common.v1.Payload( | ||
metadata={"encoding": self._encoding.encode()}, | ||
data=json.dumps( | ||
|
@@ -1523,8 +1516,13 @@ def value_to_type( | |
# TODO(cretz): Want way to convert snake case to camel case? | ||
return hint(**field_values) | ||
|
||
# If there is a @staticmethod or @classmethod parse_obj, we will use it. | ||
# This covers Pydantic models. | ||
# Pydantic model instance | ||
# Pydantic users should use | ||
# temporalio.contrib.pydantic.pydantic_data_converter, in which case a | ||
# pydantic model instance will have been handled by the custom_converters at | ||
# the start of this function. We retain the following for backwards | ||
# compatibility with pydantic users who are not using contrib.pydantic, but | ||
# this is deprecated. | ||
Comment on lines +1519 to +1525
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Hrmm, wonder if we should warn if they are trying to decode to a Pydantic model without using our suggested converter |
||
parse_obj_attr = inspect.getattr_static(hint, "parse_obj", None) | ||
if isinstance(parse_obj_attr, classmethod) or isinstance( | ||
parse_obj_attr, staticmethod | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -7,6 +7,7 @@ | |
from __future__ import annotations | ||
|
||
import dataclasses | ||
import datetime | ||
import functools | ||
import inspect | ||
import logging | ||
|
@@ -31,6 +32,14 @@ | |
cast, | ||
) | ||
|
||
try: | ||
import pydantic | ||
import pydantic_core | ||
|
||
HAVE_PYDANTIC = True | ||
except ImportError: | ||
HAVE_PYDANTIC = False | ||
|
||
import temporalio.workflow | ||
|
||
logger = logging.getLogger(__name__) | ||
|
@@ -435,9 +444,10 @@ def with_child_unrestricted(self, *child_path: str) -> SandboxMatcher: | |
# Due to a metaclass conflict in sandbox, we need zipfile module to pass | ||
# through always | ||
"zipfile", | ||
# This is a very general module needed by many things including pytest's | ||
# Very general modules needed by many things including pytest's | ||
# assertion rewriter | ||
"typing", | ||
"typing_extensions", | ||
# Required due to https://github.com/protocolbuffers/protobuf/issues/10143 | ||
# for older versions. This unfortunately means that on those versions, | ||
# everyone using Python protos has to pass their module through. | ||
|
@@ -943,7 +953,17 @@ def r_op(obj: Any, other: Any) -> Any: | |
|
||
def _is_restrictable(v: Any) -> bool: | ||
return v is not None and not isinstance( | ||
v, (bool, int, float, complex, str, bytes, bytearray) | ||
v, | ||
( | ||
bool, | ||
int, | ||
float, | ||
complex, | ||
str, | ||
bytes, | ||
bytearray, | ||
datetime.date, # from which datetime.datetime inherits | ||
), | ||
) | ||
|
||
|
||
|
@@ -971,6 +991,8 @@ def __init__(self, *args, **kwargs) -> None: | |
_trace("__init__ unrecognized with args %s", args) | ||
|
||
def __getattribute__(self, __name: str) -> Any: | ||
if HAVE_PYDANTIC and __name == "__get_pydantic_core_schema__": | ||
return object.__getattribute__(self, "__get_pydantic_core_schema__") | ||
Comment on lines +994 to +995
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Hrmm, trying to think if we even want this generic hook to avoid pydantic code inside our sandbox. I guess all ways to do it would require that the contrib library is at least imported by the user somewhere in the process, but maybe that is ok? If that is ok, I wonder if a side-effect of importing the contrib is ideal over altering the sandbox even if |
||
state = _RestrictionState.from_proxy(self) | ||
_trace("__getattribute__ %s on %s", __name, state.name) | ||
# We do not restrict __spec__ or __name__ | ||
|
@@ -1020,6 +1042,17 @@ def __getitem__(self, key: Any) -> Any: | |
) | ||
return ret | ||
|
||
if HAVE_PYDANTIC: | ||
# Instruct pydantic to use the proxied type when determining the schema | ||
# https://docs.pydantic.dev/latest/concepts/types/#customizing-validation-with-__get_pydantic_core_schema__ | ||
@classmethod | ||
def __get_pydantic_core_schema__( | ||
cls, | ||
source_type: Any, | ||
handler: pydantic.GetCoreSchemaHandler, | ||
) -> pydantic_core.CoreSchema: | ||
return handler(RestrictionContext.unwrap_if_proxied(source_type)) | ||
|
||
__doc__ = _RestrictedProxyLookup( # type: ignore | ||
class_value=__doc__, fallback_func=lambda self: type(self).__doc__, is_attr=True | ||
) | ||
|
@@ -1032,7 +1065,7 @@ def __getitem__(self, key: Any) -> Any: | |
) | ||
__str__ = _RestrictedProxyLookup(str) # type: ignore | ||
__bytes__ = _RestrictedProxyLookup(bytes) | ||
__format__ = _RestrictedProxyLookup() # type: ignore | ||
__format__ = _RestrictedProxyLookup(format) # type: ignore | ||
__lt__ = _RestrictedProxyLookup(operator.lt) | ||
__le__ = _RestrictedProxyLookup(operator.le) | ||
__eq__ = _RestrictedProxyLookup(operator.eq) # type: ignore | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I think we should make this a whole subsection, e.g.
##### Pydantic