diff --git a/docs/conf.py b/docs/conf.py
index f83edac93b..29e8ea3330 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -3,7 +3,12 @@
 # Copyright Contributors to the OpenTimelineIO project
 
 import re
+import docutils.nodes
+import sphinx.addnodes
+import sphinx.application
+import sphinx.environment
 import sphinx_rtd_theme
+
 import opentimelineio
 
 # -- Project information ---------------------------------------------------------------
@@ -184,6 +189,35 @@ def process_docstring(
         lines[index] = line
 
 
-def setup(app):
+def process_missing_reference(
+    app: sphinx.application.Sphinx,
+    env: sphinx.environment.BuildEnvironment,
+    node: sphinx.addnodes.pending_xref,
+    contnode: docutils.nodes.Element
+):
+    if node.get('refdomain') != 'py':
+        return None
+
+    if node.get('reftype') == 'class':
+        reftarget = node.get('reftarget')
+        if reftarget == 'opentimelineio.core.Metadata':
+            # This is one heck of a hack. As it is right now, when opentimelineio.core.Metadata
+            # appears in a C++ function/method signature, it cannot be properly
+            # resolved by Sphinx. Not too sure why.
+            new_node = docutils.nodes.reference('')
+            new_node['refuri'] = f'{node.get("py:module")}.html#{reftarget}'
+            new_node['reftitle'] = reftarget
+            # new_node['classname'] = 'Metadata'
+            # Note that normally we would append "contnode" since it's
+            # the node that contains the text (opentimelineio.core.Metadata),
+            # but in our case we simply append a custom Text node so that the
+            # displayed text is shorter (no module name).
+            new_node.append(docutils.nodes.Text('Metadata'))
+
+            return new_node
+
+
+def setup(app: sphinx.application.Sphinx):
     app.connect("autodoc-process-signature", process_signature)
     app.connect("autodoc-process-docstring", process_docstring)
+    app.connect("missing-reference", process_missing_reference)
diff --git a/src/opentimelineio/serialization.cpp b/src/opentimelineio/serialization.cpp
index 32f107deb3..e6e4a8690b 100644
--- a/src/opentimelineio/serialization.cpp
+++ b/src/opentimelineio/serialization.cpp
@@ -666,6 +666,7 @@ SerializableObject::Writer::_build_dispatch_tables()
     auto& wt = _write_dispatch_table;
     wt[&typeid(void)] = [this](any const&) { _encoder.write_null_value(); };
+    wt[&typeid(nullptr)] = [this](any const&) { _encoder.write_null_value(); };
     wt[&typeid(bool)] = [this](any const& value) { _encoder.write_value(any_cast<bool>(value)); };
diff --git a/src/py-opentimelineio/opentimelineio-bindings/otio_anyDictionary.cpp b/src/py-opentimelineio/opentimelineio-bindings/otio_anyDictionary.cpp
index 385fd70db7..f5d18e9f32 100644
--- a/src/py-opentimelineio/opentimelineio-bindings/otio_anyDictionary.cpp
+++ b/src/py-opentimelineio/opentimelineio-bindings/otio_anyDictionary.cpp
@@ -23,6 +23,13 @@ void otio_any_dictionary_bindings(py::module m) {
     py::class_<AnyDictionaryProxy>(m, "AnyDictionary")
         .def(py::init<>())
+        .def(py::init([](py::dict item) {
+            AnyDictionary d = py_to_cpp(item);
+            auto proxy = new AnyDictionaryProxy;
+            proxy->fetch_any_dictionary().swap(*d.get_or_create_mutation_stamp()->any_dictionary);
+
+            return proxy;
+        }))
         .def("__getitem__", &AnyDictionaryProxy::get_item, "key"_a)
         .def("__internal_setitem__", &AnyDictionaryProxy::set_item, "key"_a, "item"_a)
         .def("__delitem__", &AnyDictionaryProxy::del_item, "key"_a)
diff --git a/src/py-opentimelineio/opentimelineio-bindings/otio_anyDictionary.h b/src/py-opentimelineio/opentimelineio-bindings/otio_anyDictionary.h
index c49ebe8876..5d1c268656 100644
--- a/src/py-opentimelineio/opentimelineio-bindings/otio_anyDictionary.h
+++ b/src/py-opentimelineio/opentimelineio-bindings/otio_anyDictionary.h
@@ -11,10 +11,29 @@ namespace py = pybind11;
 
 struct AnyDictionaryProxy : public AnyDictionary::MutationStamp {
+    using MutationStamp = AnyDictionary::MutationStamp;
+
+    AnyDictionaryProxy() {}
+
+    // TODO: Should we instead just pass an AnyDictionary?
+    AnyDictionaryProxy(MutationStamp *d) {
+        any_dictionary = d->any_dictionary;
+    }
+
+    AnyDictionaryProxy(const AnyDictionaryProxy& other) // Copy constructor. Required to convert a py::handle to an AnyDictionaryProxy.
+    {
+        AnyDictionary* d = new AnyDictionary;
+
+        AnyDictionary::iterator ptr;
+        for (ptr = other.any_dictionary->begin(); ptr != other.any_dictionary->end(); ptr++) {
+            d->insert(*ptr);
+        }
+
+        any_dictionary = d;
+    }
+
     ~AnyDictionaryProxy() { }
-
-    using MutationStamp = AnyDictionary::MutationStamp;
 
     static void throw_dictionary_was_deleted() {
         throw py::value_error("Underlying C++ AnyDictionary has been destroyed");
@@ -99,3 +118,52 @@ struct AnyDictionaryProxy : public AnyDictionary::MutationStamp {
     }
 };
+// Taken from https://github.com/pybind/pybind11/issues/1176#issuecomment-343312352
+// This is a custom type caster for the AnyDictionaryProxy class. This makes AnyDictionaryProxy
+// accept both AnyDictionaryProxy and py::dict.
+namespace pybind11 { namespace detail {
+    template <> struct type_caster<AnyDictionaryProxy> : public type_caster_base<AnyDictionaryProxy> {
+        using base = type_caster_base<AnyDictionaryProxy>;
+    public:
+
+        // Override the type reported in docstrings. opentimelineio.core.Metadata is defined
+        // in Python. It's defined as a union of a dict and AnyDictionary.
+        // This will allow mypy to do its job.
+        static constexpr auto name = const_name("opentimelineio.core.Metadata");
+
+        /**
+         * Conversion part 1 (Python->C++): convert a PyObject into an AnyDictionaryProxy
+         * instance or return false upon failure. The second argument
+         * indicates whether implicit conversions should be applied.
+         */
+        bool load(handle src, bool convert) {
+            // First try to convert using the base (default) type caster for AnyDictionaryProxy.
+            if (base::load(src, convert)) {
+                return true;
+            }
+
+            // If we got a dict, then do our own thing to convert the dict into an AnyDictionaryProxy.
+            else if (py::isinstance<py::dict>(src)) {
+                auto proxy = new AnyDictionaryProxy();
+                AnyDictionary&& d = py_to_cpp(py::cast<py::dict>(src));
+                proxy->fetch_any_dictionary().swap(d);
+                value = proxy;
+                return true;
+            }
+
+            return false;
+        }
+
+        /**
+         * Conversion part 2 (C++ -> Python): convert an AnyDictionaryProxy instance into
+         * a Python object. The second and third arguments are used to
+         * indicate the return value policy and parent object (for
+         * ``return_value_policy::reference_internal``) and are generally
+         * ignored by implicit casters.
+ */ + // static handle cast(AnyDictionaryProxy *src, return_value_policy policy, handle parent) { + // /* Do any additional work here */ + // return base::cast(src, policy, parent); + // } + }; +}} diff --git a/src/py-opentimelineio/opentimelineio-bindings/otio_anyVector.cpp b/src/py-opentimelineio/opentimelineio-bindings/otio_anyVector.cpp index 4d974383b2..f6db9074d1 100644 --- a/src/py-opentimelineio/opentimelineio-bindings/otio_anyVector.cpp +++ b/src/py-opentimelineio/opentimelineio-bindings/otio_anyVector.cpp @@ -19,6 +19,12 @@ void otio_any_vector_bindings(py::module m) { py::class_(m, "AnyVector") .def(py::init<>()) + .def(py::init([](const py::iterable &it) { + auto v = py_to_cpp(it); + auto proxy = new AnyVectorProxy; + proxy->fetch_any_vector().swap(*v.get_or_create_mutation_stamp()->any_vector); + return proxy; + })) .def("__internal_getitem__", &AnyVectorProxy::get_item, "index"_a) .def("__internal_setitem__", &AnyVectorProxy::set_item, "index"_a, "item"_a) .def("__internal_delitem__", &AnyVectorProxy::del_item, "index"_a) @@ -26,7 +32,3 @@ void otio_any_vector_bindings(py::module m) { .def("__internal_insert", &AnyVectorProxy::insert) .def("__iter__", &AnyVectorProxy::iter, py::return_value_policy::reference_internal); } - - - - diff --git a/src/py-opentimelineio/opentimelineio-bindings/otio_anyVector.h b/src/py-opentimelineio/opentimelineio-bindings/otio_anyVector.h index ed43e5dec1..41ee2f343d 100644 --- a/src/py-opentimelineio/opentimelineio-bindings/otio_anyVector.h +++ b/src/py-opentimelineio/opentimelineio-bindings/otio_anyVector.h @@ -13,6 +13,22 @@ namespace py = pybind11; struct AnyVectorProxy : public AnyVector::MutationStamp { using MutationStamp = AnyVector::MutationStamp; + AnyVectorProxy() {} + AnyVectorProxy(MutationStamp *v) { + any_vector = v->any_vector; + } + + AnyVectorProxy(const AnyVectorProxy& other) // Copy constructor. Required to convert a py::handle to an AnyVectorProxy. 
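Editor's note: with the `py::init` overloads added above and the dict-accepting type caster, the Python-facing `AnyDictionary` and `AnyVector` classes can be built directly from native containers. A minimal Python-side sketch (not part of the patch; the import path matches the updated tests, the values are illustrative):

```python
from opentimelineio._otio import AnyDictionary, AnyVector

# Construct directly from a plain dict / list instead of starting from an
# empty container and assigning item by item.
d = AnyDictionary({"fps": 24, "tags": ["final", "approved"], "nested": {"a": 1}})
v = AnyVector([1, 2.5, "asd"])

assert d["fps"] == 24
assert isinstance(d["nested"], AnyDictionary)  # dict values convert recursively
assert isinstance(d["tags"], AnyVector)        # list values become AnyVector
assert list(v) == [1, 2.5, "asd"]
```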
+ { + AnyVector* v = new AnyVector; + + AnyVector::iterator ptr; + for (ptr = other.any_vector->begin(); ptr < other.any_vector->end(); ptr++) { + v->push_back(*ptr); + } + any_vector = v; + } + static void throw_array_was_deleted() { throw py::value_error("Underlying C++ AnyVector object has been destroyed"); } diff --git a/src/py-opentimelineio/opentimelineio-bindings/otio_bindings.cpp b/src/py-opentimelineio/opentimelineio-bindings/otio_bindings.cpp index 74572125ca..ce9a78baf8 100644 --- a/src/py-opentimelineio/opentimelineio-bindings/otio_bindings.cpp +++ b/src/py-opentimelineio/opentimelineio-bindings/otio_bindings.cpp @@ -157,10 +157,9 @@ static void set_type_record(SerializableObject* so, std::string schema_name) { } static SerializableObject* instance_from_schema(std::string schema_name, - int schema_version, py::object data) { - AnyDictionary object_data = py_to_any_dictionary(data); + int schema_version, AnyDictionaryProxy* data) { auto result = TypeRegistry::instance().instance_from_schema(schema_name, schema_version, - object_data, ErrorStatusHandler()); + data->fetch_any_dictionary(), ErrorStatusHandler()); return result; } diff --git a/src/py-opentimelineio/opentimelineio-bindings/otio_serializableObjects.cpp b/src/py-opentimelineio/opentimelineio-bindings/otio_serializableObjects.cpp index e2821199cb..be7104c9f4 100644 --- a/src/py-opentimelineio/opentimelineio-bindings/otio_serializableObjects.cpp +++ b/src/py-opentimelineio/opentimelineio-bindings/otio_serializableObjects.cpp @@ -187,11 +187,11 @@ static void define_bases1(py::module m) { py::class_>(m, "SerializableObjectWithMetadata", py::dynamic_attr()) - .def(py::init([](std::string name, py::object metadata) { - return new SOWithMetadata(name, py_to_any_dictionary(metadata)); + .def(py::init([](std::string name, AnyDictionaryProxy* metadata) { + return new SOWithMetadata(name, metadata->fetch_any_dictionary()); }), py::arg_v("name"_a = std::string()), - py::arg_v("metadata"_a = py::none())) + py::arg_v("metadata"_a = py::dict())) .def_property_readonly("metadata", [](SOWithMetadata* s) { auto ptr = s->metadata().get_or_create_mutation_stamp(); return (AnyDictionaryProxy*)(ptr); }, py::return_value_policy::take_ownership) @@ -214,17 +214,17 @@ The marked range may have a zero duration. The marked range is in the owning ite std::string name, TimeRange marked_range, std::string const& color, - py::object metadata) { + AnyDictionaryProxy* metadata) { return new Marker( name, marked_range, color, - py_to_any_dictionary(metadata)); + metadata->fetch_any_dictionary()); }), py::arg_v("name"_a = std::string()), "marked_range"_a = TimeRange(), "color"_a = std::string(Marker::Color::red), - py::arg_v("metadata"_a = py::none())) + py::arg_v("metadata"_a = py::dict())) .def_property("color", &Marker::color, &Marker::set_color, "Color string for this marker (for example: 'RED'), based on the :class:`~Color` enum.") .def_property("marked_range", &Marker::marked_range, &Marker::set_marked_range, "Range this marker applies to, relative to the :class:`.Item` this marker is attached to (e.g. the :class:`.Clip` or :class:`.Track` that owns this marker)."); @@ -258,13 +258,13 @@ a named collection. A :class:`~SerializableCollection` is useful for serializing multiple timelines, clips, or media references to a single file. 
)docstring") .def(py::init([](std::string const& name, optional> children, - py::object metadata) { + AnyDictionaryProxy*metadata) { return new SerializableCollection(name, vector_or_default(children), - py_to_any_dictionary(metadata)); }), + metadata->fetch_any_dictionary()); }), py::arg_v("name"_a = std::string()), "children"_a = py::none(), - py::arg_v("metadata"_a = py::none())) + py::arg_v("metadata"_a = py::dict())) .def("__internal_getitem__", [](SerializableCollection* c, int index) { index = adjusted_vector_index(index, c->children()); if (index < 0 || index >= int(c->children().size())) { @@ -307,9 +307,9 @@ An object that can be composed within a :class:`~Composition` (such as :class:`~ py::class_>(m, "Item", py::dynamic_attr()) .def(py::init([](std::string name, optional source_range, - optional> effects, optional> markers, py::bool_ enabled, py::object metadata) { + optional> effects, optional> markers, py::bool_ enabled, AnyDictionaryProxy* metadata) { return new Item(name, source_range, - py_to_any_dictionary(metadata), + metadata->fetch_any_dictionary(), vector_or_default(effects), vector_or_default(markers), enabled); }), @@ -318,7 +318,7 @@ An object that can be composed within a :class:`~Composition` (such as :class:`~ "effects"_a = py::none(), "markers"_a = py::none(), "enabled"_a = true, - py::arg_v("metadata"_a = py::none())) + py::arg_v("metadata"_a = py::dict())) .def_property("enabled", &Item::enabled, &Item::set_enabled, "If true, an Item contributes to compositions. For example, when an audio/video clip is ``enabled=false`` the clip is muted/hidden.") .def_property("source_range", &Item::source_range, &Item::set_source_range) .def("available_range", [](Item* item) { @@ -359,15 +359,15 @@ An object that can be composed within a :class:`~Composition` (such as :class:`~ py::class_>(m, "Transition", py::dynamic_attr(), "Represents a transition between the two adjacent items in a :class:`.Track`. For example, a cross dissolve or wipe.") .def(py::init([](std::string const& name, std::string const& transition_type, RationalTime in_offset, RationalTime out_offset, - py::object metadata) { + AnyDictionaryProxy* metadata) { return new Transition(name, transition_type, in_offset, out_offset, - py_to_any_dictionary(metadata)); }), + metadata->fetch_any_dictionary()); }), py::arg_v("name"_a = std::string()), "transition_type"_a = std::string(), "in_offset"_a = RationalTime(), "out_offset"_a = RationalTime(), - py::arg_v("metadata"_a = py::none())) + py::arg_v("metadata"_a = py::dict())) .def_property("transition_type", &Transition::transition_type, &Transition::set_transition_type, "Kind of transition, as defined by the :class:`Type` enum.") .def_property("in_offset", &Transition::in_offset, &Transition::set_in_offset, "Amount of the previous clip this transition overlaps, exclusive.") .def_property("out_offset", &Transition::out_offset, &Transition::set_out_offset, "Amount of the next clip this transition overlaps, exclusive.") @@ -392,27 +392,27 @@ Other effects are handled by the :class:`Effect` class. 
py::class_>(m, "Gap", py::dynamic_attr()) .def(py::init([](std::string name, TimeRange source_range, optional> effects, - optional> markers, py::object metadata) { + optional> markers, AnyDictionaryProxy* metadata) { return new Gap(source_range, name, vector_or_default(effects), vector_or_default(markers), - py_to_any_dictionary(metadata)); }), + metadata->fetch_any_dictionary()); }), py::arg_v("name"_a = std::string()), "source_range"_a = TimeRange(), "effects"_a = py::none(), "markers"_a = py::none(), - py::arg_v("metadata"_a = py::none())) + py::arg_v("metadata"_a = py::dict())) .def(py::init([](std::string name, RationalTime duration, optional> effects, - optional> markers, py::object metadata) { + optional> markers, AnyDictionaryProxy* metadata) { return new Gap(duration, name, vector_or_default(effects), vector_or_default(markers), - py_to_any_dictionary(metadata)); }), + metadata->fetch_any_dictionary()); }), py::arg_v("name"_a = std::string()), "duration"_a = RationalTime(), "effects"_a = py::none(), "markers"_a = py::none(), - py::arg_v("metadata"_a = py::none())); + py::arg_v("metadata"_a = py::dict())); auto clip_class = py::class_>(m, "Clip", py::dynamic_attr(), R"docstring( A :class:`~Clip` is a segment of editable media (usually audio or video). @@ -420,14 +420,14 @@ A :class:`~Clip` is a segment of editable media (usually audio or video). Contains a :class:`.MediaReference` and a trim on that media reference. )docstring") .def(py::init([](std::string name, MediaReference* media_reference, - optional source_range, py::object metadata, + optional source_range, AnyDictionaryProxy* metadata, const std::string& active_media_reference) { - return new Clip(name, media_reference, source_range, py_to_any_dictionary(metadata), active_media_reference); + return new Clip(name, media_reference, source_range, metadata->fetch_any_dictionary(), active_media_reference); }), py::arg_v("name"_a = std::string()), "media_reference"_a = nullptr, "source_range"_a = nullopt, - py::arg_v("metadata"_a = py::none()), + py::arg_v("metadata"_a = py::dict()), "active_media_reference"_a = std::string(Clip::default_media_key)) .def_property_readonly_static("DEFAULT_MEDIA_KEY",[](py::object /* self */) { return Clip::default_media_key; @@ -453,16 +453,16 @@ Should be subclassed (for example by :class:`.Track` and :class:`.Stack`), not u )docstring") .def(py::init([](std::string name, optional> children, - optional source_range, py::object metadata) { + optional source_range, AnyDictionaryProxy* metadata) { Composition* c = new Composition(name, source_range, - py_to_any_dictionary(metadata)); + metadata->fetch_any_dictionary()); c->set_children(vector_or_default(children), ErrorStatusHandler()); return c; }), py::arg_v("name"_a = std::string()), "children"_a = py::none(), "source_range"_a = nullopt, - py::arg_v("metadata"_a = py::none())) + py::arg_v("metadata"_a = py::dict())) .def_property_readonly("composition_kind", &Composition::composition_kind) .def("is_parent_of", &Composition::is_parent_of, "other"_a) .def("range_of_child_at_index", [](Composition* c, int index) { @@ -536,11 +536,11 @@ Should be subclassed (for example by :class:`.Track` and :class:`.Stack`), not u composable_class .def(py::init([](std::string const& name, - py::object metadata) { - return new Composable(name, py_to_any_dictionary(metadata)); + AnyDictionaryProxy* metadata) { + return new Composable(name, metadata->fetch_any_dictionary()); }), py::arg_v("name"_a = std::string()), - py::arg_v("metadata"_a = py::none())) + 
py::arg_v("metadata"_a = py::dict())) .def("parent", &Composable::parent) .def("visible", &Composable::visible) .def("overlapping", &Composable::overlapping); @@ -554,13 +554,13 @@ Should be subclassed (for example by :class:`.Track` and :class:`.Stack`), not u track_class .def(py::init([](std::string name, optional> children, optional const& source_range, - std::string const& kind, py::object metadata) { + std::string const& kind, AnyDictionaryProxy* metadata) { auto composable_children = vector_or_default(children); Track* t = new Track( name, source_range, kind, - py_to_any_dictionary(metadata) + metadata->fetch_any_dictionary() ); if (!composable_children.empty()) t->set_children(composable_children, ErrorStatusHandler()); @@ -570,7 +570,7 @@ Should be subclassed (for example by :class:`.Track` and :class:`.Stack`), not u "children"_a = py::none(), "source_range"_a = nullopt, "kind"_a = std::string(Track::Kind::video), - py::arg_v("metadata"_a = py::none())) + py::arg_v("metadata"_a = py::dict())) .def_property("kind", &Track::kind, &Track::set_kind) .def("neighbors_of", [](Track* t, Composable* item, Track::NeighborGapPolicy policy) { auto result = t->neighbors_of(item, ErrorStatusHandler(), policy); @@ -591,12 +591,12 @@ Should be subclassed (for example by :class:`.Track` and :class:`.Stack`), not u optional const& source_range, optional> markers, optional> effects, - py::object metadata) { + AnyDictionaryProxy* metadata) { auto composable_children = vector_or_default(children); Stack* s = new Stack( name, source_range, - py_to_any_dictionary(metadata), + metadata->fetch_any_dictionary(), vector_or_default(effects), vector_or_default(markers) ); @@ -610,7 +610,7 @@ Should be subclassed (for example by :class:`.Track` and :class:`.Stack`), not u "source_range"_a = nullopt, "markers"_a = py::none(), "effects"_a = py::none(), - py::arg_v("metadata"_a = py::none())) + py::arg_v("metadata"_a = py::dict())) .def("find_clips", [](Stack* s, optional const& search_range, bool shallow_search) { return find_clips(s, search_range, shallow_search); }, "search_range"_a = nullopt, "shallow_search"_a = false); @@ -619,10 +619,10 @@ Should be subclassed (for example by :class:`.Track` and :class:`.Stack`), not u .def(py::init([](std::string name, optional> children, optional global_start_time, - py::object metadata) { + AnyDictionaryProxy* metadata) { auto composable_children = vector_or_default(children); Timeline* t = new Timeline(name, global_start_time, - py_to_any_dictionary(metadata)); + metadata->fetch_any_dictionary()); if (!composable_children.empty()) t->tracks()->set_children(composable_children, ErrorStatusHandler()); return t; @@ -630,7 +630,7 @@ Should be subclassed (for example by :class:`.Track` and :class:`.Stack`), not u py::arg_v("name"_a = std::string()), "tracks"_a = py::none(), "global_start_time"_a = nullopt, - py::arg_v("metadata"_a = py::none())) + py::arg_v("metadata"_a = py::dict())) .def_property("global_start_time", &Timeline::global_start_time, &Timeline::set_global_start_time) .def_property("tracks", &Timeline::tracks, &Timeline::set_tracks) .def("duration", [](Timeline* t) { @@ -653,33 +653,33 @@ static void define_effects(py::module m) { py::class_>(m, "Effect", py::dynamic_attr()) .def(py::init([](std::string name, std::string effect_name, - py::object metadata) { - return new Effect(name, effect_name, py_to_any_dictionary(metadata)); }), + AnyDictionaryProxy* metadata) { + return new Effect(name, effect_name, metadata->fetch_any_dictionary()); }), 
py::arg_v("name"_a = std::string()), "effect_name"_a = std::string(), - py::arg_v("metadata"_a = py::none())) + py::arg_v("metadata"_a = py::dict())) .def_property("effect_name", &Effect::effect_name, &Effect::set_effect_name); py::class_>(m, "TimeEffect", py::dynamic_attr(), "Base class for all effects that alter the timing of an item.") .def(py::init([](std::string name, std::string effect_name, - py::object metadata) { - return new TimeEffect(name, effect_name, py_to_any_dictionary(metadata)); }), + AnyDictionaryProxy* metadata) { + return new TimeEffect(name, effect_name, metadata->fetch_any_dictionary()); }), py::arg_v("name"_a = std::string()), "effect_name"_a = std::string(), - py::arg_v("metadata"_a = py::none())); + py::arg_v("metadata"_a = py::dict())); py::class_>(m, "LinearTimeWarp", py::dynamic_attr(), R"docstring( A time warp that applies a linear speed up or slow down across the entire clip. )docstring") .def(py::init([](std::string name, double time_scalar, - py::object metadata) { + AnyDictionaryProxy* metadata) { return new LinearTimeWarp(name, "LinearTimeWarp", time_scalar, - py_to_any_dictionary(metadata)); }), + metadata->fetch_any_dictionary()); }), py::arg_v("name"_a = std::string()), "time_scalar"_a = 1.0, - py::arg_v("metadata"_a = py::none())) + py::arg_v("metadata"_a = py::dict())) .def_property("time_scalar", &LinearTimeWarp::time_scalar, &LinearTimeWarp::set_time_scalar, R"docstring( Linear time scalar applied to clip. 2.0 means the clip occupies half the time in the parent item, i.e. plays at double speed, 0.5 means the clip occupies twice the time in the parent item, i.e. plays at half speed. @@ -689,10 +689,10 @@ Instead it affects the speed of the media displayed within that item. )docstring"); py::class_>(m, "FreezeFrame", py::dynamic_attr(), "Hold the first frame of the clip for the duration of the clip.") - .def(py::init([](std::string name, py::object metadata) { - return new FreezeFrame(name, py_to_any_dictionary(metadata)); }), + .def(py::init([](std::string name, AnyDictionaryProxy* metadata) { + return new FreezeFrame(name, metadata->fetch_any_dictionary()); }), py::arg_v("name"_a = std::string()), - py::arg_v("metadata"_a = py::none())); + py::arg_v("metadata"_a = py::dict())); } static void define_media_references(py::module m) { @@ -700,12 +700,12 @@ static void define_media_references(py::module m) { managing_ptr>(m, "MediaReference", py::dynamic_attr()) .def(py::init([](std::string name, optional available_range, - py::object metadata, + AnyDictionaryProxy* metadata, optional const& available_image_bounds) { - return new MediaReference(name, available_range, py_to_any_dictionary(metadata), available_image_bounds); }), + return new MediaReference(name, available_range, metadata->fetch_any_dictionary(), available_image_bounds); }), py::arg_v("name"_a = std::string()), "available_range"_a = nullopt, - py::arg_v("metadata"_a = py::none()), + py::arg_v("metadata"_a = py::dict()), "available_image_bounds"_a = nullopt) .def_property("available_range", &MediaReference::available_range, &MediaReference::set_available_range) @@ -716,18 +716,18 @@ static void define_media_references(py::module m) { managing_ptr>(m, "GeneratorReference", py::dynamic_attr()) .def(py::init([](std::string name, std::string generator_kind, optional const& available_range, - py::object parameters, py::object metadata, + AnyDictionaryProxy* parameters, AnyDictionaryProxy* metadata, optional const& available_image_bounds) { return new GeneratorReference(name, generator_kind, 
available_range, - py_to_any_dictionary(parameters), - py_to_any_dictionary(metadata), + parameters->fetch_any_dictionary(), + metadata->fetch_any_dictionary(), available_image_bounds); }), py::arg_v("name"_a = std::string()), "generator_kind"_a = std::string(), "available_range"_a = nullopt, - "parameters"_a = py::none(), - py::arg_v("metadata"_a = py::none()), + "parameters"_a = py::dict(), + py::arg_v("metadata"_a = py::dict()), "available_image_bounds"_a = nullopt) .def_property("generator_kind", &GeneratorReference::generator_kind, &GeneratorReference::set_generator_kind) .def_property_readonly("parameters", [](GeneratorReference* g) { @@ -744,17 +744,17 @@ Note that a :class:`~MissingReference` may have useful metadata, even if the loc .def(py::init([]( std::string name, optional available_range, - py::object metadata, + AnyDictionaryProxy* metadata, optional const& available_image_bounds) { return new MissingReference( name, available_range, - py_to_any_dictionary(metadata), + metadata->fetch_any_dictionary(), available_image_bounds); }), py::arg_v("name"_a = std::string()), "available_range"_a = nullopt, - py::arg_v("metadata"_a = py::none()), + py::arg_v("metadata"_a = py::dict()), "available_image_bounds"_a = nullopt); @@ -762,15 +762,15 @@ Note that a :class:`~MissingReference` may have useful metadata, even if the loc managing_ptr>(m, "ExternalReference", py::dynamic_attr()) .def(py::init([](std::string target_url, optional const& available_range, - py::object metadata, + AnyDictionaryProxy* metadata, optional const& available_image_bounds) { return new ExternalReference(target_url, available_range, - py_to_any_dictionary(metadata), + metadata->fetch_any_dictionary(), available_image_bounds); }), "target_url"_a = std::string(), "available_range"_a = nullopt, - py::arg_v("metadata"_a = py::none()), + py::arg_v("metadata"_a = py::dict()), "available_image_bounds"_a = nullopt) .def_property("target_url", &ExternalReference::target_url, &ExternalReference::set_target_url); @@ -858,7 +858,7 @@ Negative ``start_frame`` is also handled. The above example with a ``start_frame int frame_zero_padding, ImageSequenceReference::MissingFramePolicy const missing_frame_policy, optional const& available_range, - py::object metadata, + AnyDictionaryProxy* metadata, optional const& available_image_bounds) { return new ImageSequenceReference(target_url_base, name_prefix, @@ -869,7 +869,7 @@ Negative ``start_frame`` is also handled. The above example with a ``start_frame frame_zero_padding, missing_frame_policy, available_range, - py_to_any_dictionary(metadata), + metadata->fetch_any_dictionary(), available_image_bounds); }), "target_url_base"_a = std::string(), "name_prefix"_a = std::string(), @@ -880,7 +880,7 @@ Negative ``start_frame`` is also handled. 
The above example with a ``start_frame "frame_zero_padding"_a = 0, "missing_frame_policy"_a = ImageSequenceReference::MissingFramePolicy::error, "available_range"_a = nullopt, - py::arg_v("metadata"_a = py::none()), + py::arg_v("metadata"_a = py::dict()), "available_image_bounds"_a = nullopt) .def_property("target_url_base", &ImageSequenceReference::target_url_base, &ImageSequenceReference::set_target_url_base, "Everything leading up to the file name in the ``target_url``.") .def_property("name_prefix", &ImageSequenceReference::name_prefix, &ImageSequenceReference::set_name_prefix, "Everything in the file name leading up to the frame number.") diff --git a/src/py-opentimelineio/opentimelineio-bindings/otio_utils.cpp b/src/py-opentimelineio/opentimelineio-bindings/otio_utils.cpp index c327374e3c..e0c8b866c7 100644 --- a/src/py-opentimelineio/opentimelineio-bindings/otio_utils.cpp +++ b/src/py-opentimelineio/opentimelineio-bindings/otio_utils.cpp @@ -52,6 +52,7 @@ void _build_any_to_py_dispatch_table() { auto& t = _py_cast_dispatch_table; t[&typeid(void)] = [](any const& /* a */, bool) { return py::none(); }; + t[&typeid(nullptr)] = [](any const& /* a */, bool) { return py::none(); }; t[&typeid(bool)] = [](any const& a, bool) { return py::cast(safely_cast_bool_any(a)); }; t[&typeid(int)] = [](any const& a, bool) { return plain_int(safely_cast_int_any(a)); }; t[&typeid(int64_t)] = [](any const& a, bool) { return plain_int(safely_cast_int64_any(a)); }; @@ -94,30 +95,126 @@ void _build_any_to_py_dispatch_table() { } } -static py::object _value_to_any = py::none(); +any py_to_any(py::handle const& o) { + if (o.ptr() == nullptr || o.is_none()) { + return any(nullptr); + } + + if (py::isinstance(o)) { + return any(py_to_cpp(py::cast(o))); + } + + if (py::isinstance(o)) { + try { + return any(py_to_cpp(py::cast(o))); + } catch (...) {} + + try { + return any(py_to_cpp(py::cast(o))); + } catch (...) {} + + try { + return any(py_to_cpp(py::cast(o))); + } catch (...) {} + + try { + return any(py_to_cpp(py::cast(o))); + } catch (...) {} + + throw py::type_error("Failed to convert Python int to C++ int"); + } + + if (py::isinstance(o)) { + return any(py_to_cpp(py::cast(o))); + } + + if (py::isinstance(o)) { + return any(py_to_cpp(py::cast(o))); + } + + // Convert AnyDictionaryProxy and dict before vector and sequence because + // a dict is a sequence. 
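Editor's note: `py_to_any` replaces the old detour through the Python-side `_value_to_any` helper, mapping each supported Python type straight to its C++ counterpart, with `None` stored as a null value that the new `typeid(nullptr)` dispatch entries turn back into `None` and JSON `null`. A rough Python-side sketch of the observable behaviour (not part of the patch; key names and values are illustrative):

```python
import opentimelineio as otio

so = otio.core.SerializableObjectWithMetadata()
so.metadata["shot"] = {
    "flag": True,
    "count": 3,
    "ratio": 0.5,
    "label": "shot_010",
    "nothing": None,  # stored as a null value via the typeid(nullptr) entries
    "time": otio.opentime.RationalTime(24, 24),
}

shot = so.metadata["shot"]
assert isinstance(shot, otio.core.AnyDictionary)
assert shot["nothing"] is None
assert shot["time"] == otio.opentime.RationalTime(24, 24)
print(so.to_json_string())  # "nothing" serializes as JSON null
```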
+ if (py::isinstance(o)) { + return any(o.cast().fetch_any_dictionary()); + } + + if (py::isinstance(o)) { + return any(py_to_cpp(py::cast(o))); + } + + if (py::isinstance(o)) { + return any(o.cast().fetch_any_vector()); + } + + if (py::isinstance(o)) { + return any(py_to_cpp(py::cast(o))); + } -static void py_to_any(py::object const& o, any* result) { - if (_value_to_any.is_none()) { - py::object core = py::module::import("opentimelineio.core"); - _value_to_any = core.attr("_value_to_any"); + if (py::isinstance(o)) { + return any(py_to_cpp(o)); } - result->swap(_value_to_any(o).cast()->a); + if (py::isinstance(o)) { + return any(py_to_cpp(o)); + } + + if (py::isinstance(o)) { + return any(py_to_cpp(o)); + } + + if (py::isinstance(o)) { + SerializableObject::Retainer<> r(py::cast(o)); + return create_safely_typed_any(r.take_value()); + } + + py::type pytype = py::type::of(o); + throw py::type_error("Unsupported value type: " + py::cast(pytype.attr("__name__"))); +} + +bool py_to_cpp(py::bool_ const& o) { + return o.cast(); +} + +template +T py_to_cpp(py::int_ const& o) { + return o.cast(); } -AnyDictionary py_to_any_dictionary(py::object const& o) { - if (o.is_none()) { - return AnyDictionary(); +double py_to_cpp(py::float_ const& o) { + return o.cast(); +} + +std::string py_to_cpp(py::str const& o) { + return o.cast(); +} + +AnyDictionary py_to_cpp(py::dict const& o) { + AnyDictionary d = AnyDictionary(); + + for (auto &it : o) { + if (!py::isinstance(it.first)) { + throw py::value_error("Keys must be of type string, not " + py::cast(py::type::of(it.first).attr("__name__"))); + } + + // Note that storing an any is expected, since AnyDictionary values + // can only be of type any. + d[it.first.cast()] = py_to_any(it.second); } - any a; - py_to_any(o, &a); - if (!compare_typeids(a.type(), typeid(AnyDictionary))) { - throw py::type_error(string_printf("Expected an AnyDictionary (i.e. 
metadata); got %s instead", - type_name_for_error_message(a).c_str())); + return d; +} + +AnyVector py_to_cpp(py::iterable const& o) { + AnyVector av = AnyVector(); + for (auto &it : o) { + av.push_back(py_to_any(it)); } + return av; +} - return safely_cast_any_dictionary_any(a); +template +T py_to_cpp(py::handle const& o) { + return o.cast(); } py::object any_to_py(any const& a, bool top_level) { diff --git a/src/py-opentimelineio/opentimelineio-bindings/otio_utils.h b/src/py-opentimelineio/opentimelineio-bindings/otio_utils.h index 2a979aabc9..417a7b74b0 100644 --- a/src/py-opentimelineio/opentimelineio-bindings/otio_utils.h +++ b/src/py-opentimelineio/opentimelineio-bindings/otio_utils.h @@ -17,6 +17,7 @@ using namespace opentimelineio::OPENTIMELINEIO_VERSION; void install_external_keepalive_monitor(SerializableObject* so, bool apply_now); +bool compare_typeids(std::type_info const& lhs, std::type_info const& rhs); template struct managing_ptr { @@ -152,6 +153,14 @@ struct PyAny { pybind11::object any_to_py(any const& a, bool top_level = false); pybind11::object plain_string(std::string const& s); pybind11::object plain_int(int i); -AnyDictionary py_to_any_dictionary(pybind11::object const& o); - -bool compare_typeids(std::type_info const& lhs, std::type_info const& rhs); +any py_to_any(pybind11::handle const& o); + +bool py_to_cpp(pybind11::bool_ const& o); +template +T py_to_cpp(pybind11::int_ const& o); +double py_to_cpp(pybind11::float_ const& o); +std::string py_to_cpp(pybind11::str const& o); +AnyDictionary py_to_cpp(pybind11::dict const& o); +AnyVector py_to_cpp(pybind11::iterable const& o); +template +T py_to_cpp(pybind11::handle const& o); diff --git a/src/py-opentimelineio/opentimelineio/adapters/fcp_xml.py b/src/py-opentimelineio/opentimelineio/adapters/fcp_xml.py index 34e420768d..000c3a734b 100644 --- a/src/py-opentimelineio/opentimelineio/adapters/fcp_xml.py +++ b/src/py-opentimelineio/opentimelineio/adapters/fcp_xml.py @@ -743,7 +743,7 @@ def track_for_element(self, track_element, track_kind, context): timeline_item_tags = {"clipitem", "generatoritem", "transitionitem"} md_dict = _xml_tree_to_dict(track_element, timeline_item_tags) - track_metadata = {META_NAMESPACE: md_dict} if md_dict else None + track_metadata = {META_NAMESPACE: md_dict} if md_dict else {} track = schema.Track( name=track_name, diff --git a/src/py-opentimelineio/opentimelineio/core/__init__.py b/src/py-opentimelineio/opentimelineio/core/__init__.py index 5bd586cd4b..51a6dbf09e 100644 --- a/src/py-opentimelineio/opentimelineio/core/__init__.py +++ b/src/py-opentimelineio/opentimelineio/core/__init__.py @@ -2,12 +2,18 @@ # Copyright Contributors to the OpenTimelineIO project """Core implementation details and wrappers around the C++ library""" +from __future__ import annotations + +from typing import Union, Dict, TypeAlias + +from .. opentime import RationalTime, TimeRange, TimeTransform from .. _otio import ( # noqa # errors CannotComputeAvailableRangeError, # classes + AnyDictionary, Composable, Composition, Item, @@ -32,6 +38,17 @@ release_to_schema_version_map, ) +Metadata: TypeAlias = Union[Dict[str, 'MetadataValue'], AnyDictionary] +"""OTIO custom metadata type.""" + +MetadataValue: TypeAlias = Union[ + bool, int, float, str, None, + SerializableObject, + RationalTime, TimeRange, TimeTransform, + Metadata +] +"""Metadata's values.""" + from . 
_core_utils import ( # noqa add_method, _value_to_any, @@ -46,6 +63,9 @@ ) __all__ = [ + 'Metadata', + 'MetadataValue', + 'AnyDictionary', 'Composable', 'Composition', 'Item', diff --git a/src/py-opentimelineio/opentimelineio/core/_core_utils.py b/src/py-opentimelineio/opentimelineio/core/_core_utils.py index df2a075b97..e437f3bab0 100644 --- a/src/py-opentimelineio/opentimelineio/core/_core_utils.py +++ b/src/py-opentimelineio/opentimelineio/core/_core_utils.py @@ -302,6 +302,41 @@ def insert(self, index, item): if conversion_func else item ) + def __le__(self, other): # Taken from collections.abc.Set + if not isinstance(other, collections.abc.Sequence): + return NotImplemented + if len(self) > len(other): + return False + for elem in self: + if elem not in other: + return False + return True + + def __lt__(self, other): # Taken from collections.abc.Set + if not isinstance(other, collections.abc.Sequence): + return NotImplemented + return len(self) < len(other) and self.__le__(other) + + def __gt__(self, other): # Taken from collections.abc.Set + if not isinstance(other, collections.abc.Sequence): + return NotImplemented + return len(self) > len(other) and self.__ge__(other) + + def __ge__(self, other): # Taken from collections.abc.Set + if not isinstance(other, collections.abc.Sequence): + return NotImplemented + if len(self) < len(other): + return False + for elem in other: + if elem not in self: + return False + return True + + def __eq__(self, other): # Taken from collections.abc.Set + if not isinstance(other, collections.abc.Sequence): + return NotImplemented + return len(self) == len(other) and self.__le__(other) + collections.abc.MutableSequence.register(sequenceClass) sequenceClass.__radd__ = __radd__ sequenceClass.__add__ = __add__ @@ -311,6 +346,11 @@ def insert(self, index, item): sequenceClass.insert = insert sequenceClass.__str__ = __str__ sequenceClass.__repr__ = __repr__ + sequenceClass.__le__ = __le__ + sequenceClass.__lt__ = __lt__ + sequenceClass.__gt__ = __gt__ + sequenceClass.__ge__ = __ge__ + sequenceClass.__eq__ = __eq__ seen = set() for klass in (collections.abc.MutableSequence, collections.abc.Sequence): diff --git a/tests/test_core_utils.py b/tests/test_core_utils.py index a0a7b9425f..770618f614 100644 --- a/tests/test_core_utils.py +++ b/tests/test_core_utils.py @@ -2,11 +2,34 @@ import unittest import opentimelineio._otio +import opentimelineio.opentime import opentimelineio.core._core_utils class AnyDictionaryTests(unittest.TestCase): def test_main(self): + opentimelineio.core._core_utils.AnyDictionary({ + 'string': 'myvalue', + 'int': -999999999999, + 'list': [1, 2.5, 'asd'], + 'dict': {'map1': [345]}, + 'AnyVector': opentimelineio.core._core_utils.AnyVector(), + 'AnyDictionary': opentimelineio.core._core_utils.AnyDictionary(), + 'RationalTime': opentimelineio.opentime.RationalTime( + value=10.0, + rate=5.0 + ), + 'TimeRange': opentimelineio.opentime.TimeRange( + opentimelineio.opentime.RationalTime(value=1.0), + opentimelineio.opentime.RationalTime(value=100.0) + ), + 'TimeTransform': opentimelineio.opentime.TimeTransform( + offset=opentimelineio.opentime.RationalTime(value=55.0), + scale=999 + ), + 'SerializableObjectWithMetadata': opentimelineio._otio.SerializableObjectWithMetadata(), + }) + d = opentimelineio.core._core_utils.AnyDictionary() d['a'] = 1 @@ -100,10 +123,10 @@ def test_main(self): v.append(2) self.assertEqual(len(v), 2) - self.assertEqual([value for value in v], [1, 2]) + self.assertEqual(v, [1, 2]) v.insert(0, 5) - self.assertEqual([value 
for value in v], [5, 1, 2]) + self.assertEqual(v, [5, 1, 2]) self.assertEqual(v[0], 5) self.assertEqual(v[-3], 5) @@ -124,13 +147,11 @@ def test_main(self): del v[0] self.assertEqual(len(v), 2) - # Doesn't work... - # assert v == [1, 100] - self.assertEqual([value for value in v], [1, 100]) + self.assertEqual(v, [1, 100]) del v[1000] # This will surprisingly delete the last item... self.assertEqual(len(v), 1) - self.assertEqual([value for value in v], [1]) + self.assertEqual(v, [1]) # Will delete the last item even if the index doesn't match. # It's a surprising behavior. @@ -144,7 +165,7 @@ def test_main(self): items.append(value) self.assertEqual(items, [1, '234', {}]) - self.assertFalse(v == [1, '234', {}]) # __eq__ is not implemented + self.assertTrue(v == [1, '234', {}]) self.assertTrue(1 in v) # Test __contains__ self.assertTrue('234' in v) @@ -181,13 +202,13 @@ def test_main(self): self.assertEqual(v3[1:7:2], [1, 3, 5]) del v3[2:7] - self.assertEqual(list(v3), [0, 1, 7, 8, 9]) + self.assertEqual(v3, [0, 1, 7, 8, 9]) v4 = opentimelineio.core._core_utils.AnyVector() v4.extend(range(10)) del v4[::2] - self.assertEqual(list(v4), [1, 3, 5, 7, 9]) + self.assertEqual(v4, [1, 3, 5, 7, 9]) v5 = opentimelineio.core._core_utils.AnyVector() tmplist = [1, 2] @@ -225,7 +246,7 @@ def test_raises_if_ref_destroyed(self): def test_copy(self): list1 = [1, 2, [3, 4], 5] copied = copy.copy(list1) - self.assertEqual(list(list1), list(copied)) + self.assertEqual(list1, copied) v = opentimelineio.core._core_utils.AnyVector() v.extend([1, 2, [3, 4], 5]) diff --git a/tests/test_serializable_object.py b/tests/test_serializable_object.py index 788a3f7988..358e09540d 100755 --- a/tests/test_serializable_object.py +++ b/tests/test_serializable_object.py @@ -4,7 +4,9 @@ # Copyright Contributors to the OpenTimelineIO project import opentimelineio as otio +from opentimelineio._otio import AnyDictionary, AnyVector import opentimelineio.test_utils as otio_test_utils +from opentimelineio.opentime import RationalTime, TimeRange, TimeTransform import unittest import json @@ -36,6 +38,71 @@ def test_cons(self): so.metadata['foo'] = 'bar' self.assertEqual(so.metadata['foo'], 'bar') + def test_cons2(self): + v = AnyVector() + v.append(1) + v.append('inside any vector') + + d = AnyDictionary() + d['key_1'] = 1234 + d['key_2'] = {'asdasdasd': 5.6} + so = otio.core.SerializableObjectWithMetadata( + metadata={ + 'string': 'myvalue', + 'int': -999999999999, + 'list': [1, 2.5, 'asd'], + 'dict': {'map1': [345]}, + 'AnyVector': v, + 'AnyDictionary': d, + 'RationalTime': RationalTime( + value=10.0, + rate=5.0 + ), + 'TimeRange': TimeRange( + RationalTime(value=1.0), + RationalTime(value=100.0) + ), + 'TimeTransform': TimeTransform( + offset=RationalTime(value=55.0), + scale=999 + ) + } + ) + so.metadata['foo'] = 'bar' + self.assertEqual(so.metadata['foo'], 'bar') + self.assertEqual(so.metadata['string'], 'myvalue') + self.assertEqual(so.metadata['int'], -999999999999) + self.assertIsInstance(so.metadata['list'], AnyVector) + self.assertEqual( + so.metadata['list'], + AnyVector([1, 2.5, 'asd']) + ) + self.assertIsInstance(so.metadata['dict'], AnyDictionary) + self.assertIsInstance(so.metadata['dict']['map1'], AnyVector) + self.assertEqual(so.metadata['dict'], AnyDictionary({'map1': [345]})) + self.assertIsInstance(so.metadata['AnyVector'], AnyVector) + self.assertEqual( + so.metadata['AnyVector'], + AnyVector([1, 'inside any vector']) + ) + self.assertIsInstance(so.metadata['AnyDictionary'], AnyDictionary) + self.assertEqual( + 
so.metadata['AnyDictionary'], + AnyDictionary({'key_1': 1234, 'key_2': {'asdasdasd': 5.6}}) + ) + self.assertEqual( + so.metadata['RationalTime'], + RationalTime(value=10.0, rate=5.0) + ) + self.assertEqual(so.metadata['TimeRange'], TimeRange( + RationalTime(value=1.0), + RationalTime(value=100.0) + )) + self.assertEqual(so.metadata['TimeTransform'], TimeTransform( + offset=RationalTime(value=55.0), + scale=999 + )) + def test_update(self): so = otio.core.SerializableObjectWithMetadata() so.metadata.update({"foo": "bar"})
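Editor's note: the sequence comparison methods added in `_core_utils.py` are what make the updated assertions above possible. Since they are adapted from `collections.abc.Set`, equality is membership-based rather than order-sensitive. A quick illustration (not part of the patch):

```python
from opentimelineio._otio import AnyVector

v = AnyVector([1, "234", {}])

assert v == [1, "234", {}]    # direct comparison against a plain list now works
assert not (v == [1, "234"])  # sequences of different length are never equal

# Because the operators come from collections.abc.Set, ordering is ignored:
assert AnyVector([1, 2]) == [2, 1]
```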